Compare commits

..

4 Commits

44 changed files with 1054 additions and 1326 deletions

View File

@@ -29,7 +29,7 @@ The codebase is split into:
## Language Style
- **Python**: Keep type hints on functions and attributes, and implement relevant special methods (e.g., `__repr__`, `__str__`). Prefer `TypedDict` over `dict` or `Mapping` for type safety and better code documentation.
- **Python**: Keep type hints on functions and attributes, and implement relevant special methods (e.g., `__repr__`, `__str__`).
- **TypeScript**: Use the strict config, rely on ESLint (`pnpm lint:fix` preferred) plus `pnpm type-check:tsgo`, and avoid `any` types.
## General Practices

View File

@@ -1,17 +1,13 @@
import logging
import time
from flask import request
from opentelemetry.trace import get_current_span
from opentelemetry.trace.span import INVALID_SPAN_ID, INVALID_TRACE_ID
from configs import dify_config
from contexts.wrapper import RecyclableContextVar
from controllers.console.error import UnauthorizedAndForceLogout
from core.logging.context import init_request_context
from dify_app import DifyApp
from services.enterprise.enterprise_service import EnterpriseService
from services.feature_service import LicenseStatus
logger = logging.getLogger(__name__)
@@ -35,38 +31,6 @@ def create_flask_app_with_configs() -> DifyApp:
init_request_context()
RecyclableContextVar.increment_thread_recycles()
# Enterprise license validation for API endpoints (both console and webapp)
# When license expires, block all API access except bootstrap endpoints needed
# for the frontend to load the license expiration page without infinite reloads.
if dify_config.ENTERPRISE_ENABLED:
is_console_api = request.path.startswith("/console/api/")
is_webapp_api = request.path.startswith("/api/") and not is_console_api
if is_console_api or is_webapp_api:
if is_console_api:
console_exempt_prefixes = (
"/console/api/system-features",
"/console/api/setup",
"/console/api/version",
"/console/api/activate/check",
)
is_exempt = any(request.path.startswith(p) for p in console_exempt_prefixes)
else: # webapp API
is_exempt = request.path.startswith("/api/system-features")
if not is_exempt:
try:
# Check license status with caching (10 min TTL)
license_status = EnterpriseService.get_cached_license_status()
if license_status in (LicenseStatus.INACTIVE, LicenseStatus.EXPIRED, LicenseStatus.LOST):
raise UnauthorizedAndForceLogout(
f"Enterprise license is {license_status}. Please contact your administrator."
)
except UnauthorizedAndForceLogout:
raise
except Exception:
logger.exception("Failed to check enterprise license status")
# add after request hook for injecting trace headers from OpenTelemetry span context
# Only adds headers when OTEL is enabled and has valid context
@dify_app.after_request

View File

@@ -6,13 +6,6 @@ from typing import Any
import httpx
from core.helper.trace_id_helper import generate_traceparent_header
from services.errors.enterprise import (
EnterpriseAPIBadRequestError,
EnterpriseAPIError,
EnterpriseAPIForbiddenError,
EnterpriseAPINotFoundError,
EnterpriseAPIUnauthorizedError,
)
logger = logging.getLogger(__name__)
@@ -71,56 +64,10 @@ class BaseRequest:
request_kwargs["timeout"] = timeout
response = client.request(method, url, **request_kwargs)
# Always validate HTTP status and raise domain-specific errors
if not response.is_success:
cls._handle_error_response(response)
# Legacy support: still respect raise_for_status parameter
if raise_for_status:
response.raise_for_status()
return response.json()
@classmethod
def _handle_error_response(cls, response: httpx.Response) -> None:
    """Raise a domain-specific error for a non-2xx enterprise API response.

    The error message is taken from the JSON body when possible — the
    "message", "error" or "detail" keys are tried in that order — and falls
    back to a generic status-line message if the body is not parseable JSON.

    Raises:
        EnterpriseAPIBadRequestError: for HTTP 400.
        EnterpriseAPIUnauthorizedError: for HTTP 401.
        EnterpriseAPIForbiddenError: for HTTP 403.
        EnterpriseAPINotFoundError: for HTTP 404.
        EnterpriseAPIError: for any other non-2xx status.
    """
    fallback = f"Enterprise API request failed: {response.status_code} {response.reason_phrase}"
    message = fallback
    try:
        payload = response.json()
        if isinstance(payload, dict):
            # Common error response formats:
            # {"error": "...", "message": "..."}
            # {"message": "..."}
            # {"detail": "..."}
            message = payload.get("message") or payload.get("error") or payload.get("detail") or fallback
    except Exception:
        # Body was not JSON — keep the generic status-line message.
        logger.debug(
            "Failed to parse error response from enterprise API (status=%s)", response.status_code, exc_info=True
        )

    # Map well-known status codes to their dedicated error types.
    status_to_error = {
        400: EnterpriseAPIBadRequestError,
        401: EnterpriseAPIUnauthorizedError,
        403: EnterpriseAPIForbiddenError,
        404: EnterpriseAPINotFoundError,
    }
    error_cls = status_to_error.get(response.status_code)
    if error_cls is not None:
        raise error_cls(message)
    raise EnterpriseAPIError(message, status_code=response.status_code)
class EnterpriseRequest(BaseRequest):
base_url = os.environ.get("ENTERPRISE_API_URL", "ENTERPRISE_API_URL")

View File

@@ -5,15 +5,11 @@ from datetime import datetime
from pydantic import BaseModel, ConfigDict, Field, model_validator
from configs import dify_config
from extensions.ext_redis import redis_client
from services.enterprise.base import EnterpriseRequest
logger = logging.getLogger(__name__)
DEFAULT_WORKSPACE_JOIN_TIMEOUT_SECONDS = 1.0
# License status cache configuration
LICENSE_STATUS_CACHE_KEY = "enterprise:license:status"
LICENSE_STATUS_CACHE_TTL = 600 # 10 minutes
class WebAppSettings(BaseModel):
@@ -227,47 +223,3 @@ class EnterpriseService:
params = {"appId": app_id}
EnterpriseRequest.send_request("DELETE", "/webapp/clean", params=params)
@classmethod
def get_cached_license_status(cls):
    """
    Get enterprise license status with Redis caching to reduce HTTP calls.

    Only caches valid statuses (active/expiring) since invalid statuses
    should be re-checked every request — the admin may update the license
    at any time.

    Returns license status string or None if unavailable.
    """
    if not dify_config.ENTERPRISE_ENABLED:
        return None

    # Fast path: the cache holds only valid statuses, so a hit can be
    # returned immediately.
    try:
        cached = redis_client.get(LICENSE_STATUS_CACHE_KEY)
        if cached:
            return cached.decode("utf-8") if isinstance(cached, bytes) else cached
    except Exception:
        logger.debug("Failed to get license status from cache, calling enterprise API")

    # Slow path: cache miss or Redis failure — ask the enterprise API.
    try:
        license_info = cls.get_info().get("License")
        if license_info:
            from services.feature_service import LicenseStatus

            status = license_info.get("status", LicenseStatus.INACTIVE)
            # Only cache valid statuses so license updates are picked up immediately
            if status in (LicenseStatus.ACTIVE, LicenseStatus.EXPIRING):
                try:
                    redis_client.setex(LICENSE_STATUS_CACHE_KEY, LICENSE_STATUS_CACHE_TTL, status)
                except Exception:
                    logger.debug("Failed to cache license status")
            return status
    except Exception:
        logger.exception("Failed to get enterprise license status")
    return None

View File

@@ -7,7 +7,6 @@ from . import (
conversation,
dataset,
document,
enterprise,
file,
index,
message,
@@ -22,7 +21,6 @@ __all__ = [
"conversation",
"dataset",
"document",
"enterprise",
"file",
"index",
"message",

View File

@@ -1,45 +0,0 @@
"""Enterprise service errors."""
from services.errors.base import BaseServiceError
class EnterpriseServiceError(BaseServiceError):
    """Base exception for enterprise service errors."""

    def __init__(self, description: str | None = None, status_code: int | None = None):
        super().__init__(description)
        # HTTP status code of the failing enterprise API call, when known.
        self.status_code = status_code


class EnterpriseAPIError(EnterpriseServiceError):
    """Generic enterprise API error (non-2xx response)."""


class EnterpriseAPINotFoundError(EnterpriseServiceError):
    """Enterprise API returned 404 Not Found."""

    def __init__(self, description: str | None = None):
        super().__init__(description, status_code=404)


class EnterpriseAPIForbiddenError(EnterpriseServiceError):
    """Enterprise API returned 403 Forbidden."""

    def __init__(self, description: str | None = None):
        super().__init__(description, status_code=403)


class EnterpriseAPIUnauthorizedError(EnterpriseServiceError):
    """Enterprise API returned 401 Unauthorized."""

    def __init__(self, description: str | None = None):
        super().__init__(description, status_code=401)


class EnterpriseAPIBadRequestError(EnterpriseServiceError):
    """Enterprise API returned 400 Bad Request."""

    def __init__(self, description: str | None = None):
        super().__init__(description, status_code=400)

View File

@@ -379,14 +379,11 @@ class FeatureService:
)
features.webapp_auth.sso_config.protocol = enterprise_info.get("SSOEnforcedForWebProtocol", "")
# License status and expiry are always exposed so the login page can
# show the expiry UI after a force-logout (the user is unauthenticated
# at that point). Workspace usage details remain auth-gated.
if license_info := enterprise_info.get("License"):
if is_authenticated and (license_info := enterprise_info.get("License")):
features.license.status = LicenseStatus(license_info.get("status", LicenseStatus.INACTIVE))
features.license.expired_at = license_info.get("expiredAt", "")
if is_authenticated and (workspaces_info := license_info.get("workspaces")):
if workspaces_info := license_info.get("workspaces"):
features.license.workspaces.enabled = workspaces_info.get("enabled", False)
features.license.workspaces.limit = workspaces_info.get("limit", 0)
features.license.workspaces.size = workspaces_info.get("used", 0)

View File

@@ -1,252 +0,0 @@
"""Container-backed integration tests for DocumentService.rename_document real SQL paths."""
import datetime
import json
from unittest.mock import create_autospec, patch
from uuid import uuid4
import pytest
from models import Account
from models.dataset import Dataset, Document
from models.enums import CreatorUserRole
from models.model import UploadFile
from services.dataset_service import DocumentService
FIXED_UPLOAD_CREATED_AT = datetime.datetime(2024, 1, 1, 0, 0, 0)
@pytest.fixture
def mock_env():
    """Patch only non-SQL dependency used by rename_document: current_user context."""
    fake_user = create_autospec(Account, instance=True)
    with patch("services.dataset_service.current_user", fake_user) as current_user:
        # Give the mocked user a tenant and id so permission checks pass.
        current_user.current_tenant_id = str(uuid4())
        current_user.id = str(uuid4())
        yield {"current_user": current_user}
def make_dataset(db_session_with_containers, dataset_id=None, tenant_id=None, built_in_field_enabled=False):
    """Persist a dataset row for rename_document integration scenarios."""
    row = Dataset(
        tenant_id=tenant_id or str(uuid4()),
        name=f"dataset-{uuid4()}",
        data_source_type="upload_file",
        created_by=str(uuid4()),
    )
    # Assign id / flag after construction so callers control identity.
    row.id = dataset_id or str(uuid4())
    row.built_in_field_enabled = built_in_field_enabled
    db_session_with_containers.add(row)
    db_session_with_containers.commit()
    return row
def make_document(
    db_session_with_containers,
    document_id=None,
    dataset_id=None,
    tenant_id=None,
    name="Old Name",
    data_source_info=None,
    doc_metadata=None,
):
    """Persist a document row used by rename_document integration scenarios."""
    row = Document(
        tenant_id=tenant_id or str(uuid4()),
        dataset_id=dataset_id or str(uuid4()),
        position=1,
        data_source_type="upload_file",
        data_source_info=json.dumps(data_source_info or {}),
        batch=f"batch-{uuid4()}",
        name=name,
        created_from="web",
        created_by=str(uuid4()),
        doc_form="text_model",
    )
    # Post-construction attributes: fixed id, completed status, metadata copy.
    row.id = document_id or str(uuid4())
    row.indexing_status = "completed"
    row.doc_metadata = dict(doc_metadata or {})
    db_session_with_containers.add(row)
    db_session_with_containers.commit()
    return row
def make_upload_file(db_session_with_containers, tenant_id: str, file_id: str, name: str):
    """Persist an upload file row referenced by document.data_source_info."""
    row = UploadFile(
        tenant_id=tenant_id,
        storage_type="local",
        key=f"uploads/{uuid4()}",
        name=name,
        size=128,
        extension="pdf",
        mime_type="application/pdf",
        created_by_role=CreatorUserRole.ACCOUNT,
        created_by=str(uuid4()),
        created_at=FIXED_UPLOAD_CREATED_AT,
        used=False,
    )
    # Callers pin the id so documents can reference it via upload_file_id.
    row.id = file_id
    db_session_with_containers.add(row)
    db_session_with_containers.commit()
    return row
def test_rename_document_success(db_session_with_containers, mock_env):
    """Rename succeeds and returns the renamed document identity by id."""
    tenant = mock_env["current_user"].current_tenant_id
    dataset = make_dataset(db_session_with_containers, str(uuid4()), tenant)
    document = make_document(
        db_session_with_containers,
        document_id=str(uuid4()),
        dataset_id=dataset.id,
        tenant_id=tenant,
    )
    new_name = "New Document Name"

    result = DocumentService.rename_document(dataset.id, document.id, new_name)

    db_session_with_containers.refresh(document)
    assert result.id == document.id
    assert document.name == new_name


def test_rename_document_with_built_in_fields(db_session_with_containers, mock_env):
    """Built-in document_name metadata is updated while existing metadata keys are preserved."""
    tenant = mock_env["current_user"].current_tenant_id
    dataset = make_dataset(db_session_with_containers, str(uuid4()), tenant, built_in_field_enabled=True)
    document = make_document(
        db_session_with_containers,
        document_id=str(uuid4()),
        dataset_id=dataset.id,
        tenant_id=tenant,
        doc_metadata={"foo": "bar"},
    )
    new_name = "Renamed"

    DocumentService.rename_document(dataset.id, document.id, new_name)

    db_session_with_containers.refresh(document)
    assert document.name == new_name
    assert document.doc_metadata["document_name"] == new_name
    assert document.doc_metadata["foo"] == "bar"


def test_rename_document_updates_upload_file_when_present(db_session_with_containers, mock_env):
    """Rename propagates to UploadFile.name when upload_file_id is present in data_source_info."""
    tenant = mock_env["current_user"].current_tenant_id
    file_id = str(uuid4())
    dataset = make_dataset(db_session_with_containers, str(uuid4()), tenant)
    document = make_document(
        db_session_with_containers,
        document_id=str(uuid4()),
        dataset_id=dataset.id,
        tenant_id=tenant,
        data_source_info={"upload_file_id": file_id},
    )
    upload_file = make_upload_file(db_session_with_containers, tenant_id=tenant, file_id=file_id, name="old.pdf")
    new_name = "Renamed"

    DocumentService.rename_document(dataset.id, document.id, new_name)

    db_session_with_containers.refresh(document)
    db_session_with_containers.refresh(upload_file)
    assert document.name == new_name
    assert upload_file.name == new_name


def test_rename_document_does_not_update_upload_file_when_missing_id(db_session_with_containers, mock_env):
    """Rename does not update UploadFile when data_source_info lacks upload_file_id."""
    tenant = mock_env["current_user"].current_tenant_id
    dataset = make_dataset(db_session_with_containers, str(uuid4()), tenant)
    document = make_document(
        db_session_with_containers,
        document_id=str(uuid4()),
        dataset_id=dataset.id,
        tenant_id=tenant,
        data_source_info={"url": "https://example.com"},
    )
    # A file that no document references — must stay untouched.
    untouched_file = make_upload_file(
        db_session_with_containers,
        tenant_id=tenant,
        file_id=str(uuid4()),
        name="untouched.pdf",
    )
    new_name = "Another Name"

    DocumentService.rename_document(dataset.id, document.id, new_name)

    db_session_with_containers.refresh(document)
    db_session_with_containers.refresh(untouched_file)
    assert document.name == new_name
    assert untouched_file.name == "untouched.pdf"


def test_rename_document_dataset_not_found(db_session_with_containers, mock_env):
    """Rename raises Dataset not found when dataset id does not exist."""
    with pytest.raises(ValueError, match="Dataset not found"):
        DocumentService.rename_document(str(uuid4()), str(uuid4()), "x")


def test_rename_document_not_found(db_session_with_containers, mock_env):
    """Rename raises Document not found when document id is absent in the dataset."""
    dataset = make_dataset(db_session_with_containers, str(uuid4()), mock_env["current_user"].current_tenant_id)
    with pytest.raises(ValueError, match="Document not found"):
        DocumentService.rename_document(dataset.id, str(uuid4()), "x")


def test_rename_document_permission_denied_when_tenant_mismatch(db_session_with_containers, mock_env):
    """Rename raises No permission when document tenant differs from current_user tenant."""
    dataset = make_dataset(db_session_with_containers, str(uuid4()), mock_env["current_user"].current_tenant_id)
    # The document deliberately belongs to a different tenant.
    document = make_document(
        db_session_with_containers,
        dataset_id=dataset.id,
        tenant_id=str(uuid4()),
    )
    with pytest.raises(ValueError, match="No permission"):
        DocumentService.rename_document(dataset.id, document.id, "x")

View File

@@ -3,7 +3,6 @@ from unittest.mock import MagicMock, patch
import pytest
from faker import Faker
from core.indexing_runner import DocumentIsPausedError
from enums.cloud_plan import CloudPlan
from models import Account, Tenant, TenantAccountJoin, TenantAccountRole
from models.dataset import Dataset, Document, DocumentSegment
@@ -283,7 +282,7 @@ class TestDuplicateDocumentIndexingTasks:
return dataset, documents
def _test_duplicate_document_indexing_task_success(
def test_duplicate_document_indexing_task_success(
self, db_session_with_containers, mock_external_service_dependencies
):
"""
@@ -325,7 +324,7 @@ class TestDuplicateDocumentIndexingTasks:
processed_documents = call_args[0][0] # First argument should be documents list
assert len(processed_documents) == 3
def _test_duplicate_document_indexing_task_with_segment_cleanup(
def test_duplicate_document_indexing_task_with_segment_cleanup(
self, db_session_with_containers, mock_external_service_dependencies
):
"""
@@ -375,7 +374,7 @@ class TestDuplicateDocumentIndexingTasks:
mock_external_service_dependencies["indexing_runner"].assert_called_once()
mock_external_service_dependencies["indexing_runner_instance"].run.assert_called_once()
def _test_duplicate_document_indexing_task_dataset_not_found(
def test_duplicate_document_indexing_task_dataset_not_found(
self, db_session_with_containers, mock_external_service_dependencies
):
"""
@@ -446,7 +445,7 @@ class TestDuplicateDocumentIndexingTasks:
processed_documents = call_args[0][0] # First argument should be documents list
assert len(processed_documents) == 2 # Only existing documents
def _test_duplicate_document_indexing_task_indexing_runner_exception(
def test_duplicate_document_indexing_task_indexing_runner_exception(
self, db_session_with_containers, mock_external_service_dependencies
):
"""
@@ -487,7 +486,7 @@ class TestDuplicateDocumentIndexingTasks:
assert updated_document.indexing_status == "parsing"
assert updated_document.processing_started_at is not None
def _test_duplicate_document_indexing_task_billing_sandbox_plan_batch_limit(
def test_duplicate_document_indexing_task_billing_sandbox_plan_batch_limit(
self, db_session_with_containers, mock_external_service_dependencies
):
"""
@@ -550,7 +549,7 @@ class TestDuplicateDocumentIndexingTasks:
# Verify indexing runner was not called due to early validation error
mock_external_service_dependencies["indexing_runner_instance"].run.assert_not_called()
def _test_duplicate_document_indexing_task_billing_vector_space_limit_exceeded(
def test_duplicate_document_indexing_task_billing_vector_space_limit_exceeded(
self, db_session_with_containers, mock_external_service_dependencies
):
"""
@@ -784,90 +783,3 @@ class TestDuplicateDocumentIndexingTasks:
document_ids=document_ids,
)
mock_queue.delete_task_key.assert_not_called()
def test_successful_duplicate_document_indexing(self, db_session_with_containers, mock_external_service_dependencies):
    """Test successful duplicate document indexing flow."""
    self._test_duplicate_document_indexing_task_success(db_session_with_containers, mock_external_service_dependencies)


def test_duplicate_document_indexing_dataset_not_found(self, db_session_with_containers, mock_external_service_dependencies):
    """Test duplicate document indexing when dataset is not found."""
    self._test_duplicate_document_indexing_task_dataset_not_found(
        db_session_with_containers, mock_external_service_dependencies
    )


def test_duplicate_document_indexing_with_billing_enabled_sandbox_plan(self, db_session_with_containers, mock_external_service_dependencies):
    """Test duplicate document indexing with billing enabled and sandbox plan."""
    self._test_duplicate_document_indexing_task_billing_sandbox_plan_batch_limit(
        db_session_with_containers, mock_external_service_dependencies
    )


def test_duplicate_document_indexing_with_billing_limit_exceeded(self, db_session_with_containers, mock_external_service_dependencies):
    """Test duplicate document indexing when billing limit is exceeded."""
    self._test_duplicate_document_indexing_task_billing_vector_space_limit_exceeded(
        db_session_with_containers, mock_external_service_dependencies
    )


def test_duplicate_document_indexing_runner_error(self, db_session_with_containers, mock_external_service_dependencies):
    """Test duplicate document indexing when IndexingRunner raises an error."""
    self._test_duplicate_document_indexing_task_indexing_runner_exception(
        db_session_with_containers, mock_external_service_dependencies
    )


def _test_duplicate_document_indexing_task_document_is_paused(self, db_session_with_containers, mock_external_service_dependencies):
    """Test duplicate document indexing when document is paused."""
    # Arrange: two paused documents and a runner that raises DocumentIsPausedError.
    dataset, documents = self._create_test_dataset_and_documents(
        db_session_with_containers, mock_external_service_dependencies, document_count=2
    )
    for doc in documents:
        doc.is_paused = True
        db_session_with_containers.add(doc)
    db_session_with_containers.commit()
    doc_ids = [doc.id for doc in documents]
    runner = mock_external_service_dependencies["indexing_runner_instance"]
    runner.run.side_effect = DocumentIsPausedError("Document paused")

    # Act
    _duplicate_document_indexing_task(dataset.id, doc_ids)
    db_session_with_containers.expire_all()

    # Assert: pause flag survives, status stays "parsing", display shows "paused".
    for doc_id in doc_ids:
        refreshed = db_session_with_containers.query(Document).where(Document.id == doc_id).first()
        assert refreshed.is_paused is True
        assert refreshed.indexing_status == "parsing"
        assert refreshed.display_status == "paused"
        assert refreshed.processing_started_at is not None
    runner.run.assert_called_once()


def test_duplicate_document_indexing_document_is_paused(self, db_session_with_containers, mock_external_service_dependencies):
    """Test duplicate document indexing when document is paused."""
    self._test_duplicate_document_indexing_task_document_is_paused(
        db_session_with_containers, mock_external_service_dependencies
    )


def test_duplicate_document_indexing_cleans_old_segments(self, db_session_with_containers, mock_external_service_dependencies):
    """Test that duplicate document indexing cleans old segments."""
    self._test_duplicate_document_indexing_task_with_segment_cleanup(
        db_session_with_containers, mock_external_service_dependencies
    )

View File

@@ -0,0 +1,176 @@
from types import SimpleNamespace
from unittest.mock import Mock, create_autospec, patch
import pytest
from models import Account
from services.dataset_service import DocumentService
@pytest.fixture
def mock_env():
    """Patch dependencies used by DocumentService.rename_document.

    Mocks:
    - DatasetService.get_dataset
    - DocumentService.get_document
    - current_user (with current_tenant_id)
    - db.session
    """
    fake_user = create_autospec(Account, instance=True)
    with (
        patch("services.dataset_service.DatasetService.get_dataset") as get_dataset,
        patch("services.dataset_service.DocumentService.get_document") as get_document,
        patch("services.dataset_service.current_user", fake_user) as current_user,
        patch("extensions.ext_database.db.session") as db_session,
    ):
        # All tests in this module assume the caller belongs to tenant-123.
        current_user.current_tenant_id = "tenant-123"
        yield {
            "get_dataset": get_dataset,
            "get_document": get_document,
            "current_user": current_user,
            "db_session": db_session,
        }
def make_dataset(dataset_id="dataset-123", tenant_id="tenant-123", built_in_field_enabled=False):
    """Build a lightweight attribute-bag stand-in for a Dataset row."""
    return SimpleNamespace(
        id=dataset_id,
        tenant_id=tenant_id,
        built_in_field_enabled=built_in_field_enabled,
    )
def make_document(
    document_id="document-123",
    dataset_id="dataset-123",
    tenant_id="tenant-123",
    name="Old Name",
    data_source_info=None,
    doc_metadata=None,
):
    """Build a Mock document exposing the attributes rename_document touches."""
    doc = Mock()
    doc.id = document_id
    doc.dataset_id = dataset_id
    doc.tenant_id = tenant_id
    doc.name = name
    doc.data_source_info = data_source_info or {}
    # property-like usage in code relies on a dict
    doc.data_source_info_dict = dict(doc.data_source_info)
    doc.doc_metadata = dict(doc_metadata or {})
    return doc
def test_rename_document_success(mock_env):
    """A plain rename updates the document and persists it via db.session."""
    new_name = "New Document Name"
    mock_env["get_dataset"].return_value = make_dataset("dataset-123")
    document = make_document(document_id="document-123", dataset_id="dataset-123")
    mock_env["get_document"].return_value = document

    renamed = DocumentService.rename_document("dataset-123", "document-123", new_name)

    assert renamed is document
    assert document.name == new_name
    mock_env["db_session"].add.assert_called_once_with(document)
    mock_env["db_session"].commit.assert_called_once()


def test_rename_document_with_built_in_fields(mock_env):
    """With built-in fields enabled, doc_metadata gains document_name and keeps other keys."""
    new_name = "Renamed"
    mock_env["get_dataset"].return_value = make_dataset("dataset-123", built_in_field_enabled=True)
    document = make_document(document_id="document-123", dataset_id="dataset-123", doc_metadata={"foo": "bar"})
    mock_env["get_document"].return_value = document

    DocumentService.rename_document("dataset-123", "document-123", new_name)

    assert document.name == new_name
    # BuiltInField.document_name == "document_name" in service code
    assert document.doc_metadata["document_name"] == new_name
    assert document.doc_metadata["foo"] == "bar"


def test_rename_document_updates_upload_file_when_present(mock_env):
    """An upload_file_id in data_source_info triggers the UploadFile rename UPDATE."""
    new_name = "Renamed"
    mock_env["get_dataset"].return_value = make_dataset("dataset-123")
    document = make_document(
        document_id="document-123",
        dataset_id="dataset-123",
        data_source_info={"upload_file_id": "file-123"},
    )
    mock_env["get_document"].return_value = document
    # Intercept UploadFile rename UPDATE chain
    query = Mock()
    query.where.return_value = query
    mock_env["db_session"].query.return_value = query

    DocumentService.rename_document("dataset-123", "document-123", new_name)

    assert document.name == new_name
    mock_env["db_session"].query.assert_called()  # update executed


def test_rename_document_does_not_update_upload_file_when_missing_id(mock_env):
    """
    When data_source_info_dict exists but does not contain "upload_file_id",
    UploadFile should not be updated.
    """
    new_name = "Another Name"
    mock_env["get_dataset"].return_value = make_dataset("dataset-123")
    # Truthy data_source_info_dict, but without the upload_file_id key.
    document = make_document(
        document_id="document-123",
        dataset_id="dataset-123",
        data_source_info={"url": "https://example.com"},
    )
    mock_env["get_document"].return_value = document

    DocumentService.rename_document("dataset-123", "document-123", new_name)

    assert document.name == new_name
    mock_env["db_session"].query.assert_not_called()


def test_rename_document_dataset_not_found(mock_env):
    """A missing dataset raises ValueError('Dataset not found')."""
    mock_env["get_dataset"].return_value = None
    with pytest.raises(ValueError, match="Dataset not found"):
        DocumentService.rename_document("missing", "doc", "x")


def test_rename_document_not_found(mock_env):
    """A missing document raises ValueError('Document not found')."""
    dataset = make_dataset("dataset-123")
    mock_env["get_dataset"].return_value = dataset
    mock_env["get_document"].return_value = None
    with pytest.raises(ValueError, match="Document not found"):
        DocumentService.rename_document(dataset.id, "missing", "x")


def test_rename_document_permission_denied_when_tenant_mismatch(mock_env):
    """A tenant mismatch between document and current_user raises ValueError('No permission')."""
    dataset = make_dataset("dataset-123")
    mock_env["get_dataset"].return_value = dataset
    # different tenant than current_user.current_tenant_id
    document = make_document(dataset_id=dataset.id, tenant_id="tenant-other")
    mock_env["get_document"].return_value = document
    with pytest.raises(ValueError, match="No permission"):
        DocumentService.rename_document(dataset.id, document.id, "x")

View File

@@ -1,38 +1,158 @@
"""Unit tests for queue/wrapper behaviors in duplicate document indexing tasks (non-database logic)."""
"""
Unit tests for duplicate document indexing tasks.
This module tests the duplicate document indexing task functionality including:
- Task enqueuing to different queues (normal, priority, tenant-isolated)
- Batch processing of multiple duplicate documents
- Progress tracking through task lifecycle
- Error handling and retry mechanisms
- Cleanup of old document data before re-indexing
"""
import uuid
from unittest.mock import Mock, patch
from unittest.mock import MagicMock, Mock, patch
import pytest
from core.indexing_runner import DocumentIsPausedError, IndexingRunner
from core.rag.pipeline.queue import TenantIsolatedTaskQueue
from enums.cloud_plan import CloudPlan
from models.dataset import Dataset, Document, DocumentSegment
from tasks.duplicate_document_indexing_task import (
_duplicate_document_indexing_task,
_duplicate_document_indexing_task_with_tenant_queue,
duplicate_document_indexing_task,
normal_duplicate_document_indexing_task,
priority_duplicate_document_indexing_task,
)
# ============================================================================
# Fixtures
# ============================================================================
@pytest.fixture
def tenant_id():
    """Generate a unique tenant ID for testing."""
    return str(uuid.uuid4())


@pytest.fixture
def dataset_id():
    """Generate a unique dataset ID for testing."""
    return str(uuid.uuid4())


@pytest.fixture
def document_ids():
    """Generate a list of document IDs for testing."""
    return [str(uuid.uuid4()) for _ in range(3)]


@pytest.fixture
def mock_dataset(dataset_id, tenant_id):
    """Create a mock Dataset object."""
    ds = Mock(spec=Dataset)
    ds.id = dataset_id
    ds.tenant_id = tenant_id
    ds.indexing_technique = "high_quality"
    ds.embedding_model_provider = "openai"
    ds.embedding_model = "text-embedding-ada-002"
    return ds


@pytest.fixture
def mock_documents(document_ids, dataset_id):
    """Create mock Document objects."""

    def _build(doc_id):
        # One waiting, error-free document per requested id.
        doc = Mock(spec=Document)
        doc.id = doc_id
        doc.dataset_id = dataset_id
        doc.indexing_status = "waiting"
        doc.error = None
        doc.stopped_at = None
        doc.processing_started_at = None
        doc.doc_form = "text_model"
        return doc

    return [_build(doc_id) for doc_id in document_ids]


@pytest.fixture
def mock_document_segments(document_ids):
    """Create mock DocumentSegment objects."""
    segments = []
    for doc_id in document_ids:
        # Three segments per document, with deterministic index node ids.
        for index in range(3):
            seg = Mock(spec=DocumentSegment)
            seg.id = str(uuid.uuid4())
            seg.document_id = doc_id
            seg.index_node_id = f"node-{doc_id}-{index}"
            segments.append(seg)
    return segments
@pytest.fixture
def mock_db_session():
    """Mock database session via session_factory.create_session()."""
    with patch("tasks.duplicate_document_indexing_task.session_factory", autospec=True) as factory:
        session = MagicMock()
        # Allow tests to observe session.close() via context manager teardown
        session.close = MagicMock()
        ctx = MagicMock()
        ctx.__enter__.return_value = session

        def _close_on_exit(*args, **kwargs):
            # Returns None so exceptions inside the with-block are not suppressed.
            session.close()

        ctx.__exit__.side_effect = _close_on_exit
        factory.create_session.return_value = ctx

        # Chainable query stubs: session.query(...).where(...) returns the same mock.
        query = MagicMock()
        session.query.return_value = query
        query.where.return_value = query
        session.scalars.return_value = MagicMock()
        yield session
@pytest.fixture
def mock_indexing_runner():
    """Patch IndexingRunner so every instantiation yields one shared mock runner."""
    with patch("tasks.duplicate_document_indexing_task.IndexingRunner", autospec=True) as runner_cls:
        runner = MagicMock(spec=IndexingRunner)
        runner_cls.return_value = runner
        yield runner
@pytest.fixture
def mock_feature_service():
    """Patch FeatureService with billing disabled and ample vector space."""
    with patch("tasks.duplicate_document_indexing_task.FeatureService", autospec=True) as service:
        features = Mock()
        features.billing = Mock(enabled=False)
        features.vector_space = Mock(size=0, limit=1000)
        service.get_features.return_value = features
        yield service
@pytest.fixture
def mock_index_processor_factory():
    """Patch IndexProcessorFactory; its processor exposes an observable ``clean``."""
    with patch("tasks.duplicate_document_indexing_task.IndexProcessorFactory", autospec=True) as factory:
        processor = MagicMock()
        processor.clean = Mock()
        factory.return_value.init_index_processor.return_value = processor
        yield factory
@pytest.fixture
def mock_tenant_isolated_queue():
    """Patch TenantIsolatedTaskQueue with an empty queue and no-op task ops.

    Fixes a merge artifact: the original created a ``Mock(spec=...)`` that was
    immediately overwritten by a ``MagicMock(spec=...)`` (dead assignment), and
    a stray diff hunk-marker line had been pasted into the body.
    """
    with patch("tasks.duplicate_document_indexing_task.TenantIsolatedTaskQueue", autospec=True) as queue_cls:
        mock_queue = MagicMock(spec=TenantIsolatedTaskQueue)
        mock_queue.pull_tasks.return_value = []  # queue starts empty
        mock_queue.delete_task_key = Mock()
        mock_queue.set_task_waiting_time = Mock()
        # NOTE(review): wiring the class to return the mock matches the usual
        # pattern here — confirm against the elided original lines.
        queue_cls.return_value = mock_queue
        yield mock_queue
# ============================================================================
# Tests for deprecated duplicate_document_indexing_task
# ============================================================================
class TestDuplicateDocumentIndexingTask:
"""Tests for the deprecated duplicate_document_indexing_task function."""
@@ -65,6 +190,258 @@ class TestDuplicateDocumentIndexingTask:
mock_core_func.assert_called_once_with(dataset_id, document_ids)
# ============================================================================
# Tests for _duplicate_document_indexing_task core function
# ============================================================================
class TestDuplicateDocumentIndexingTaskCore:
    """Tests for the _duplicate_document_indexing_task core function.

    The repeated per-test ``_scalars_side_effect`` closure (five verbatim
    copies, counting calls via a function attribute) is extracted into the
    private ``_scalars_returning`` helper; each test's observable behavior
    is unchanged.
    """

    @staticmethod
    def _scalars_returning(first_result, later_results):
        """Build a side effect for ``session.scalars``.

        The first call returns a result whose ``.all()`` yields
        ``first_result`` (the documents query); every subsequent call
        yields ``later_results`` (the per-document segment queries).
        """
        state = {"calls": 0}

        def _side_effect(*args, **kwargs):
            result = MagicMock()
            result.all.return_value = first_result if state["calls"] == 0 else later_results
            state["calls"] += 1
            return result

        return _side_effect

    def test_successful_duplicate_document_indexing(
        self,
        mock_db_session,
        mock_indexing_runner,
        mock_feature_service,
        mock_index_processor_factory,
        mock_dataset,
        mock_documents,
        mock_document_segments,
        dataset_id,
        document_ids,
    ):
        """Test successful duplicate document indexing flow."""
        # Arrange: dataset lookup succeeds; scalars() yields documents, then segments.
        mock_db_session.query.return_value.where.return_value.first.return_value = mock_dataset
        mock_db_session.scalars.side_effect = self._scalars_returning(mock_documents, mock_document_segments)

        # Act
        _duplicate_document_indexing_task(dataset_id, document_ids)

        # Assert: indexing ran once and every document moved to "parsing".
        mock_indexing_runner.run.assert_called_once()
        for doc in mock_documents:
            assert doc.indexing_status == "parsing"
            assert doc.processing_started_at is not None
        # Session was committed and closed.
        assert mock_db_session.commit.called
        assert mock_db_session.close.called

    def test_duplicate_document_indexing_dataset_not_found(self, mock_db_session, dataset_id, document_ids):
        """Test duplicate document indexing when dataset is not found."""
        # Arrange: dataset lookup returns nothing.
        mock_db_session.query.return_value.where.return_value.first.return_value = None

        # Act
        _duplicate_document_indexing_task(dataset_id, document_ids)

        # Assert: the session is still closed on the early-exit path.
        assert mock_db_session.close.called

    def test_duplicate_document_indexing_with_billing_enabled_sandbox_plan(
        self,
        mock_db_session,
        mock_feature_service,
        mock_dataset,
        dataset_id,
        document_ids,
    ):
        """Test duplicate document indexing with billing enabled and sandbox plan."""
        # Arrange: sandbox plan with multiple documents should be rejected.
        mock_db_session.query.return_value.where.return_value.first.return_value = mock_dataset
        mock_features = mock_feature_service.get_features.return_value
        mock_features.billing.enabled = True
        mock_features.billing.subscription.plan = CloudPlan.SANDBOX

        # Act
        _duplicate_document_indexing_task(dataset_id, document_ids)

        # Assert: the failure state is persisted via commit.
        mock_db_session.commit.assert_called()

    def test_duplicate_document_indexing_with_billing_limit_exceeded(
        self,
        mock_db_session,
        mock_feature_service,
        mock_dataset,
        mock_documents,
        dataset_id,
        document_ids,
    ):
        """Test duplicate document indexing when billing limit is exceeded."""
        # Arrange: documents exist, vector space nearly full on a paid plan.
        mock_db_session.query.return_value.where.return_value.first.return_value = mock_dataset
        mock_db_session.scalars.side_effect = self._scalars_returning(mock_documents, [])
        mock_features = mock_feature_service.get_features.return_value
        mock_features.billing.enabled = True
        mock_features.billing.subscription.plan = CloudPlan.TEAM
        mock_features.vector_space.size = 990
        mock_features.vector_space.limit = 1000

        # Act
        _duplicate_document_indexing_task(dataset_id, document_ids)

        # Assert: the session is committed and closed despite the limit error.
        assert mock_db_session.commit.called
        assert mock_db_session.close.called

    def test_duplicate_document_indexing_runner_error(
        self,
        mock_db_session,
        mock_indexing_runner,
        mock_feature_service,
        mock_index_processor_factory,
        mock_dataset,
        mock_documents,
        dataset_id,
        document_ids,
    ):
        """Test duplicate document indexing when IndexingRunner raises an error."""
        # Arrange: indexing blows up after documents are fetched.
        mock_db_session.query.return_value.where.return_value.first.return_value = mock_dataset
        mock_db_session.scalars.side_effect = self._scalars_returning(mock_documents, [])
        mock_indexing_runner.run.side_effect = Exception("Indexing error")

        # Act
        _duplicate_document_indexing_task(dataset_id, document_ids)

        # Assert: the session is closed exactly once even after the error.
        mock_db_session.close.assert_called_once()

    def test_duplicate_document_indexing_document_is_paused(
        self,
        mock_db_session,
        mock_indexing_runner,
        mock_feature_service,
        mock_index_processor_factory,
        mock_dataset,
        mock_documents,
        dataset_id,
        document_ids,
    ):
        """Test duplicate document indexing when document is paused."""
        # Arrange: the runner signals a paused document mid-run.
        mock_db_session.query.return_value.where.return_value.first.return_value = mock_dataset
        mock_db_session.scalars.side_effect = self._scalars_returning(mock_documents, [])
        mock_indexing_runner.run.side_effect = DocumentIsPausedError("Document paused")

        # Act
        _duplicate_document_indexing_task(dataset_id, document_ids)

        # Assert: DocumentIsPausedError is handled gracefully; session closed once.
        mock_db_session.close.assert_called_once()

    def test_duplicate_document_indexing_cleans_old_segments(
        self,
        mock_db_session,
        mock_indexing_runner,
        mock_feature_service,
        mock_index_processor_factory,
        mock_dataset,
        mock_documents,
        mock_document_segments,
        dataset_id,
        document_ids,
    ):
        """Test that duplicate document indexing cleans old segments."""
        # Arrange: each document has pre-existing segments to remove.
        mock_db_session.query.return_value.where.return_value.first.return_value = mock_dataset
        mock_db_session.scalars.side_effect = self._scalars_returning(mock_documents, mock_document_segments)
        mock_processor = mock_index_processor_factory.return_value.init_index_processor.return_value

        # Act
        _duplicate_document_indexing_task(dataset_id, document_ids)

        # Assert: the index processor cleaned once per document.
        assert mock_processor.clean.call_count == len(mock_documents)
        # And segments were removed with a batch DELETE against document_segments.
        execute_sqls = [" ".join(str(c[0][0]).split()) for c in mock_db_session.execute.call_args_list]
        assert any("DELETE FROM document_segments" in sql for sql in execute_sqls)
# ============================================================================
# Tests for tenant queue wrapper function
# ============================================================================
class TestDuplicateDocumentIndexingTaskWithTenantQueue:
"""Tests for _duplicate_document_indexing_task_with_tenant_queue function."""
@@ -159,6 +536,11 @@ class TestDuplicateDocumentIndexingTaskWithTenantQueue:
mock_tenant_isolated_queue.pull_tasks.assert_called_once()
# ============================================================================
# Tests for normal_duplicate_document_indexing_task
# ============================================================================
class TestNormalDuplicateDocumentIndexingTask:
"""Tests for normal_duplicate_document_indexing_task function."""
@@ -199,6 +581,11 @@ class TestNormalDuplicateDocumentIndexingTask:
)
# ============================================================================
# Tests for priority_duplicate_document_indexing_task
# ============================================================================
class TestPriorityDuplicateDocumentIndexingTask:
"""Tests for priority_duplicate_document_indexing_task function."""

12
api/uv.lock generated
View File

@@ -441,14 +441,14 @@ wheels = [
[[package]]
name = "authlib"
version = "1.6.7"
version = "1.6.6"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "cryptography" },
]
sdist = { url = "https://files.pythonhosted.org/packages/49/dc/ed1681bf1339dd6ea1ce56136bad4baabc6f7ad466e375810702b0237047/authlib-1.6.7.tar.gz", hash = "sha256:dbf10100011d1e1b34048c9d120e83f13b35d69a826ae762b93d2fb5aafc337b", size = 164950, upload-time = "2026-02-06T14:04:14.171Z" }
sdist = { url = "https://files.pythonhosted.org/packages/bb/9b/b1661026ff24bc641b76b78c5222d614776b0c085bcfdac9bd15a1cb4b35/authlib-1.6.6.tar.gz", hash = "sha256:45770e8e056d0f283451d9996fbb59b70d45722b45d854d58f32878d0a40c38e", size = 164894, upload-time = "2025-12-12T08:01:41.464Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/f8/00/3ed12264094ec91f534fae429945efbaa9f8c666f3aa7061cc3b2a26a0cd/authlib-1.6.7-py2.py3-none-any.whl", hash = "sha256:c637340d9a02789d2efa1d003a7437d10d3e565237bcb5fcbc6c134c7b95bab0", size = 244115, upload-time = "2026-02-06T14:04:12.141Z" },
{ url = "https://files.pythonhosted.org/packages/54/51/321e821856452f7386c4e9df866f196720b1ad0c5ea1623ea7399969ae3b/authlib-1.6.6-py2.py3-none-any.whl", hash = "sha256:7d9e9bc535c13974313a87f53e8430eb6ea3d1cf6ae4f6efcd793f2e949143fd", size = 244005, upload-time = "2025-12-12T08:01:40.209Z" },
]
[[package]]
@@ -1989,11 +1989,11 @@ wheels = [
[[package]]
name = "fickling"
version = "0.1.9"
version = "0.1.8"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/25/bd/ca7127df0201596b0b30f9ab3d36e565bb9d6f8f4da1560758b817e81b65/fickling-0.1.9.tar.gz", hash = "sha256:bb518c2fd833555183bc46b6903bb4022f3ae0436a69c3fb149cfc75eebaac33", size = 336940, upload-time = "2026-03-03T23:32:19.449Z" }
sdist = { url = "https://files.pythonhosted.org/packages/88/be/cd91e3921f064230ac9462479e4647fb91a7b0d01677103fce89f52e3042/fickling-0.1.8.tar.gz", hash = "sha256:25a0bc7acda76176a9087b405b05f7f5021f76079aa26c6fe3270855ec57d9bf", size = 336756, upload-time = "2026-02-21T00:57:26.106Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/92/49/c597bad508c74917901432b41ae5a8f036839a7fb8d0d29a89765f5d3643/fickling-0.1.9-py3-none-any.whl", hash = "sha256:ccc3ce3b84733406ade2fe749717f6e428047335157c6431eefd3e7e970a06d1", size = 52786, upload-time = "2026-03-03T23:32:17.533Z" },
{ url = "https://files.pythonhosted.org/packages/02/92/af72f783ac57fa2452f8f921c9441366c42ae1f03f5af41718445114c82f/fickling-0.1.8-py3-none-any.whl", hash = "sha256:97218785cfe00a93150808dcf9e3eb512371e0484e3ce0b05bc460b97240f292", size = 52613, upload-time = "2026-02-21T00:57:24.82Z" },
]
[[package]]

View File

@@ -8,7 +8,7 @@ import GotoAnything from '@/app/components/goto-anything'
import Header from '@/app/components/header'
import HeaderWrapper from '@/app/components/header/header-wrapper'
import ReadmePanel from '@/app/components/plugins/readme-panel'
import { AppContextProvider } from '@/context/app-context-provider'
import { AppContextProvider } from '@/context/app-context'
import { EventEmitterContextProvider } from '@/context/event-emitter'
import { ModalContextProvider } from '@/context/modal-context'
import { ProviderContextProvider } from '@/context/provider-context'

View File

@@ -4,7 +4,7 @@ import { AppInitializer } from '@/app/components/app-initializer'
import AmplitudeProvider from '@/app/components/base/amplitude'
import GA, { GaType } from '@/app/components/base/ga'
import HeaderWrapper from '@/app/components/header/header-wrapper'
import { AppContextProvider } from '@/context/app-context-provider'
import { AppContextProvider } from '@/context/app-context'
import { EventEmitterContextProvider } from '@/context/event-emitter'
import { ModalContextProvider } from '@/context/modal-context'
import { ProviderContextProvider } from '@/context/provider-context'

View File

@@ -2,7 +2,7 @@
import Loading from '@/app/components/base/loading'
import Header from '@/app/signin/_header'
import { AppContextProvider } from '@/context/app-context-provider'
import { AppContextProvider } from '@/context/app-context'
import { useGlobalPublicStore } from '@/context/global-public-context'
import useDocumentTitle from '@/hooks/use-document-title'
import { useIsLogin } from '@/service/use-common'
@@ -38,7 +38,7 @@ export default function SignInLayout({ children }: any) {
</div>
</div>
{systemFeatures.branding.enabled === false && (
<div className="px-8 py-6 text-text-tertiary system-xs-regular">
<div className="system-xs-regular px-8 py-6 text-text-tertiary">
©
{' '}
{new Date().getFullYear()}

View File

@@ -295,7 +295,13 @@ const AudioPlayer: React.FC<AudioPlayerProps> = ({ src, srcs }) => {
<source key={index} src={srcUrl} />
))}
</audio>
<button type="button" data-testid="play-pause-btn" className="inline-flex shrink-0 cursor-pointer items-center justify-center border-none text-text-accent transition-all hover:text-text-accent-secondary disabled:text-components-button-primary-bg-disabled" onClick={togglePlay} disabled={!isAudioAvailable}>
<button
type="button"
data-testid="play-pause-btn"
className="inline-flex shrink-0 cursor-pointer items-center justify-center border-none text-text-accent transition-all hover:text-text-accent-secondary disabled:text-components-button-primary-bg-disabled"
onClick={togglePlay}
disabled={!isAudioAvailable}
>
{isPlaying
? (
<div className="i-ri-pause-circle-fill h-5 w-5" />

View File

@@ -158,7 +158,7 @@ const Answer: FC<AnswerProps> = ({
<div className={cn('group relative pr-10', chatAnswerContainerInner)}>
<div
ref={humanInputFormContainerRef}
className={cn('body-lg-regular relative inline-block w-full max-w-full rounded-2xl bg-chat-bubble-bg px-4 py-3 text-text-primary')}
className={cn('relative inline-block w-full max-w-full rounded-2xl bg-chat-bubble-bg px-4 py-3 text-text-primary body-lg-regular')}
>
{
!responding && contentIsEmpty && !hasAgentThoughts && (
@@ -227,7 +227,7 @@ const Answer: FC<AnswerProps> = ({
<div className="absolute -top-2 left-6 h-3 w-0.5 bg-chat-answer-human-input-form-divider-bg" />
<div
ref={contentRef}
className="body-lg-regular relative inline-block w-full max-w-full rounded-2xl bg-chat-bubble-bg px-4 py-3 text-text-primary"
className="relative inline-block w-full max-w-full rounded-2xl bg-chat-bubble-bg px-4 py-3 text-text-primary body-lg-regular"
>
{
!responding && (
@@ -322,7 +322,7 @@ const Answer: FC<AnswerProps> = ({
<div className={cn('group relative pr-10', chatAnswerContainerInner)}>
<div
ref={contentRef}
className={cn('body-lg-regular relative inline-block max-w-full rounded-2xl bg-chat-bubble-bg px-4 py-3 text-text-primary', workflowProcess && 'w-full')}
className={cn('relative inline-block max-w-full rounded-2xl bg-chat-bubble-bg px-4 py-3 text-text-primary body-lg-regular', workflowProcess && 'w-full')}
>
{
!responding && (

View File

@@ -332,8 +332,7 @@ const Chat: FC<ChatProps> = ({
!noStopResponding && isResponding && (
<div data-testid="stop-responding-container" className="mb-2 flex justify-center">
<Button className="border-components-panel-border bg-components-panel-bg text-components-button-secondary-text" onClick={onStopResponding}>
{/* eslint-disable-next-line tailwindcss/no-unknown-classes */}
<div className="i-custom-vender-solid-mediaanddevices-stop-circle mr-[5px] h-3.5 w-3.5" />
<span className="i-custom-vender-solid-mediaAndDevices-stop-circle mr-[5px] h-3.5 w-3.5" />
<span className="text-xs font-normal">{t('operation.stopResponding', { ns: 'appDebug' })}</span>
</Button>
</div>

View File

@@ -1,4 +1,5 @@
/* eslint-disable next/no-img-element */
import type { ExtraProps } from 'streamdown'
import { render, screen } from '@testing-library/react'
import userEvent from '@testing-library/user-event'
import { beforeEach, describe, expect, it, vi } from 'vitest'
@@ -25,13 +26,14 @@ vi.mock('@/app/components/base/image-uploader/image-preview', () => ({
}))
/**
* Interfaces to avoid 'any' and satisfy strict linting
* Helper to build a minimal hast-compatible Element node for testing.
* The runtime code only reads `node.children[*].tagName` and `.properties.src`,
* so we keep the mock minimal and cast to satisfy the full hast Element type.
*/
type MockNode = {
children?: Array<{
tagName?: string
properties?: { src?: string }
}>
type MockChild = { tagName?: string, properties?: { src?: string } }
function mockNode(children: MockChild[]): ExtraProps['node'] {
return { type: 'element', tagName: 'p', properties: {}, children } as unknown as ExtraProps['node']
}
type HookReturn = {
@@ -64,7 +66,7 @@ describe('PluginParagraph', () => {
})
it('should render a standard paragraph when not an image', () => {
const node: MockNode = { children: [{ tagName: 'span' }] }
const node = mockNode([{ tagName: 'span' }])
render(
<PluginParagraph node={node}>
Hello World
@@ -75,9 +77,7 @@ describe('PluginParagraph', () => {
})
it('should render an ImageGallery when the first child is an image', () => {
const node: MockNode = {
children: [{ tagName: 'img', properties: { src: 'test-img.png' } }],
}
const node = mockNode([{ tagName: 'img', properties: { src: 'test-img.png' } }])
vi.mocked(getMarkdownImageURL).mockReturnValue('https://cdn.com/test-img.png')
const { container } = render(
@@ -93,9 +93,7 @@ describe('PluginParagraph', () => {
})
it('should use a blob URL when asset data is successfully fetched', () => {
const node: MockNode = {
children: [{ tagName: 'img', properties: { src: 'test-img.png' } }],
}
const node = mockNode([{ tagName: 'img', properties: { src: 'test-img.png' } }])
const mockBlob = new Blob([''], { type: 'image/png' })
vi.mocked(usePluginReadmeAsset).mockReturnValue({
data: mockBlob,
@@ -114,12 +112,10 @@ describe('PluginParagraph', () => {
})
it('should render remaining children below the image gallery', () => {
const node: MockNode = {
children: [
{ tagName: 'img', properties: { src: 'test-img.png' } },
{ tagName: 'text' },
],
}
const node = mockNode([
{ tagName: 'img', properties: { src: 'test-img.png' } },
{ tagName: 'text' },
])
render(
<PluginParagraph pluginInfo={mockPluginInfo} node={node}>
@@ -132,9 +128,7 @@ describe('PluginParagraph', () => {
})
it('should revoke the blob URL on unmount to prevent memory leaks', () => {
const node: MockNode = {
children: [{ tagName: 'img', properties: { src: 'test-img.png' } }],
}
const node = mockNode([{ tagName: 'img', properties: { src: 'test-img.png' } }])
const mockBlob = new Blob([''], { type: 'image/png' })
vi.mocked(usePluginReadmeAsset).mockReturnValue({
data: mockBlob,
@@ -155,9 +149,7 @@ describe('PluginParagraph', () => {
it('should open the image preview modal when an image in the gallery is clicked', async () => {
const user = userEvent.setup()
const node: MockNode = {
children: [{ tagName: 'img', properties: { src: 'test-img.png' } }],
}
const node = mockNode([{ tagName: 'img', properties: { src: 'test-img.png' } }])
vi.mocked(getMarkdownImageURL).mockReturnValue('https://cdn.com/gallery.png')
const { container } = render(

View File

@@ -1,61 +0,0 @@
import { render, screen } from '@testing-library/react'
import * as React from 'react'
import { describe, expect, it } from 'vitest'
import PreCode from '../pre-code'
describe('PreCode Component', () => {
it('renders children correctly inside the pre tag', () => {
const { container } = render(
<PreCode>
<code data-testid="test-code">console.log("hello world")</code>
</PreCode>,
)
const preElement = container.querySelector('pre')
const codeElement = screen.getByTestId('test-code')
expect(preElement).toBeInTheDocument()
expect(codeElement).toBeInTheDocument()
// Verify code is a descendant of pre
expect(preElement).toContainElement(codeElement)
expect(codeElement.textContent).toBe('console.log("hello world")')
})
it('contains the copy button span for CSS targeting', () => {
const { container } = render(
<PreCode>
<code>test content</code>
</PreCode>,
)
const copySpan = container.querySelector('.copy-code-button')
expect(copySpan).toBeInTheDocument()
expect(copySpan?.tagName).toBe('SPAN')
})
it('renders as a <pre> element', () => {
const { container } = render(<PreCode>Content</PreCode>)
expect(container.querySelector('pre')).toBeInTheDocument()
})
it('handles multiple children correctly', () => {
render(
<PreCode>
<span>Line 1</span>
<span>Line 2</span>
</PreCode>,
)
expect(screen.getByText('Line 1')).toBeInTheDocument()
expect(screen.getByText('Line 2')).toBeInTheDocument()
})
it('correctly instantiates the pre element node', () => {
const { container } = render(<PreCode>Ref check</PreCode>)
const pre = container.querySelector('pre')
// Verifies the node is an actual HTMLPreElement,
// confirming the ref-linked element rendered correctly.
expect(pre).toBeInstanceOf(HTMLPreElement)
})
})

View File

@@ -1,69 +0,0 @@
import { cleanup, render } from '@testing-library/react'
import * as React from 'react'
import { afterEach, describe, expect, it } from 'vitest'
import ScriptBlock from '../script-block'
afterEach(() => {
cleanup()
})
type ScriptNode = {
children: Array<{ value?: string }>
}
describe('ScriptBlock', () => {
it('renders script tag string when child has value', () => {
const node: ScriptNode = {
children: [{ value: 'alert("hi")' }],
}
const { container } = render(
<ScriptBlock node={node} />,
)
expect(container.textContent).toBe('<script>alert("hi")</script>')
})
it('renders empty script tag when child value is undefined', () => {
const node: ScriptNode = {
children: [{}],
}
const { container } = render(
<ScriptBlock node={node} />,
)
expect(container.textContent).toBe('<script></script>')
})
it('renders empty script tag when children array is empty', () => {
const node: ScriptNode = {
children: [],
}
const { container } = render(
<ScriptBlock node={node} />,
)
expect(container.textContent).toBe('<script></script>')
})
it('preserves multiline script content', () => {
const multi = `console.log("line1");
console.log("line2");`
const node: ScriptNode = {
children: [{ value: multi }],
}
const { container } = render(
<ScriptBlock node={node} />,
)
expect(container.textContent).toBe(`<script>${multi}</script>`)
})
it('has displayName set correctly', () => {
expect(ScriptBlock.displayName).toBe('ScriptBlock')
})
})

View File

@@ -399,7 +399,6 @@ const CodeBlock: any = memo(({ inline, className, children = '', ...props }: any
}}
language={match?.[1]}
showLineNumbers
PreTag="div"
>
{content}
</SyntaxHighlighter>
@@ -413,7 +412,7 @@ const CodeBlock: any = memo(({ inline, className, children = '', ...props }: any
return (
<div className="relative">
<div className="flex h-8 items-center justify-between rounded-t-[10px] border-b border-divider-subtle bg-components-input-bg-normal p-1 pl-3">
<div className="system-xs-semibold-uppercase text-text-secondary">{languageShowName}</div>
<div className="text-text-secondary system-xs-semibold-uppercase">{languageShowName}</div>
<div className="flex items-center gap-1">
{language === 'svg' && <SVGBtn isSVG={isSVG} setIsSVG={setIsSVG} />}
<ActionButton>

View File

@@ -3,11 +3,12 @@
* Extracted from the main markdown renderer for modularity.
* Uses the ImageGallery component to display images.
*/
import * as React from 'react'
import { memo, useMemo } from 'react'
import ImageGallery from '@/app/components/base/image-gallery'
const Img = ({ src }: any) => {
return <div className="markdown-img-wrapper"><ImageGallery srcs={[src]} /></div>
}
const Img = memo(({ src }: { src: string }) => {
const srcs = useMemo(() => [src], [src])
return <div className="markdown-img-wrapper"><ImageGallery srcs={srcs} /></div>
})
export default Img

View File

@@ -13,8 +13,6 @@ export { default as Link } from './link'
export { default as Paragraph } from './paragraph'
export * from './plugin-img'
export * from './plugin-paragraph'
export { default as PreCode } from './pre-code'
export { default as ScriptBlock } from './script-block'
export { default as ThinkBlock } from './think-block'
export { default as VideoBlock } from './video-block'

View File

@@ -4,8 +4,7 @@ import type { SimplePluginInfo } from '../markdown/react-markdown-wrapper'
* Extracted from the main markdown renderer for modularity.
* Uses the ImageGallery component to display images.
*/
import * as React from 'react'
import { useEffect, useMemo, useState } from 'react'
import { memo, useEffect, useMemo, useState } from 'react'
import ImageGallery from '@/app/components/base/image-gallery'
import { usePluginReadmeAsset } from '@/service/use-plugins'
import { getMarkdownImageURL } from './utils'
@@ -15,7 +14,7 @@ type ImgProps = {
pluginInfo?: SimplePluginInfo
}
export const PluginImg: React.FC<ImgProps> = ({ src, pluginInfo }) => {
export const PluginImg = memo<ImgProps>(({ src, pluginInfo }) => {
const { pluginUniqueIdentifier, pluginId } = pluginInfo || {}
const { data: assetData } = usePluginReadmeAsset({ plugin_unique_identifier: pluginUniqueIdentifier, file_name: src })
const [blobUrl, setBlobUrl] = useState<string>()
@@ -41,9 +40,11 @@ export const PluginImg: React.FC<ImgProps> = ({ src, pluginInfo }) => {
return getMarkdownImageURL(src, pluginId)
}, [blobUrl, pluginId, src])
const srcs = useMemo(() => [imageUrl], [imageUrl])
return (
<div className="markdown-img-wrapper">
<ImageGallery srcs={[imageUrl]} />
<ImageGallery srcs={srcs} />
</div>
)
}
})

View File

@@ -1,24 +1,30 @@
import type { SimplePluginInfo } from '../markdown/react-markdown-wrapper'
import * as React from 'react'
import { useEffect, useMemo, useState } from 'react'
/**
* @fileoverview Paragraph component for rendering <p> tags in Markdown.
* Extracted from the main markdown renderer for modularity.
* Handles special rendering for paragraphs that directly contain an image.
*/
import type { ExtraProps } from 'streamdown'
import type { SimplePluginInfo } from '../markdown/react-markdown-wrapper'
import * as React from 'react'
import { useEffect, useMemo, useState } from 'react'
import ImageGallery from '@/app/components/base/image-gallery'
import { usePluginReadmeAsset } from '@/service/use-plugins'
import { getMarkdownImageURL } from './utils'
type HastChildNode = {
tagName?: string
properties?: { src?: string, [key: string]: unknown }
}
type PluginParagraphProps = {
pluginInfo?: SimplePluginInfo
node?: any
node?: ExtraProps['node']
children?: React.ReactNode
}
export const PluginParagraph: React.FC<PluginParagraphProps> = ({ pluginInfo, node, children }) => {
const { pluginUniqueIdentifier, pluginId } = pluginInfo || {}
const childrenNode = node?.children as Array<any> | undefined
const childrenNode = node?.children as HastChildNode[] | undefined
const firstChild = childrenNode?.[0]
const isImageParagraph = firstChild?.tagName === 'img'
const imageSrc = isImageParagraph ? firstChild?.properties?.src : undefined

View File

@@ -1,23 +0,0 @@
/**
* @fileoverview PreCode component for rendering <pre> tags in Markdown.
* Extracted from the main markdown renderer for modularity.
* This is a simple wrapper around the HTML <pre> element.
*/
import * as React from 'react'
import { useRef } from 'react'
function PreCode(props: { children: any }) {
const ref = useRef<HTMLPreElement>(null)
return (
<pre ref={ref}>
<span
className="copy-code-button"
>
</span>
{props.children}
</pre>
)
}
export default PreCode

View File

@@ -1,15 +0,0 @@
/**
* @fileoverview ScriptBlock component for handling <script> tags in Markdown.
* Extracted from the main markdown renderer for modularity.
* Note: Current implementation returns the script tag as a string, which might not execute as expected in React.
* This behavior is preserved from the original implementation and may need review for security and functionality.
*/
import { memo } from 'react'
const ScriptBlock = memo(({ node }: any) => {
const scriptContent = node.children[0]?.value || ''
return `<script>${scriptContent}</script>`
})
ScriptBlock.displayName = 'ScriptBlock'
export default ScriptBlock

View File

@@ -99,7 +99,7 @@ describe('Markdown', () => {
it('should pass customComponents through', () => {
const customComponents = {
h1: ({ children }: { children: React.ReactNode }) => <h1>{children}</h1>,
h1: ({ children }: { children?: React.ReactNode }) => <h1>{children}</h1>,
}
render(<Markdown content="# title" customComponents={customComponents} />)
const props = getLastWrapperProps()

View File

@@ -1,6 +1,6 @@
import type { PropsWithChildren, ReactNode } from 'react'
import { render, screen } from '@testing-library/react'
import { ReactMarkdownWrapper } from '../react-markdown-wrapper'
import ReactMarkdownWrapper from '../react-markdown-wrapper'
vi.mock('@/app/components/base/markdown-blocks', () => ({
AudioBlock: ({ children }: PropsWithChildren) => <div data-testid="audio-block">{children}</div>,

View File

@@ -1,11 +1,16 @@
import type { ReactMarkdownWrapperProps, SimplePluginInfo } from './react-markdown-wrapper'
import { flow } from 'es-toolkit/compat'
import dynamic from 'next/dynamic'
import { memo, useMemo } from 'react'
import { cn } from '@/utils/classnames'
import { preprocessLaTeX, preprocessThinkTag } from './markdown-utils'
import 'katex/dist/katex.min.css'
const ReactMarkdown = dynamic(() => import('./react-markdown-wrapper').then(mod => mod.ReactMarkdownWrapper), { ssr: false })
const ReactMarkdown = dynamic(() => import('./react-markdown-wrapper'), { ssr: false })
const preprocess = flow([preprocessThinkTag, preprocessLaTeX])
const EMPTY_COMPONENTS = {} as const
/**
* @fileoverview Main Markdown rendering component.
@@ -18,24 +23,32 @@ export type MarkdownProps = {
content: string
className?: string
pluginInfo?: SimplePluginInfo
} & Pick<ReactMarkdownWrapperProps, 'customComponents' | 'customDisallowedElements' | 'rehypePlugins'>
} & Pick<ReactMarkdownWrapperProps, 'customComponents' | 'customDisallowedElements' | 'rehypePlugins' | 'isAnimating'>
export const Markdown = (props: MarkdownProps) => {
const { customComponents = {}, pluginInfo } = props
const latexContent = flow([
preprocessThinkTag,
preprocessLaTeX,
])(props.content)
export const Markdown = memo((props: MarkdownProps) => {
const {
content,
customComponents = EMPTY_COMPONENTS,
pluginInfo,
isAnimating,
customDisallowedElements,
rehypePlugins,
className,
} = props
const latexContent = useMemo(() => preprocess(content), [content])
return (
<div className={cn('markdown-body', '!text-text-primary', props.className)}>
<div className={cn('markdown-body', '!text-text-primary', className)}>
<ReactMarkdown
pluginInfo={pluginInfo}
latexContent={latexContent}
customComponents={customComponents}
customDisallowedElements={props.customDisallowedElements}
rehypePlugins={props.rehypePlugins}
customDisallowedElements={customDisallowedElements}
rehypePlugins={rehypePlugins}
isAnimating={isAnimating}
/>
</div>
)
}
})
Markdown.displayName = 'Markdown'

View File

@@ -1,81 +1,164 @@
import type { FC } from 'react'
import type { Components, StreamdownProps } from 'streamdown'
import { createMathPlugin } from '@streamdown/math'
import dynamic from 'next/dynamic'
import ReactMarkdown from 'react-markdown'
import RehypeKatex from 'rehype-katex'
import RehypeRaw from 'rehype-raw'
import { memo, useMemo } from 'react'
import RemarkBreaks from 'remark-breaks'
import RemarkGfm from 'remark-gfm'
import RemarkMath from 'remark-math'
import { AudioBlock, Img, Link, MarkdownButton, MarkdownForm, Paragraph, PluginImg, PluginParagraph, ScriptBlock, ThinkBlock, VideoBlock } from '@/app/components/base/markdown-blocks'
import { defaultRehypePlugins, defaultRemarkPlugins, Streamdown } from 'streamdown'
import {
AudioBlock,
Img,
Link,
MarkdownButton,
MarkdownForm,
Paragraph,
PluginImg,
PluginParagraph,
ThinkBlock,
VideoBlock,
} from '@/app/components/base/markdown-blocks'
import { ENABLE_SINGLE_DOLLAR_LATEX } from '@/config'
import { customUrlTransform } from './markdown-utils'
import 'katex/dist/katex.min.css'
type PluggableList = NonNullable<StreamdownProps['rehypePlugins']>
type Pluggable = PluggableList[number]
const CodeBlock = dynamic(() => import('@/app/components/base/markdown-blocks/code-block'), { ssr: false })
const mathPlugin = createMathPlugin({
singleDollarTextMath: ENABLE_SINGLE_DOLLAR_LATEX,
})
/**
* Allowed HTML tags and their permitted data attributes for rehype-sanitize.
* Keys = tag names to allow; values = attribute names in **hast** property format
* (camelCase, e.g. `dataThink` for `data-think`, or the wildcard `data*`).
*/
const ALLOWED_TAGS: Record<string, string[]> = {
button: ['data*'],
form: ['data*'],
details: ['dataThink'],
video: ['src', 'controls', 'width', 'height', 'data*'],
audio: ['src', 'controls', 'data*'],
source: ['src'],
mark: [],
sub: [],
sup: [],
kbd: [],
}
/**
* Build a rehype plugin list that includes the default raw → sanitize → harden
* pipeline with `ALLOWED_TAGS` baked into the sanitize schema, plus any extra
* plugins the caller provides.
*
* This sidesteps the streamdown `allowedTags` prop, which only takes effect
* when `rehypePlugins` is the exact default reference (identity check).
*/
function buildRehypePlugins(extraPlugins?: PluggableList): PluggableList {
// defaultRehypePlugins.sanitize is [rehypeSanitize, schema]
const [sanitizePlugin, defaultSanitizeSchema] = defaultRehypePlugins.sanitize as [Pluggable, Record<string, unknown>]
const tagNamesSet = new Set([
...((defaultSanitizeSchema.tagNames as string[]) ?? []),
...Object.keys(ALLOWED_TAGS),
])
const customSchema = {
...defaultSanitizeSchema,
tagNames: Array.from(tagNamesSet),
attributes: {
...(defaultSanitizeSchema.attributes as Record<string, string[]>),
...ALLOWED_TAGS,
},
}
return [
defaultRehypePlugins.raw,
...(extraPlugins ?? []),
[sanitizePlugin, customSchema] as Pluggable,
defaultRehypePlugins.harden,
]
}
export type SimplePluginInfo = {
pluginUniqueIdentifier: string
pluginId: string
}
export type ReactMarkdownWrapperProps = {
latexContent: any
latexContent: string
customDisallowedElements?: string[]
customComponents?: Record<string, React.ComponentType<any>>
customComponents?: Components
pluginInfo?: SimplePluginInfo
rehypePlugins?: any// js: PluggableList[]
rehypePlugins?: StreamdownProps['rehypePlugins']
isAnimating?: boolean
className?: string
}
export const ReactMarkdownWrapper: FC<ReactMarkdownWrapperProps> = (props) => {
const { customComponents, latexContent, pluginInfo } = props
const ReactMarkdownWrapper = (props: ReactMarkdownWrapperProps) => {
const { customComponents, latexContent, pluginInfo, isAnimating, className } = props
const remarkPlugins = useMemo(
() => [
[Array.isArray(defaultRemarkPlugins.gfm) ? defaultRemarkPlugins.gfm[0] : defaultRemarkPlugins.gfm, { singleTilde: false }] as Pluggable,
RemarkBreaks,
],
[],
)
const rehypePlugins = useMemo(
() => buildRehypePlugins(props.rehypePlugins ?? undefined),
[props.rehypePlugins],
)
const plugins = useMemo(
() => ({
math: mathPlugin,
}),
[],
)
const disallowedElements = useMemo(
() => ['iframe', 'head', 'html', 'meta', 'link', 'style', 'body', ...(props.customDisallowedElements || [])],
[props.customDisallowedElements],
)
const components: Components = useMemo(
() => ({
code: CodeBlock,
img: imgProps => pluginInfo ? <PluginImg src={String(imgProps.src ?? '')} pluginInfo={pluginInfo} /> : <Img src={String(imgProps.src ?? '')} />,
video: VideoBlock,
audio: AudioBlock,
a: Link,
p: pProps => pluginInfo ? <PluginParagraph {...pProps} pluginInfo={pluginInfo} /> : <Paragraph {...pProps} />,
button: MarkdownButton,
form: MarkdownForm,
details: ThinkBlock as React.ComponentType,
...customComponents,
}),
[pluginInfo, customComponents],
)
const controls = useMemo(() => ({
table: false,
}), [])
return (
<ReactMarkdown
remarkPlugins={[
[RemarkGfm, { singleTilde: false }],
[RemarkMath, { singleDollarTextMath: ENABLE_SINGLE_DOLLAR_LATEX }],
RemarkBreaks,
]}
rehypePlugins={[
RehypeKatex,
RehypeRaw as any,
// The Rehype plug-in is used to remove the ref attribute of an element
() => {
return (tree: any) => {
const iterate = (node: any) => {
if (node.type === 'element' && node.properties?.ref)
delete node.properties.ref
if (node.type === 'element' && !/^[a-z][a-z0-9]*$/i.test(node.tagName)) {
node.type = 'text'
node.value = `<${node.tagName}`
}
if (node.children)
node.children.forEach(iterate)
}
tree.children.forEach(iterate)
}
},
...(props.rehypePlugins || []),
]}
<Streamdown
className={className}
remarkPlugins={remarkPlugins}
rehypePlugins={rehypePlugins}
plugins={plugins}
urlTransform={customUrlTransform}
disallowedElements={['iframe', 'head', 'html', 'meta', 'link', 'style', 'body', ...(props.customDisallowedElements || [])]}
components={{
code: CodeBlock,
img: (props: any) => pluginInfo ? <PluginImg {...props} pluginInfo={pluginInfo} /> : <Img {...props} />,
video: VideoBlock,
audio: AudioBlock,
a: Link,
p: (props: any) => pluginInfo ? <PluginParagraph {...props} pluginInfo={pluginInfo} /> : <Paragraph {...props} />,
button: MarkdownButton,
form: MarkdownForm,
script: ScriptBlock as any,
details: ThinkBlock,
...customComponents,
}}
disallowedElements={disallowedElements}
components={components}
controls={controls}
isAnimating={isAnimating}
>
{/* Markdown detect has problem. */}
{latexContent}
</ReactMarkdown>
</Streamdown>
)
}
export default memo(ReactMarkdownWrapper)

View File

@@ -99,7 +99,7 @@ const ModelProviderPage = ({ searchText }: Props) => {
return (
<div className="relative -mt-2 pt-1">
<div className={cn('mb-2 flex items-center')}>
<div className="grow text-text-primary system-md-semibold">{t('modelProvider.models', { ns: 'common' })}</div>
<div className="system-md-semibold grow text-text-primary">{t('modelProvider.models', { ns: 'common' })}</div>
<div className={cn(
'relative flex shrink-0 items-center justify-end gap-2 rounded-lg border border-transparent p-px',
defaultModelNotConfigured && 'border-components-panel-border bg-components-panel-bg-blur pl-2 shadow-xs',
@@ -107,7 +107,7 @@ const ModelProviderPage = ({ searchText }: Props) => {
>
{defaultModelNotConfigured && <div className="absolute bottom-0 left-0 right-0 top-0 opacity-40" style={{ background: 'linear-gradient(92deg, rgba(247, 144, 9, 0.25) 0%, rgba(255, 255, 255, 0.00) 100%)' }} />}
{defaultModelNotConfigured && (
<div className="flex items-center gap-1 text-text-primary system-xs-medium">
<div className="system-xs-medium flex items-center gap-1 text-text-primary">
<RiAlertFill className="h-4 w-4 text-text-warning-secondary" />
<span className="max-w-[460px] truncate" title={t('modelProvider.notConfigured', { ns: 'common' })}>{t('modelProvider.notConfigured', { ns: 'common' })}</span>
</div>
@@ -129,8 +129,8 @@ const ModelProviderPage = ({ searchText }: Props) => {
<div className="flex h-10 w-10 items-center justify-center rounded-[10px] border-[0.5px] border-components-card-border bg-components-card-bg shadow-lg backdrop-blur">
<RiBrainLine className="h-5 w-5 text-text-primary" />
</div>
<div className="mt-2 text-text-secondary system-sm-medium">{t('modelProvider.emptyProviderTitle', { ns: 'common' })}</div>
<div className="mt-1 text-text-tertiary system-xs-regular">{t('modelProvider.emptyProviderTip', { ns: 'common' })}</div>
<div className="system-sm-medium mt-2 text-text-secondary">{t('modelProvider.emptyProviderTitle', { ns: 'common' })}</div>
<div className="system-xs-regular mt-1 text-text-tertiary">{t('modelProvider.emptyProviderTip', { ns: 'common' })}</div>
</div>
)}
{!!filteredConfiguredProviders?.length && (
@@ -145,7 +145,7 @@ const ModelProviderPage = ({ searchText }: Props) => {
)}
{!!filteredNotConfiguredProviders?.length && (
<>
<div className="mb-2 flex items-center pt-2 text-text-primary system-md-semibold">{t('modelProvider.toBeConfigured', { ns: 'common' })}</div>
<div className="system-md-semibold mb-2 flex items-center pt-2 text-text-primary">{t('modelProvider.toBeConfigured', { ns: 'common' })}</div>
<div className="relative">
{filteredNotConfiguredProviders?.map(provider => (
<ProviderAddedCard

View File

@@ -2,7 +2,6 @@
import type { FC } from 'react'
import type { FormInputItem, UserAction } from '../types'
import type { ButtonProps } from '@/app/components/base/button'
import { RiCloseLine } from '@remixicon/react'
import * as React from 'react'
import { useTranslation } from 'react-i18next'
import ActionButton from '@/app/components/base/action-button'
@@ -47,15 +46,15 @@ const FormContentPreview: FC<FormContentPreviewProps> = ({
>
<div className="flex h-[26px] items-center justify-between px-4">
<Badge uppercase className="border-text-accent-secondary text-text-accent-secondary">{t(`${i18nPrefix}.formContent.preview`, { ns: 'workflow' })}</Badge>
<ActionButton onClick={onClose}><RiCloseLine className="w-5 text-text-tertiary" /></ActionButton>
<ActionButton onClick={onClose}><span className="i-ri-close-line w-5 text-text-tertiary" /></ActionButton>
</div>
<div className="max-h-[calc(100vh-167px)] overflow-y-auto px-4">
<Markdown
content={content}
rehypePlugins={[rehypeVariable, rehypeNotes]}
customComponents={{
variable: ({ node }: { node: { properties?: { [key: string]: string } } }) => {
const path = node.properties?.['data-path'] as string
variable: ({ node }) => {
const path = String(node?.properties?.['data-path'] ?? '')
let newPath = path
if (path) {
newPath = path.replace(/#([^#.]+)([.#])/g, (match, nodeId, sep) => {
@@ -64,8 +63,8 @@ const FormContentPreview: FC<FormContentPreviewProps> = ({
}
return <Variable path={newPath} />
},
section: ({ node }: { node: { properties?: { [key: string]: string } } }) => (() => {
const name = node.properties?.['data-name'] as string
section: ({ node }) => (() => {
const name = String(node?.properties?.['data-name'] ?? '')
const input = formInputs.find(i => i.output_variable_name === name)
if (!input) {
return (
@@ -92,7 +91,7 @@ const FormContentPreview: FC<FormContentPreviewProps> = ({
</Button>
))}
</div>
<div className="system-xs-regular mt-1 text-text-tertiary">{t('nodes.humanInput.editor.previewTip', { ns: 'workflow' })}</div>
<div className="mt-1 text-text-tertiary system-xs-regular">{t('nodes.humanInput.editor.previewTip', { ns: 'workflow' })}</div>
</div>
</div>
)

View File

@@ -141,10 +141,6 @@
font-size: 1em;
}
.markdown-body hr {
margin: 24px 0;
}
.markdown-body hr::before {
display: table;
content: "";
@@ -275,18 +271,6 @@
border-radius: 6px;
}
.markdown-body h1,
.markdown-body h2,
.markdown-body h3,
.markdown-body h4,
.markdown-body h5,
.markdown-body h6 {
padding-top: 12px;
margin-bottom: 12px;
font-weight: var(--base-text-weight-semibold, 600);
line-height: 1.25;
}
.markdown-body h1 {
font-size: 18px;
}
@@ -379,14 +363,6 @@
content: "";
}
.markdown-body>*:first-child {
margin-top: 0 !important;
}
.markdown-body>*:last-child {
margin-bottom: 0 !important;
}
.markdown-body a:not([href]) {
color: inherit;
text-decoration: none;
@@ -407,18 +383,6 @@
outline: none;
}
.markdown-body p,
.markdown-body blockquote,
.markdown-body ul,
.markdown-body ol,
.markdown-body dl,
.markdown-body table,
.markdown-body pre,
.markdown-body details {
margin-top: 0;
margin-bottom: 12px;
}
.markdown-body ul,
.markdown-body ol {
padding-left: 2em;
@@ -542,14 +506,6 @@
margin-bottom: 0;
}
.markdown-body li>p {
margin-top: 16px;
}
.markdown-body li+li {
margin-top: 0.25em;
}
.markdown-body dl {
padding: 0;
}

View File

@@ -1,73 +0,0 @@
'use client'
import type { ICurrentWorkspace, LangGeniusVersionResponse, UserProfileResponse } from '@/models/common'
import { noop } from 'es-toolkit/function'
import { createContext, useContext, useContextSelector } from 'use-context-selector'
export type AppContextValue = {
userProfile: UserProfileResponse
mutateUserProfile: VoidFunction
currentWorkspace: ICurrentWorkspace
isCurrentWorkspaceManager: boolean
isCurrentWorkspaceOwner: boolean
isCurrentWorkspaceEditor: boolean
isCurrentWorkspaceDatasetOperator: boolean
mutateCurrentWorkspace: VoidFunction
langGeniusVersionInfo: LangGeniusVersionResponse
useSelector: typeof useSelector
isLoadingCurrentWorkspace: boolean
isValidatingCurrentWorkspace: boolean
}
export const userProfilePlaceholder = {
id: '',
name: '',
email: '',
avatar: '',
avatar_url: '',
is_password_set: false,
}
export const initialLangGeniusVersionInfo = {
current_env: '',
current_version: '',
latest_version: '',
release_date: '',
release_notes: '',
version: '',
can_auto_update: false,
}
export const initialWorkspaceInfo: ICurrentWorkspace = {
id: '',
name: '',
plan: '',
status: '',
created_at: 0,
role: 'normal',
providers: [],
trial_credits: 200,
trial_credits_used: 0,
next_credit_reset_date: 0,
}
export const AppContext = createContext<AppContextValue>({
userProfile: userProfilePlaceholder,
currentWorkspace: initialWorkspaceInfo,
isCurrentWorkspaceManager: false,
isCurrentWorkspaceOwner: false,
isCurrentWorkspaceEditor: false,
isCurrentWorkspaceDatasetOperator: false,
mutateUserProfile: noop,
mutateCurrentWorkspace: noop,
langGeniusVersionInfo: initialLangGeniusVersionInfo,
useSelector,
isLoadingCurrentWorkspace: false,
isValidatingCurrentWorkspace: false,
})
export function useSelector<T>(selector: (value: AppContextValue) => T): T {
return useContextSelector(AppContext, selector)
}
export const useAppContext = () => useContext(AppContext)

View File

@@ -3,18 +3,13 @@
import type { FC, ReactNode } from 'react'
import type { ICurrentWorkspace, LangGeniusVersionResponse, UserProfileResponse } from '@/models/common'
import { useQueryClient } from '@tanstack/react-query'
import { noop } from 'es-toolkit/function'
import { useCallback, useEffect, useMemo } from 'react'
import { createContext, useContext, useContextSelector } from 'use-context-selector'
import { setUserId, setUserProperties } from '@/app/components/base/amplitude'
import { setZendeskConversationFields } from '@/app/components/base/zendesk/utils'
import MaintenanceNotice from '@/app/components/header/maintenance-notice'
import { ZENDESK_FIELD_IDS } from '@/config'
import {
AppContext,
initialLangGeniusVersionInfo,
initialWorkspaceInfo,
userProfilePlaceholder,
useSelector,
} from '@/context/app-context'
import { env } from '@/env'
import {
useCurrentWorkspace,
@@ -23,6 +18,72 @@ import {
} from '@/service/use-common'
import { useGlobalPublicStore } from './global-public-context'
export type AppContextValue = {
userProfile: UserProfileResponse
mutateUserProfile: VoidFunction
currentWorkspace: ICurrentWorkspace
isCurrentWorkspaceManager: boolean
isCurrentWorkspaceOwner: boolean
isCurrentWorkspaceEditor: boolean
isCurrentWorkspaceDatasetOperator: boolean
mutateCurrentWorkspace: VoidFunction
langGeniusVersionInfo: LangGeniusVersionResponse
useSelector: typeof useSelector
isLoadingCurrentWorkspace: boolean
isValidatingCurrentWorkspace: boolean
}
const userProfilePlaceholder = {
id: '',
name: '',
email: '',
avatar: '',
avatar_url: '',
is_password_set: false,
}
const initialLangGeniusVersionInfo = {
current_env: '',
current_version: '',
latest_version: '',
release_date: '',
release_notes: '',
version: '',
can_auto_update: false,
}
const initialWorkspaceInfo: ICurrentWorkspace = {
id: '',
name: '',
plan: '',
status: '',
created_at: 0,
role: 'normal',
providers: [],
trial_credits: 200,
trial_credits_used: 0,
next_credit_reset_date: 0,
}
const AppContext = createContext<AppContextValue>({
userProfile: userProfilePlaceholder,
currentWorkspace: initialWorkspaceInfo,
isCurrentWorkspaceManager: false,
isCurrentWorkspaceOwner: false,
isCurrentWorkspaceEditor: false,
isCurrentWorkspaceDatasetOperator: false,
mutateUserProfile: noop,
mutateCurrentWorkspace: noop,
langGeniusVersionInfo: initialLangGeniusVersionInfo,
useSelector,
isLoadingCurrentWorkspace: false,
isValidatingCurrentWorkspace: false,
})
export function useSelector<T>(selector: (value: AppContextValue) => T): T {
return useContextSelector(AppContext, selector)
}
export type AppContextProviderProps = {
children: ReactNode
}
@@ -109,7 +170,7 @@ export const AppContextProvider: FC<AppContextProviderProps> = ({ children }) =>
// Report user and workspace info to Amplitude when loaded
if (userProfile?.id) {
setUserId(userProfile.email)
const properties: Record<string, string | number | boolean> = {
const properties: Record<string, any> = {
email: userProfile.email,
name: userProfile.name,
has_password: userProfile.is_password_set,
@@ -152,3 +213,7 @@ export const AppContextProvider: FC<AppContextProviderProps> = ({ children }) =>
</AppContext.Provider>
)
}
export const useAppContext = () => useContext(AppContext)
export default AppContext

View File

@@ -10,9 +10,6 @@ This document tracks the migration away from legacy overlay APIs.
- `@/app/components/base/modal`
- `@/app/components/base/confirm`
- `@/app/components/base/select` (including `custom` / `pure`)
- `@/app/components/base/popover`
- `@/app/components/base/dropdown`
- `@/app/components/base/dialog`
- Replacement primitives:
- `@/app/components/base/ui/tooltip`
- `@/app/components/base/ui/dropdown-menu`

View File

@@ -275,9 +275,6 @@
}
},
"app/account/(commonLayout)/delete-account/components/feed-back.tsx": {
"no-restricted-imports": {
"count": 1
},
"tailwindcss/enforce-consistent-class-order": {
"count": 1
}
@@ -290,17 +287,15 @@
"count": 3
}
},
"app/account/(commonLayout)/delete-account/index.tsx": {
"no-restricted-imports": {
"count": 1
}
},
"app/account/(commonLayout)/header.tsx": {
"tailwindcss/enforce-consistent-class-order": {
"count": 2
}
},
"app/account/oauth/authorize/layout.tsx": {
"tailwindcss/enforce-consistent-class-order": {
"count": 1
},
"ts/no-explicit-any": {
"count": 1
}
@@ -441,9 +436,6 @@
}
},
"app/components/app/annotation/header-opts/index.tsx": {
"no-restricted-imports": {
"count": 1
},
"react/no-nested-component-definitions": {
"count": 1
},
@@ -973,11 +965,6 @@
"count": 1
}
},
"app/components/app/configuration/debug/debug-with-multiple-model/debug-item.tsx": {
"no-restricted-imports": {
"count": 2
}
},
"app/components/app/configuration/debug/debug-with-multiple-model/index.spec.tsx": {
"ts/no-explicit-any": {
"count": 5
@@ -1388,7 +1375,7 @@
},
"app/components/apps/app-card.tsx": {
"no-restricted-imports": {
"count": 3
"count": 1
},
"react-hooks-extra/no-direct-set-state-in-use-effect": {
"count": 1
@@ -1675,9 +1662,6 @@
"react-hooks-extra/no-direct-set-state-in-use-effect": {
"count": 3
},
"tailwindcss/enforce-consistent-class-order": {
"count": 3
},
"ts/no-explicit-any": {
"count": 1
}
@@ -2365,9 +2349,6 @@
"react-hooks-extra/no-direct-set-state-in-use-effect": {
"count": 7
},
"tailwindcss/enforce-consistent-class-order": {
"count": 1
},
"ts/no-explicit-any": {
"count": 9
}
@@ -2383,11 +2364,6 @@
"count": 11
}
},
"app/components/base/markdown-blocks/img.tsx": {
"ts/no-explicit-any": {
"count": 1
}
},
"app/components/base/markdown-blocks/link.tsx": {
"ts/no-explicit-any": {
"count": 1
@@ -2406,19 +2382,6 @@
"app/components/base/markdown-blocks/plugin-paragraph.tsx": {
"react-hooks-extra/no-direct-set-state-in-use-effect": {
"count": 2
},
"ts/no-explicit-any": {
"count": 2
}
},
"app/components/base/markdown-blocks/pre-code.tsx": {
"ts/no-explicit-any": {
"count": 1
}
},
"app/components/base/markdown-blocks/script-block.tsx": {
"ts/no-explicit-any": {
"count": 1
}
},
"app/components/base/markdown-blocks/think-block.tsx": {
@@ -2444,11 +2407,6 @@
"count": 1
}
},
"app/components/base/markdown/react-markdown-wrapper.tsx": {
"ts/no-explicit-any": {
"count": 9
}
},
"app/components/base/mermaid/index.tsx": {
"react-hooks-extra/no-direct-set-state-in-use-effect": {
"count": 7
@@ -2877,16 +2835,6 @@
"count": 1
}
},
"app/components/base/tag-management/panel.tsx": {
"no-restricted-imports": {
"count": 1
}
},
"app/components/base/tag-management/selector.tsx": {
"no-restricted-imports": {
"count": 1
}
},
"app/components/base/tag-management/tag-item-editor.tsx": {
"no-restricted-imports": {
"count": 2
@@ -3205,11 +3153,6 @@
"count": 1
}
},
"app/components/datasets/create-from-pipeline/list/template-card/actions.tsx": {
"no-restricted-imports": {
"count": 1
}
},
"app/components/datasets/create-from-pipeline/list/template-card/content.tsx": {
"tailwindcss/enforce-consistent-class-order": {
"count": 3
@@ -3299,7 +3242,7 @@
},
"app/components/datasets/create/step-two/components/indexing-mode-section.tsx": {
"no-restricted-imports": {
"count": 2
"count": 1
},
"tailwindcss/enforce-consistent-class-order": {
"count": 8
@@ -3334,9 +3277,6 @@
}
},
"app/components/datasets/create/step-two/language-select/index.tsx": {
"no-restricted-imports": {
"count": 1
},
"tailwindcss/enforce-consistent-class-order": {
"count": 2
}
@@ -3470,7 +3410,7 @@
},
"app/components/datasets/documents/components/operations.tsx": {
"no-restricted-imports": {
"count": 3
"count": 2
}
},
"app/components/datasets/documents/components/rename-modal.tsx": {
@@ -3890,9 +3830,6 @@
}
},
"app/components/datasets/documents/detail/segment-add/index.tsx": {
"no-restricted-imports": {
"count": 1
},
"react-refresh/only-export-components": {
"count": 1
},
@@ -4107,11 +4044,6 @@
"count": 1
}
},
"app/components/datasets/list/dataset-card/components/operations-popover.tsx": {
"no-restricted-imports": {
"count": 1
}
},
"app/components/datasets/list/dataset-card/hooks/use-dataset-card-state.ts": {
"react-hooks-extra/no-direct-set-state-in-use-effect": {
"count": 1
@@ -4545,9 +4477,6 @@
}
},
"app/components/header/account-setting/data-source-page-new/operator.tsx": {
"no-restricted-imports": {
"count": 2
},
"tailwindcss/enforce-consistent-class-order": {
"count": 5
}
@@ -4694,6 +4623,11 @@
"count": 3
}
},
"app/components/header/account-setting/model-provider-page/index.tsx": {
"tailwindcss/enforce-consistent-class-order": {
"count": 5
}
},
"app/components/header/account-setting/model-provider-page/install-from-marketplace.tsx": {
"tailwindcss/enforce-consistent-class-order": {
"count": 3
@@ -4847,9 +4781,6 @@
}
},
"app/components/header/account-setting/model-provider-page/model-parameter-modal/presets-parameter.tsx": {
"no-restricted-imports": {
"count": 1
},
"tailwindcss/enforce-consistent-class-order": {
"count": 1
}
@@ -6464,6 +6395,11 @@
"count": 2
}
},
"app/components/workflow/__tests__/trigger-status-sync.test.tsx": {
"ts/no-explicit-any": {
"count": 2
}
},
"app/components/workflow/block-selector/all-start-blocks.tsx": {
"react-hooks-extra/no-direct-set-state-in-use-effect": {
"count": 1
@@ -7716,11 +7652,6 @@
"count": 2
}
},
"app/components/workflow/nodes/human-input/components/form-content-preview.tsx": {
"tailwindcss/enforce-consistent-class-order": {
"count": 1
}
},
"app/components/workflow/nodes/human-input/components/form-content.tsx": {
"react-hooks-extra/no-direct-set-state-in-use-effect": {
"count": 1
@@ -8475,6 +8406,11 @@
"count": 5
}
},
"app/components/workflow/nodes/tool/__tests__/output-schema-utils.test.ts": {
"ts/no-explicit-any": {
"count": 1
}
},
"app/components/workflow/nodes/tool/components/copy-id.tsx": {
"no-restricted-imports": {
"count": 1
@@ -8599,6 +8535,11 @@
"count": 1
}
},
"app/components/workflow/nodes/trigger-plugin/utils/__tests__/form-helpers.test.ts": {
"ts/no-explicit-any": {
"count": 2
}
},
"app/components/workflow/nodes/trigger-plugin/utils/form-helpers.ts": {
"ts/no-explicit-any": {
"count": 7
@@ -9571,6 +9512,14 @@
"count": 5
}
},
"context/app-context.tsx": {
"react-refresh/only-export-components": {
"count": 2
},
"ts/no-explicit-any": {
"count": 1
}
},
"context/datasets-context.tsx": {
"react-refresh/only-export-components": {
"count": 1
@@ -9731,6 +9680,17 @@
"count": 1
}
},
"lib/utils.ts": {
"import/consistent-type-specifier-style": {
"count": 1
},
"perfectionist/sort-named-imports": {
"count": 1
},
"style/quotes": {
"count": 2
}
},
"models/common.ts": {
"ts/no-explicit-any": {
"count": 3

View File

@@ -195,24 +195,6 @@ export default antfu(
'**/base/confirm/index',
],
message: 'Deprecated: use @/app/components/base/ui/alert-dialog instead. See issue #32767.',
}, {
group: [
'**/base/popover',
'**/base/popover/index',
],
message: 'Deprecated: use @/app/components/base/ui/popover instead. See issue #32767.',
}, {
group: [
'**/base/dropdown',
'**/base/dropdown/index',
],
message: 'Deprecated: use @/app/components/base/ui/dropdown-menu instead. See issue #32767.',
}, {
group: [
'**/base/dialog',
'**/base/dialog/index',
],
message: 'Deprecated: use @/app/components/base/ui/dialog instead. See issue #32767.',
}],
}],
},

View File

@@ -85,6 +85,7 @@
"@orpc/tanstack-query": "1.13.6",
"@remixicon/react": "4.7.0",
"@sentry/react": "8.55.0",
"@streamdown/math": "1.0.2",
"@svgdotjs/svg.js": "3.2.5",
"@t3-oss/env-nextjs": "0.13.10",
"@tailwindcss/typography": "0.5.19",
@@ -139,7 +140,6 @@
"react-easy-crop": "5.5.3",
"react-hotkeys-hook": "4.6.2",
"react-i18next": "16.5.0",
"react-markdown": "9.1.0",
"react-multi-email": "1.0.25",
"react-papaparse": "4.4.0",
"react-pdf-highlighter": "8.0.0-rc.0",
@@ -149,15 +149,12 @@
"react-textarea-autosize": "8.5.9",
"react-window": "1.8.11",
"reactflow": "11.11.4",
"rehype-katex": "7.0.1",
"rehype-raw": "7.0.0",
"remark-breaks": "4.0.0",
"remark-gfm": "4.0.1",
"remark-math": "6.0.0",
"scheduler": "0.27.0",
"semver": "7.7.3",
"sharp": "0.33.5",
"sortablejs": "1.15.6",
"streamdown": "2.3.0",
"string-ts": "2.3.1",
"tailwind-merge": "2.6.1",
"tldts": "7.0.17",
@@ -243,9 +240,8 @@
"tsx": "4.21.0",
"typescript": "5.9.3",
"uglify-js": "3.19.3",
"vinext": "https://pkg.pr.new/hyoban/vinext@556a6d6",
"vinext": "https://pkg.pr.new/hyoban/vinext@a30ba79",
"vite": "8.0.0-beta.16",
"vite-plugin-inspect": "11.3.3",
"vite-tsconfig-paths": "6.1.1",
"vitest": "4.0.18",
"vitest-canvas-mock": "1.1.3"

266
web/pnpm-lock.yaml generated
View File

@@ -126,6 +126,9 @@ importers:
'@sentry/react':
specifier: 8.55.0
version: 8.55.0(react@19.2.4)
'@streamdown/math':
specifier: 1.0.2
version: 1.0.2(react@19.2.4)
'@svgdotjs/svg.js':
specifier: 3.2.5
version: 3.2.5
@@ -288,9 +291,6 @@ importers:
react-i18next:
specifier: 16.5.0
version: 16.5.0(i18next@25.7.3(typescript@5.9.3))(react-dom@19.2.4(react@19.2.4))(react@19.2.4)(typescript@5.9.3)
react-markdown:
specifier: 9.1.0
version: 9.1.0(@types/react@19.2.9)(react@19.2.4)
react-multi-email:
specifier: 1.0.25
version: 1.0.25(react-dom@19.2.4(react@19.2.4))(react@19.2.4)
@@ -318,21 +318,9 @@ importers:
reactflow:
specifier: 11.11.4
version: 11.11.4(@types/react@19.2.9)(immer@11.1.0)(react-dom@19.2.4(react@19.2.4))(react@19.2.4)
rehype-katex:
specifier: 7.0.1
version: 7.0.1
rehype-raw:
specifier: 7.0.0
version: 7.0.0
remark-breaks:
specifier: 4.0.0
version: 4.0.0
remark-gfm:
specifier: 4.0.1
version: 4.0.1
remark-math:
specifier: 6.0.0
version: 6.0.0
scheduler:
specifier: 0.27.0
version: 0.27.0
@@ -345,6 +333,9 @@ importers:
sortablejs:
specifier: 1.15.6
version: 1.15.6
streamdown:
specifier: 2.3.0
version: 2.3.0(react-dom@19.2.4(react@19.2.4))(react@19.2.4)
string-ts:
specifier: 2.3.1
version: 2.3.1
@@ -596,14 +587,11 @@ importers:
specifier: 3.19.3
version: 3.19.3
vinext:
specifier: https://pkg.pr.new/hyoban/vinext@556a6d6
version: https://pkg.pr.new/hyoban/vinext@556a6d6(next@16.1.5(@babel/core@7.29.0)(react-dom@19.2.4(react@19.2.4))(react@19.2.4)(sass@1.93.2))(react-dom@19.2.4(react@19.2.4))(react@19.2.4)(typescript@5.9.3)(vite@8.0.0-beta.16(@types/node@24.10.12)(esbuild@0.27.2)(jiti@1.21.7)(sass@1.93.2)(terser@5.46.0)(tsx@4.21.0)(yaml@2.8.2))(webpack@5.104.1(esbuild@0.27.2)(uglify-js@3.19.3))
specifier: https://pkg.pr.new/hyoban/vinext@a30ba79
version: https://pkg.pr.new/hyoban/vinext@a30ba79(next@16.1.5(@babel/core@7.29.0)(react-dom@19.2.4(react@19.2.4))(react@19.2.4)(sass@1.93.2))(react-dom@19.2.4(react@19.2.4))(react@19.2.4)(typescript@5.9.3)(vite@8.0.0-beta.16(@types/node@24.10.12)(esbuild@0.27.2)(jiti@1.21.7)(sass@1.93.2)(terser@5.46.0)(tsx@4.21.0)(yaml@2.8.2))(webpack@5.104.1(esbuild@0.27.2)(uglify-js@3.19.3))
vite:
specifier: 8.0.0-beta.16
version: 8.0.0-beta.16(@types/node@24.10.12)(esbuild@0.27.2)(jiti@1.21.7)(sass@1.93.2)(terser@5.46.0)(tsx@4.21.0)(yaml@2.8.2)
vite-plugin-inspect:
specifier: 11.3.3
version: 11.3.3(vite@8.0.0-beta.16(@types/node@24.10.12)(esbuild@0.27.2)(jiti@1.21.7)(sass@1.93.2)(terser@5.46.0)(tsx@4.21.0)(yaml@2.8.2))
vite-tsconfig-paths:
specifier: 6.1.1
version: 6.1.1(typescript@5.9.3)(vite@8.0.0-beta.16(@types/node@24.10.12)(esbuild@0.27.2)(jiti@1.21.7)(sass@1.93.2)(terser@5.46.0)(tsx@4.21.0)(yaml@2.8.2))
@@ -2178,13 +2166,19 @@ packages:
resolution: {integrity: sha512-tmmZ3lQxAe/k/+rNnXQRawJ4NjxO2hqiOLTHvWchtGZULp4RyFeh6aU4XdOYBFe2KE1oShQTv4AblOs2iOrNnQ==}
engines: {node: '>= 10.0.0'}
'@pivanov/utils@0.0.2':
resolution: {integrity: sha512-q9CN0bFWxWgMY5hVVYyBgez1jGiLBa6I+LkG37ycylPhFvEGOOeaADGtUSu46CaZasPnlY8fCdVJZmrgKb1EPA==}
peerDependencies:
react: '>=18'
react-dom: '>=18'
'@pkgjs/parseargs@0.11.0':
resolution: {integrity: sha512-+1VkjdD0QBLPodGrJUeqarH8VAIvQODIbwh9XpP5Syisf7YoQgsJKPNFoqqLQlu+VQ/tVSshMR6loPMn8U+dPg==}
engines: {node: '>=14'}
'@pkgr/core@0.2.9':
resolution: {integrity: sha512-QNqXyfVS2wm9hweSYD2O7F0G06uurj9kZ96TRQE5Y9hU7+tgdZwIkbAKc5Ocy1HxEY2kuDQa6cQ1WRs/O5LFKA==}
engines: {node: ^12.20.0 || ^14.18.0 || >=16.0.0}
'@polka/url@1.0.0-next.29':
resolution: {integrity: sha512-wwQAWhWSuHaag8c4q/KN/vCoeOJYshAIvMQwD4GpSb3OiZklFfvAgmj0VCBBImRpuF/aFgIRzllXlVX93Jevww==}
'@preact/signals-core@1.12.2':
resolution: {integrity: sha512-5Yf8h1Ke3SMHr15xl630KtwPTW4sYDFkkxS0vQ8UiQLWwZQnrF9IKaVG1mN5VcJz52EcWs2acsc/Npjha/7ysA==}
@@ -2872,6 +2866,11 @@ packages:
typescript:
optional: true
'@streamdown/math@1.0.2':
resolution: {integrity: sha512-r8Ur9/lBuFnzZAFdEWrLUF2s/gRwRRRwruqltdZibyjbCBnuW7SJbFm26nXqvpJPW/gzpBUMrBVBzd88z05D5g==}
peerDependencies:
react: ^18.0.0 || ^19.0.0
'@stylistic/eslint-plugin@https://pkg.pr.new/@stylistic/eslint-plugin@258f9d8':
resolution: {tarball: https://pkg.pr.new/@stylistic/eslint-plugin@258f9d8}
version: 5.9.0
@@ -3870,9 +3869,6 @@ packages:
birecord@0.1.1:
resolution: {integrity: sha512-VUpsf/qykW0heRlC8LooCq28Kxn3mAqKohhDG/49rrsQ1dT1CXyj/pgXS+5BSRzFTR/3DyIBOqQOrGyZOh71Aw==}
birpc@2.9.0:
resolution: {integrity: sha512-KrayHS5pBi69Xi9JmvoqrIgYGDkD6mcSe/i6YKi3w5kekCLzrX4+nawcXqrj2tIp50Kw/mT/s3p+GVK0A0sKxw==}
bl@4.1.0:
resolution: {integrity: sha512-1W07cM9gS6DcLperZfFSj+bWLtaPGSOHWhPiGzXmvVJbRLdG82sH/Kn8EtW1VqWVA54AKf2h5k5BbnIbwF3h6w==}
@@ -4579,9 +4575,6 @@ packages:
resolution: {integrity: sha512-xUtoPkMggbz0MPyPiIWr1Kp4aeWJjDZ6SMvURhimjdZgsRuDplF5/s9hcgGhyXMhs+6vpnuoiZ2kFiu3FMnS8Q==}
engines: {node: '>=18'}
error-stack-parser-es@1.0.5:
resolution: {integrity: sha512-5qucVt2XcuGMcEGgWI7i+yZpmpByQ8J1lHhcL7PwqCwu9FPP3VUXzT4ltHe5i2z9dePwEHcDVOAfSnHsOlCXRA==}
es-module-lexer@1.7.0:
resolution: {integrity: sha512-jEQoCwk8hyb2AZziIOLhDqpm5+2ww5uIE6lkO/6jcOCusfk6LhMHpXXfBLXTZ7Ydyt0j4VoUQv6uGNYbdW+kBA==}
@@ -5207,6 +5200,9 @@ packages:
hast-util-raw@9.1.0:
resolution: {integrity: sha512-Y8/SBAHkZGoNkpzqqfCldijcuUKh7/su31kEBp67cFY09Wy0mTRgtsLYsiIxMJxlu0f6AA5SUTbDR8K0rxnbUw==}
hast-util-sanitize@5.0.2:
resolution: {integrity: sha512-3yTWghByc50aGS7JlGhk61SPenfE/p1oaFeNwkOOyrscaOkMGrcW9+Cy/QAIOBpZxP1yqDIzFMR0+Np0i0+usg==}
hast-util-to-estree@3.1.3:
resolution: {integrity: sha512-48+B/rJWAp0jamNbAAf9M7Uf//UVqAoMmgXhBdxTDJLGKY+LRnZ99qcG+Qjl5HfMpYNzS5v4EAwVEF34LeAj7w==}
@@ -5605,6 +5601,10 @@ packages:
resolution: {integrity: sha512-woHRUZ/iF23GBP1dkDQMh1QBad9dmr8/PAwNA54VrSOVYgI12MAcE14TqnDdQOdzyEonGzMepYnqBMYdsoAr8Q==}
hasBin: true
katex@0.16.33:
resolution: {integrity: sha512-q3N5u+1sY9Bu7T4nlXoiRBXWfwSefNGoKeOwekV+gw0cAXQlz2Ww6BLcmBxVDeXBMUDQv6fK5bcNaJLxob3ZQA==}
hasBin: true
keyv@4.5.4:
resolution: {integrity: sha512-oxVHkHR/EJf2CNXnWxRLW6mg7JyCCUcG0DtEGmL2ctUo1PNTin1PUil+r/+4r5MpVgC/fn1kjsx7mjSujKqIpw==}
@@ -5837,6 +5837,11 @@ packages:
engines: {node: '>= 18'}
hasBin: true
marked@17.0.3:
resolution: {integrity: sha512-jt1v2ObpyOKR8p4XaUJVk3YWRJ5n+i4+rjQopxvV32rSndTJXvIzuUdWWIy/1pFQMkQmvTXawzDNqOH/CUmx6A==}
engines: {node: '>= 20'}
hasBin: true
mdast-util-find-and-replace@3.0.2:
resolution: {integrity: sha512-Tmd1Vg/m3Xz43afeNxDIhWRtFZgM2VLyaf4vSTYwudTyeuTneoL3qtWMA5jeLyz/O1vDJmmV4QuScFCA2tBPwg==}
@@ -6111,10 +6116,6 @@ packages:
moo-color@1.0.3:
resolution: {integrity: sha512-i/+ZKXMDf6aqYtBhuOcej71YSlbjT3wCO/4H1j8rPvxDJEifdwgg5MaFyu6iYAT8GBZJg2z0dkgK4YMzvURALQ==}
mrmime@2.0.1:
resolution: {integrity: sha512-Y3wQdFg2Va6etvQ5I82yUhGdsKrcYox6p7FfL1LbK2J4V01F9TGlepTIhnK24t7koZibmg82KGglhA1XK5IsLQ==}
engines: {node: '>=10'}
ms@2.1.3:
resolution: {integrity: sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA==}
@@ -6237,9 +6238,6 @@ packages:
obug@2.1.1:
resolution: {integrity: sha512-uTqF9MuPraAQ+IsnPf366RG4cP9RtUi7MLO1N3KEc+wb0a6yKpeL0lmk2IB1jY5KHPAlTc6T/JRdC/YqxHNwkQ==}
ohash@2.0.11:
resolution: {integrity: sha512-RdR9FQrFwNBNXAr4GixM8YaRZRJ5PUWbKYbE5eOsrwAjJW0q2REGcf79oYPsLyskQCZG1PLN+S/K1V00joZAoQ==}
once@1.4.0:
resolution: {integrity: sha512-lNaJgI+2Q5URQBkccEKHTQOPaXdUxnZZElQTZY0MFUAuaEqe1E+Nyvgdz/aIyNi6Z9MzO5dv1H8n58/GELp3+w==}
@@ -6363,9 +6361,6 @@ packages:
pend@1.2.0:
resolution: {integrity: sha512-F3asv42UuXchdzt+xXqfW1OGlVBe+mxa2mqI0pg5yAHZPvFmY3Y6drSf/GQ1A86WgWEN9Kzh/WrgKa6iGcHXLg==}
perfect-debounce@2.1.0:
resolution: {integrity: sha512-LjgdTytVFXeUgtHZr9WYViYSM/g8MkcTPYDlPa3cDqMirHjKiSZPYd6DoL7pK8AJQr+uWkQvCjHNdiMqsrJs+g==}
periscopic@4.0.2:
resolution: {integrity: sha512-sqpQDUy8vgB7ycLkendSKS6HnVz1Rneoc3Rc+ZBUCe2pbqlVuCC5vF52l0NJ1aiMg/r1qfYF9/myz8CZeI2rjA==}
@@ -6628,12 +6623,6 @@ packages:
react-is@17.0.2:
resolution: {integrity: sha512-w2GsyukL62IJnlaff/nRegPQR94C/XXamvMWmSHRJ4y7Ts/4ocGRmTHvOs8PSE6pB3dWOrD/nueuU5sduBsQ4w==}
react-markdown@9.1.0:
resolution: {integrity: sha512-xaijuJB0kzGiUdG7nc2MOMDUDBWPyGAjZtUrow9XxUeua8IqeP+VlIfAZ3bphpcLTnSZXz6z9jcVC/TCwbfgdw==}
peerDependencies:
'@types/react': '>=18'
react: '>=18'
react-multi-email@1.0.25:
resolution: {integrity: sha512-Wmv28FvIk4nWgdpHzlIPonY4iSs7bPV35+fAiWYzSBhTo+vhXfglEhjY1WnjHQINW/Pibu2xlb/q1heVuytQHQ==}
peerDependencies:
@@ -6802,6 +6791,9 @@ packages:
resolution: {integrity: sha512-NZQZdC5wOE/H3UT28fVGL+ikOZcEzfMGk/c3iN9UGxzWHMa1op7274oyiUVrAG4B2EuFhus8SvkaYnhvW92p9Q==}
hasBin: true
rehype-harden@1.1.8:
resolution: {integrity: sha512-Qn7vR1xrf6fZCrkm9TDWi/AB4ylrHy+jqsNm1EHOAmbARYA6gsnVJBq/sdBh6kmT4NEZxH5vgIjrscefJAOXcw==}
rehype-katex@7.0.1:
resolution: {integrity: sha512-OiM2wrZ/wuhKkigASodFoo8wimG3H12LWQaH8qSPVJn9apWKFSH3YOCtbKpBorTVw/eI7cuT21XBbvwEswbIOA==}
@@ -6811,6 +6803,9 @@ packages:
rehype-recma@1.0.0:
resolution: {integrity: sha512-lqA4rGUf1JmacCNWWZx0Wv1dHqMwxzsDWYMTowuplHF3xH0N/MmrZ/G3BDZnzAkRmxDadujCjaKM2hqYdCBOGw==}
rehype-sanitize@6.0.0:
resolution: {integrity: sha512-CsnhKNsyI8Tub6L4sm5ZFsme4puGfc6pYylvXo1AeqaGbjOYyzNv3qZPwvs0oMJ39eryyeOdmxwUIo94IpEhqg==}
remark-breaks@4.0.0:
resolution: {integrity: sha512-IjEjJOkH4FuJvHZVIW0QCDWxcG96kCq7An/KVH2NfJe6rKZU2AsHeB3OEjPNRxi4QC34Xdx7I2KGYn6IpT7gxQ==}
@@ -6832,6 +6827,9 @@ packages:
remark-stringify@11.0.0:
resolution: {integrity: sha512-1OSmLd3awB/t8qdoEOMazZkNsfVTeY4fTsgzcQFdXNq8ToTN4ZGwrMnlda4K6smTFKD+GRV6O48i6Z4iKgPPpw==}
remend@1.2.1:
resolution: {integrity: sha512-4wC12bgXsfKAjF1ewwkNIQz5sqewz/z1xgIgjEMb3r1pEytQ37F0Cm6i+OhbTWEvguJD7lhOUJhK5fSasw9f0w==}
require-from-string@2.0.2:
resolution: {integrity: sha512-Xf0nWe6RseziFMu+Ap9biiUbmplq6S9/p+7w7YXP/JBHhrUDDUhwa+vANyubuqfZWTveU//DYVGsDG7RKL/vEw==}
engines: {node: '>=0.10.0'}
@@ -6994,10 +6992,6 @@ packages:
simple-swizzle@0.2.4:
resolution: {integrity: sha512-nAu1WFPQSMNr2Zn9PGSZK9AGn4t/y97lEm+MXTtUDwfP0ksAIX4nO+6ruD9Jwut4C49SB1Ws+fbXsm/yScWOHw==}
sirv@3.0.2:
resolution: {integrity: sha512-2wcC/oGxHis/BoHkkPwldgiPSYcpZK3JU28WoMVv55yHJgcZ8rlXvuG9iZggz+sU1d4bRgIGASwyWqjxu3FM0g==}
engines: {node: '>=18'}
sisteransi@1.0.5:
resolution: {integrity: sha512-bLGGlR1QxBcynn2d5YmDX4MGjlZvy2MRBDRNHLJ8VI6l6+9FUiyTFNJ0IveOSP0bcXgVDPRcfGqA0pjaqUpfVg==}
@@ -7075,6 +7069,12 @@ packages:
prettier:
optional: true
streamdown@2.3.0:
resolution: {integrity: sha512-OqS3by/lt91lSicE8RQP2nTsYI6Q/dQgGP2vcyn9YesCmRHhNjswAuBAZA1z0F4+oBU3II/eV51LqjCqwTb1lw==}
peerDependencies:
react: ^18.0.0 || ^19.0.0
react-dom: ^18.0.0 || ^19.0.0
strict-event-emitter@0.5.1:
resolution: {integrity: sha512-vMgjE/GGEPEFnhFub6pa4FmJBRBVOLpIII2hvCZ8Kzb7K0hlHo7mQv6xYrBvCL2LtAIBwFUK8wvuJgTVSQ5MFQ==}
@@ -7202,6 +7202,9 @@ packages:
tailwind-merge@2.6.1:
resolution: {integrity: sha512-Oo6tHdpZsGpkKG88HJ8RR1rg/RdnEkQEfMoEk2x1XRI3F1AxeU+ijRXpiVUF4UbLfcxxRGw6TbUINKYdWVsQTQ==}
tailwind-merge@3.5.0:
resolution: {integrity: sha512-I8K9wewnVDkL1NTGoqWmVEIlUcB9gFriAEkXkfCjX5ib8ezGxtR3xD7iZIxrfArjEsH7F1CHD4RFUtxefdqV/A==}
tailwindcss@3.4.19:
resolution: {integrity: sha512-3ofp+LL8E+pK/JuPLPggVAIaEuhvIz4qNcf3nA1Xn2o/7fb7s/TYpHhwGDv1ZU3PkBluUVaF8PyCHcm48cKLWQ==}
engines: {node: '>=14.0.0'}
@@ -7305,10 +7308,6 @@ packages:
resolution: {integrity: sha512-A5F0cM6+mDleacLIEUkmfpkBbnHJFV1d2rprHU2MXNk7mlxHq2zGojA+SRvQD1RoMo9gqjZPWEaKG4v1BQ48lw==}
engines: {node: ^20.19.0 || ^22.13.0 || >=24}
totalist@3.0.1:
resolution: {integrity: sha512-sf4i37nQ2LBx4m3wB74y+ubopq6W/dIzXg0FDGjsYnZHVa1Da8FH853wlL2gtUhg+xJXjfk3kUZS3BRoQeoQBQ==}
engines: {node: '>=6'}
tough-cookie@6.0.0:
resolution: {integrity: sha512-kXuRi1mtaKMrsLUxz3sQYvVl37B0Ns6MzfrtV5DvJceE9bPyspOqk9xxv7XbZWcfLWbFmm997vl83qUWVJA64w==}
engines: {node: '>=16'}
@@ -7456,10 +7455,6 @@ packages:
unpic@4.2.2:
resolution: {integrity: sha512-z6T2ScMgRV2y2H8MwwhY5xHZWXhUx/YxtOCGJwfURSl7ypVy4HpLIMWoIZKnnxQa/RKzM0kg8hUh0paIrpLfvw==}
unplugin-utils@0.3.1:
resolution: {integrity: sha512-5lWVjgi6vuHhJ526bI4nlCOmkCIF3nnfXkCMDeMJrtdvxTs6ZFCM8oNufGTsDbKv/tJ/xj8RpvXjRuPBZJuJog==}
engines: {node: '>=20.19.0'}
unplugin@2.1.0:
resolution: {integrity: sha512-us4j03/499KhbGP8BU7Hrzrgseo+KdfJYWcbcajCOqsAyb8Gk0Yn2kiUIcZISYCb1JFaZfIuG3b42HmguVOKCQ==}
engines: {node: '>=18.12.0'}
@@ -7566,8 +7561,8 @@ packages:
vfile@6.0.3:
resolution: {integrity: sha512-KzIbH/9tXat2u30jf+smMwFCsno4wHVdNmzFyL+T/L3UGqqk6JKfVqOFOZEpZSHADH1k40ab6NUIXZq422ov3Q==}
vinext@https://pkg.pr.new/hyoban/vinext@556a6d6:
resolution: {integrity: sha512-Sz8RkTDsY6cnGrevlQi4nXgahu8okEGsdKY5m31d/L9tXo35bNETMHfVee5gaI2UKZS9LMcffWaTOxxINUgogQ==, tarball: https://pkg.pr.new/hyoban/vinext@556a6d6}
vinext@https://pkg.pr.new/hyoban/vinext@a30ba79:
resolution: {integrity: sha512-yx/gCneOli5eGTrLUq6/M7A6DGQs14qOJW/Xp9RN6sTI0mErKyWWjO5E7FZT98BJbqH5xzI5nk8EOCLF3bojkA==, tarball: https://pkg.pr.new/hyoban/vinext@a30ba79}
version: 0.0.5
engines: {node: '>=22'}
hasBin: true
@@ -7576,32 +7571,12 @@ packages:
react-dom: '>=19.2.0'
vite: ^7.0.0
vite-dev-rpc@1.1.0:
resolution: {integrity: sha512-pKXZlgoXGoE8sEKiKJSng4hI1sQ4wi5YT24FCrwrLt6opmkjlqPPVmiPWWJn8M8byMxRGzp1CrFuqQs4M/Z39A==}
peerDependencies:
vite: ^2.9.0 || ^3.0.0-0 || ^4.0.0-0 || ^5.0.0-0 || ^6.0.1 || ^7.0.0-0
vite-hot-client@2.1.0:
resolution: {integrity: sha512-7SpgZmU7R+dDnSmvXE1mfDtnHLHQSisdySVR7lO8ceAXvM0otZeuQQ6C8LrS5d/aYyP/QZ0hI0L+dIPrm4YlFQ==}
peerDependencies:
vite: ^2.6.0 || ^3.0.0 || ^4.0.0 || ^5.0.0-0 || ^6.0.0-0 || ^7.0.0-0
vite-plugin-commonjs@0.10.4:
resolution: {integrity: sha512-eWQuvQKCcx0QYB5e5xfxBNjQKyrjEWZIR9UOkOV6JAgxVhtbZvCOF+FNC2ZijBJ3U3Px04ZMMyyMyFBVWIJ5+g==}
vite-plugin-dynamic-import@1.6.0:
resolution: {integrity: sha512-TM0sz70wfzTIo9YCxVFwS8OA9lNREsh+0vMHGSkWDTZ7bgd1Yjs5RV8EgB634l/91IsXJReg0xtmuQqP0mf+rg==}
vite-plugin-inspect@11.3.3:
resolution: {integrity: sha512-u2eV5La99oHoYPHE6UvbwgEqKKOQGz86wMg40CCosP6q8BkB6e5xPneZfYagK4ojPJSj5anHCrnvC20DpwVdRA==}
engines: {node: '>=14'}
peerDependencies:
'@nuxt/kit': '*'
vite: ^6.0.0 || ^7.0.0-0
peerDependenciesMeta:
'@nuxt/kit':
optional: true
vite-plugin-storybook-nextjs@3.2.2:
resolution: {integrity: sha512-ZJXCrhi9mW4jEJTKhJ5sUtpBe84mylU40me2aMuLSgIJo4gE/Rc559hZvMYLFTWta1gX7Rm8Co5EEHakPct+wA==}
peerDependencies:
@@ -9751,9 +9726,14 @@ snapshots:
'@parcel/watcher-win32-x64': 2.5.6
optional: true
'@pkgr/core@0.2.9': {}
'@pivanov/utils@0.0.2(react-dom@19.2.4(react@19.2.4))(react@19.2.4)':
dependencies:
react: 19.2.4
react-dom: 19.2.4(react@19.2.4)
'@pkgjs/parseargs@0.11.0':
optional: true
'@polka/url@1.0.0-next.29': {}
'@pkgr/core@0.2.9': {}
'@preact/signals-core@1.12.2': {}
@@ -10397,6 +10377,15 @@ snapshots:
transitivePeerDependencies:
- supports-color
'@streamdown/math@1.0.2(react@19.2.4)':
dependencies:
katex: 0.16.33
react: 19.2.4
rehype-katex: 7.0.1
remark-math: 6.0.0
transitivePeerDependencies:
- supports-color
'@stylistic/eslint-plugin@https://pkg.pr.new/@stylistic/eslint-plugin@258f9d8(eslint@10.0.2(jiti@1.21.7))':
dependencies:
'@eslint-community/eslint-utils': 4.9.1(eslint@10.0.2(jiti@1.21.7))
@@ -11571,8 +11560,6 @@ snapshots:
birecord@0.1.1: {}
birpc@2.9.0: {}
bl@4.1.0:
dependencies:
buffer: 5.7.1
@@ -12277,8 +12264,6 @@ snapshots:
environment@1.1.0: {}
error-stack-parser-es@1.0.5: {}
es-module-lexer@1.7.0: {}
es-module-lexer@2.0.0: {}
@@ -13135,6 +13120,12 @@ snapshots:
web-namespaces: 2.0.1
zwitch: 2.0.4
hast-util-sanitize@5.0.2:
dependencies:
'@types/hast': 3.0.4
'@ungap/structured-clone': 1.3.0
unist-util-position: 5.0.0
hast-util-to-estree@3.1.3:
dependencies:
'@types/estree': 1.0.8
@@ -13540,6 +13531,10 @@ snapshots:
dependencies:
commander: 8.3.0
katex@0.16.33:
dependencies:
commander: 8.3.0
keyv@4.5.4:
dependencies:
json-buffer: 3.0.1
@@ -13760,6 +13755,8 @@ snapshots:
marked@15.0.12: {}
marked@17.0.3: {}
mdast-util-find-and-replace@3.0.2:
dependencies:
'@types/mdast': 4.0.4
@@ -14342,8 +14339,6 @@ snapshots:
dependencies:
color-name: 1.1.4
mrmime@2.0.1: {}
ms@2.1.3: {}
mz@2.7.0:
@@ -14440,8 +14435,6 @@ snapshots:
obug@2.1.1: {}
ohash@2.0.11: {}
once@1.4.0:
dependencies:
wrappy: 1.0.2
@@ -14595,8 +14588,6 @@ snapshots:
pend@1.2.0: {}
perfect-debounce@2.1.0: {}
periscopic@4.0.2:
dependencies:
'@types/estree': 1.0.8
@@ -14862,24 +14853,6 @@ snapshots:
react-is@17.0.2: {}
react-markdown@9.1.0(@types/react@19.2.9)(react@19.2.4):
dependencies:
'@types/hast': 3.0.4
'@types/mdast': 4.0.4
'@types/react': 19.2.9
devlop: 1.1.0
hast-util-to-jsx-runtime: 2.3.6
html-url-attributes: 3.0.1
mdast-util-to-hast: 13.2.1
react: 19.2.4
remark-parse: 11.0.0
remark-rehype: 11.1.2
unified: 11.0.5
unist-util-visit: 5.1.0
vfile: 6.0.3
transitivePeerDependencies:
- supports-color
react-multi-email@1.0.25(react-dom@19.2.4(react@19.2.4))(react@19.2.4):
dependencies:
react: 19.2.4
@@ -15104,6 +15077,10 @@ snapshots:
dependencies:
jsesc: 3.1.0
rehype-harden@1.1.8:
dependencies:
unist-util-visit: 5.1.0
rehype-katex@7.0.1:
dependencies:
'@types/hast': 3.0.4
@@ -15128,6 +15105,11 @@ snapshots:
transitivePeerDependencies:
- supports-color
rehype-sanitize@6.0.0:
dependencies:
'@types/hast': 3.0.4
hast-util-sanitize: 5.0.2
remark-breaks@4.0.0:
dependencies:
'@types/mdast': 4.0.4
@@ -15184,6 +15166,8 @@ snapshots:
mdast-util-to-markdown: 2.1.2
unified: 11.0.5
remend@1.2.1: {}
require-from-string@2.0.2: {}
reselect@5.1.1: {}
@@ -15429,12 +15413,6 @@ snapshots:
dependencies:
is-arrayish: 0.3.4
sirv@3.0.2:
dependencies:
'@polka/url': 1.0.0-next.29
mrmime: 2.0.1
totalist: 3.0.1
sisteransi@1.0.5: {}
size-sensor@1.0.3: {}
@@ -15512,6 +15490,28 @@ snapshots:
- react-dom
- utf-8-validate
streamdown@2.3.0(react-dom@19.2.4(react@19.2.4))(react@19.2.4):
dependencies:
clsx: 2.1.1
hast-util-to-jsx-runtime: 2.3.6
html-url-attributes: 3.0.1
marked: 17.0.3
react: 19.2.4
react-dom: 19.2.4(react@19.2.4)
rehype-harden: 1.1.8
rehype-raw: 7.0.0
rehype-sanitize: 6.0.0
remark-gfm: 4.0.1
remark-parse: 11.0.0
remark-rehype: 11.1.2
remend: 1.2.1
tailwind-merge: 3.5.0
unified: 11.0.5
unist-util-visit: 5.1.0
unist-util-visit-parents: 6.0.2
transitivePeerDependencies:
- supports-color
strict-event-emitter@0.5.1: {}
string-argv@0.3.2: {}
@@ -15626,6 +15626,8 @@ snapshots:
tailwind-merge@2.6.1: {}
tailwind-merge@3.5.0: {}
tailwindcss@3.4.19(tsx@4.21.0)(yaml@2.8.2):
dependencies:
'@alloc/quick-lru': 5.2.0
@@ -15750,8 +15752,6 @@ snapshots:
dependencies:
eslint-visitor-keys: 5.0.0
totalist@3.0.1: {}
tough-cookie@6.0.0:
dependencies:
tldts: 7.0.17
@@ -15896,11 +15896,6 @@ snapshots:
unpic@4.2.2: {}
unplugin-utils@0.3.1:
dependencies:
pathe: 2.0.3
picomatch: 4.0.3
unplugin@2.1.0:
dependencies:
acorn: 8.16.0
@@ -15994,7 +15989,7 @@ snapshots:
'@types/unist': 3.0.3
vfile-message: 4.0.3
vinext@https://pkg.pr.new/hyoban/vinext@556a6d6(next@16.1.5(@babel/core@7.29.0)(react-dom@19.2.4(react@19.2.4))(react@19.2.4)(sass@1.93.2))(react-dom@19.2.4(react@19.2.4))(react@19.2.4)(typescript@5.9.3)(vite@8.0.0-beta.16(@types/node@24.10.12)(esbuild@0.27.2)(jiti@1.21.7)(sass@1.93.2)(terser@5.46.0)(tsx@4.21.0)(yaml@2.8.2))(webpack@5.104.1(esbuild@0.27.2)(uglify-js@3.19.3)):
vinext@https://pkg.pr.new/hyoban/vinext@a30ba79(next@16.1.5(@babel/core@7.29.0)(react-dom@19.2.4(react@19.2.4))(react@19.2.4)(sass@1.93.2))(react-dom@19.2.4(react@19.2.4))(react@19.2.4)(typescript@5.9.3)(vite@8.0.0-beta.16(@types/node@24.10.12)(esbuild@0.27.2)(jiti@1.21.7)(sass@1.93.2)(terser@5.46.0)(tsx@4.21.0)(yaml@2.8.2))(webpack@5.104.1(esbuild@0.27.2)(uglify-js@3.19.3)):
dependencies:
'@unpic/react': 1.0.2(next@16.1.5(@babel/core@7.29.0)(react-dom@19.2.4(react@19.2.4))(react@19.2.4)(sass@1.93.2))(react-dom@19.2.4(react@19.2.4))(react@19.2.4)
'@vercel/og': 0.8.6
@@ -16013,16 +16008,6 @@ snapshots:
- typescript
- webpack
vite-dev-rpc@1.1.0(vite@8.0.0-beta.16(@types/node@24.10.12)(esbuild@0.27.2)(jiti@1.21.7)(sass@1.93.2)(terser@5.46.0)(tsx@4.21.0)(yaml@2.8.2)):
dependencies:
birpc: 2.9.0
vite: 8.0.0-beta.16(@types/node@24.10.12)(esbuild@0.27.2)(jiti@1.21.7)(sass@1.93.2)(terser@5.46.0)(tsx@4.21.0)(yaml@2.8.2)
vite-hot-client: 2.1.0(vite@8.0.0-beta.16(@types/node@24.10.12)(esbuild@0.27.2)(jiti@1.21.7)(sass@1.93.2)(terser@5.46.0)(tsx@4.21.0)(yaml@2.8.2))
vite-hot-client@2.1.0(vite@8.0.0-beta.16(@types/node@24.10.12)(esbuild@0.27.2)(jiti@1.21.7)(sass@1.93.2)(terser@5.46.0)(tsx@4.21.0)(yaml@2.8.2)):
dependencies:
vite: 8.0.0-beta.16(@types/node@24.10.12)(esbuild@0.27.2)(jiti@1.21.7)(sass@1.93.2)(terser@5.46.0)(tsx@4.21.0)(yaml@2.8.2)
vite-plugin-commonjs@0.10.4:
dependencies:
acorn: 8.16.0
@@ -16036,21 +16021,6 @@ snapshots:
fast-glob: 3.3.3
magic-string: 0.30.21
vite-plugin-inspect@11.3.3(vite@8.0.0-beta.16(@types/node@24.10.12)(esbuild@0.27.2)(jiti@1.21.7)(sass@1.93.2)(terser@5.46.0)(tsx@4.21.0)(yaml@2.8.2)):
dependencies:
ansis: 4.2.0
debug: 4.4.3
error-stack-parser-es: 1.0.5
ohash: 2.0.11
open: 10.2.0
perfect-debounce: 2.1.0
sirv: 3.0.2
unplugin-utils: 0.3.1
vite: 8.0.0-beta.16(@types/node@24.10.12)(esbuild@0.27.2)(jiti@1.21.7)(sass@1.93.2)(terser@5.46.0)(tsx@4.21.0)(yaml@2.8.2)
vite-dev-rpc: 1.1.0(vite@8.0.0-beta.16(@types/node@24.10.12)(esbuild@0.27.2)(jiti@1.21.7)(sass@1.93.2)(terser@5.46.0)(tsx@4.21.0)(yaml@2.8.2))
transitivePeerDependencies:
- supports-color
vite-plugin-storybook-nextjs@3.2.2(next@16.1.5(@babel/core@7.29.0)(react-dom@19.2.4(react@19.2.4))(react@19.2.4)(sass@1.93.2))(storybook@10.2.13(@testing-library/dom@10.4.1)(react-dom@19.2.4(react@19.2.4))(react@19.2.4))(typescript@5.9.3)(vite@8.0.0-beta.16(@types/node@24.10.12)(esbuild@0.27.2)(jiti@1.21.7)(sass@1.93.2)(terser@5.46.0)(tsx@4.21.0)(yaml@2.8.2)):
dependencies:
'@next/env': 16.0.0

View File

@@ -6,6 +6,7 @@ const config = {
'./app/**/*.{js,ts,jsx,tsx}',
'./components/**/*.{js,ts,jsx,tsx}',
'./context/**/*.{js,ts,jsx,tsx}',
'./node_modules/streamdown/dist/*.js',
],
...commonConfig,
}

View File

@@ -6,7 +6,6 @@ import react from '@vitejs/plugin-react'
import { codeInspectorPlugin } from 'code-inspector-plugin'
import vinext from 'vinext'
import { defineConfig } from 'vite'
import Inspect from 'vite-plugin-inspect'
import tsconfigPaths from 'vite-tsconfig-paths'
const __dirname = path.dirname(fileURLToPath(import.meta.url))
@@ -71,93 +70,6 @@ const createForceInspectorClientInjectionPlugin = (): Plugin => {
}
}
function customI18nHmrPlugin(): Plugin {
const injectTarget = inspectorInjectTarget
const i18nHmrClientMarker = 'custom-i18n-hmr-client'
const i18nHmrClientSnippet = `/* ${i18nHmrClientMarker} */
if (import.meta.hot) {
const getI18nUpdateTarget = (file) => {
const match = file.match(/[/\\\\]i18n[/\\\\]([^/\\\\]+)[/\\\\]([^/\\\\]+)\\.json$/)
if (!match)
return null
const [, locale, namespaceFile] = match
return { locale, namespaceFile }
}
import.meta.hot.on('i18n-update', async ({ file, content }) => {
const target = getI18nUpdateTarget(file)
if (!target)
return
const [{ getI18n }, { camelCase }] = await Promise.all([
import('react-i18next'),
import('es-toolkit/string'),
])
const i18n = getI18n()
if (!i18n)
return
if (target.locale !== i18n.language)
return
let resources
try {
resources = JSON.parse(content)
}
catch {
return
}
const namespace = camelCase(target.namespaceFile)
i18n.addResourceBundle(target.locale, namespace, resources, true, true)
i18n.emit('languageChanged', i18n.language)
})
}
`
const injectI18nHmrClient = (code: string) => {
if (code.includes(i18nHmrClientMarker))
return code
const useClientMatch = code.match(/(['"])use client\1;?\s*\n/)
if (!useClientMatch)
return `${i18nHmrClientSnippet}\n${code}`
const insertAt = (useClientMatch.index ?? 0) + useClientMatch[0].length
return `${code.slice(0, insertAt)}\n${i18nHmrClientSnippet}\n${code.slice(insertAt)}`
}
return {
name: 'custom-i18n-hmr',
apply: 'serve',
handleHotUpdate({ file, server }) {
if (file.endsWith('.json') && file.includes('/i18n/')) {
server.ws.send({
type: 'custom',
event: 'i18n-update',
data: {
file,
content: fs.readFileSync(file, 'utf-8'),
},
})
// return empty array to prevent the default HMR
return []
}
},
transform(code, id) {
const cleanId = normalizeInspectorModuleId(id)
if (cleanId !== injectTarget)
return null
const nextCode = injectI18nHmrClient(code)
if (nextCode === code)
return null
return { code: nextCode, map: null }
},
}
}
export default defineConfig(({ mode }) => {
const isTest = mode === 'test'
@@ -177,12 +89,9 @@ export default defineConfig(({ mode }) => {
} as Plugin,
]
: [
Inspect(),
createCodeInspectorPlugin(),
createForceInspectorClientInjectionPlugin(),
react(),
vinext(),
customI18nHmrPlugin(),
],
resolve: {
alias: {