Compare commits


2 Commits

Author | SHA1 | Message | Date
hj24 | b4414901d1 | fix: add random sleep to reduce db IOPS | 2026-02-05 22:12:38 +08:00
hj24 | 34caf19f5b | chore: add performance logs | 2026-02-05 16:10:15 +08:00
26 changed files with 676 additions and 1196 deletions

View File

@@ -79,6 +79,29 @@ jobs:
find . -name "*.py" -type f -exec sed -i.bak -E 's/"([^"]+)" \| None/Optional["\1"]/g; s/'"'"'([^'"'"']+)'"'"' \| None/Optional['"'"'\1'"'"']/g' {} \;
find . -name "*.py.bak" -type f -delete
- name: Install pnpm
uses: pnpm/action-setup@v4
with:
package_json_file: web/package.json
run_install: false
- name: Setup Node.js
uses: actions/setup-node@v6
with:
node-version: 24
cache: pnpm
cache-dependency-path: ./web/pnpm-lock.yaml
- name: Install web dependencies
run: |
cd web
pnpm install --frozen-lockfile
- name: ESLint autofix
run: |
cd web
pnpm lint:fix || true
# mdformat breaks YAML front matter in markdown files. Add --exclude for directories containing YAML front matter.
- name: mdformat
run: |

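For reference, here is what the sed rewrite in this workflow does, re-expressed as a small Python sketch (the sed command above is the source of truth; the helper name is made up):

```python
import re

# Rewrite `"X" | None` / `'X' | None` union annotations into Optional[...],
# mirroring the two sed substitutions in the autofix job above.
_PATTERNS = [
    (re.compile(r'"([^"]+)" \| None'), r'Optional["\1"]'),
    (re.compile(r"'([^']+)' \| None"), r"Optional['\1']"),
]

def rewrite_optional_annotations(source: str) -> str:
    for pattern, replacement in _PATTERNS:
        source = pattern.sub(replacement, source)
    return source

print(rewrite_optional_annotations('x: "Foo" | None = None'))
# x: Optional["Foo"] = None
```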
View File

@@ -715,5 +715,6 @@ ANNOTATION_IMPORT_MAX_CONCURRENT=5
# Sandbox expired records clean configuration
SANDBOX_EXPIRED_RECORDS_CLEAN_GRACEFUL_PERIOD=21
SANDBOX_EXPIRED_RECORDS_CLEAN_BATCH_SIZE=1000
SANDBOX_EXPIRED_RECORDS_CLEAN_BATCH_MAX_INTERVAL=200
SANDBOX_EXPIRED_RECORDS_RETENTION_DAYS=30
SANDBOX_EXPIRED_RECORDS_CLEAN_TASK_LOCK_TTL=90000

View File

@@ -1309,6 +1309,10 @@ class SandboxExpiredRecordsCleanConfig(BaseSettings):
description="Maximum number of records to process in each batch",
default=1000,
)
SANDBOX_EXPIRED_RECORDS_CLEAN_BATCH_MAX_INTERVAL: PositiveInt = Field(
description="Maximum interval in milliseconds between batches",
default=200,
)
SANDBOX_EXPIRED_RECORDS_RETENTION_DAYS: PositiveInt = Field(
description="Retention days for sandbox expired workflow_run records and message records",
default=30,

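For context, pydantic-settings resolves a field like this directly from the environment by field name. A minimal standalone sketch (not the project's real config class):

```python
from pydantic import Field, PositiveInt
from pydantic_settings import BaseSettings

# Standalone sketch: the field name doubles as the env var name, so setting
# SANDBOX_EXPIRED_RECORDS_CLEAN_BATCH_MAX_INTERVAL=500 in the environment
# overrides the default of 200 shown in the diff above.
class SandboxCleanSettings(BaseSettings):
    SANDBOX_EXPIRED_RECORDS_CLEAN_BATCH_MAX_INTERVAL: PositiveInt = Field(
        description="Maximum interval in milliseconds between batches",
        default=200,
    )

print(SandboxCleanSettings().SANDBOX_EXPIRED_RECORDS_CLEAN_BATCH_MAX_INTERVAL)
```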
View File

@@ -1,4 +1,3 @@
import logging
import uuid
from datetime import datetime
from typing import Any, Literal, TypeAlias
@@ -55,8 +54,6 @@ ALLOW_CREATE_APP_MODES = ["chat", "agent-chat", "advanced-chat", "workflow", "co
register_enum_models(console_ns, IconType)
_logger = logging.getLogger(__name__)
class AppListQuery(BaseModel):
page: int = Field(default=1, ge=1, le=99999, description="Page number (1-99999)")
@@ -502,7 +499,6 @@ class AppListApi(Resource):
select(Workflow).where(
Workflow.version == Workflow.VERSION_DRAFT,
Workflow.app_id.in_(workflow_capable_app_ids),
Workflow.tenant_id == current_tenant_id,
)
)
.scalars()
@@ -514,14 +510,12 @@ class AppListApi(Resource):
NodeType.TRIGGER_PLUGIN,
}
for workflow in draft_workflows:
node_id = None
try:
for node_id, node_data in workflow.walk_nodes():
for _, node_data in workflow.walk_nodes():
if node_data.get("type") in trigger_node_types:
draft_trigger_app_ids.add(str(workflow.app_id))
break
except Exception:
_logger.exception("error while walking nodes, workflow_id=%s, node_id=%s", workflow.id, node_id)
continue
for app in app_pagination.items:

View File

@@ -6,8 +6,7 @@ from yarl import URL
from configs import dify_config
from core.helper.download import download_with_size_limit
from core.plugin.entities.marketplace import MarketplacePluginDeclaration, MarketplacePluginSnapshot
from extensions.ext_redis import redis_client
from core.plugin.entities.marketplace import MarketplacePluginDeclaration
marketplace_api_url = URL(str(dify_config.MARKETPLACE_API_URL))
logger = logging.getLogger(__name__)
@@ -44,37 +43,28 @@ def batch_fetch_plugin_by_ids(plugin_ids: list[str]) -> list[dict]:
return data.get("data", {}).get("plugins", [])
def batch_fetch_plugin_manifests_ignore_deserialization_error(
plugin_ids: list[str],
) -> Sequence[MarketplacePluginDeclaration]:
if len(plugin_ids) == 0:
return []
url = str(marketplace_api_url / "api/v1/plugins/batch")
response = httpx.post(url, json={"plugin_ids": plugin_ids}, headers={"X-Dify-Version": dify_config.project.version})
response.raise_for_status()
result: list[MarketplacePluginDeclaration] = []
for plugin in response.json()["data"]["plugins"]:
try:
result.append(MarketplacePluginDeclaration.model_validate(plugin))
except Exception:
logger.exception(
"Failed to deserialize marketplace plugin manifest for %s", plugin.get("plugin_id", "unknown")
)
return result
def record_install_plugin_event(plugin_unique_identifier: str):
url = str(marketplace_api_url / "api/v1/stats/plugins/install_count")
response = httpx.post(url, json={"unique_identifier": plugin_unique_identifier})
response.raise_for_status()
def fetch_global_plugin_manifest(cache_key_prefix: str, cache_ttl: int) -> None:
"""
Fetch all plugin manifests from marketplace and cache them in Redis.
This should be called once per check cycle to populate the instance-level cache.
Args:
cache_key_prefix: Redis key prefix for caching plugin manifests
cache_ttl: Cache TTL in seconds
Raises:
httpx.HTTPError: If the HTTP request fails
Exception: If any other error occurs during fetching or caching
"""
url = str(marketplace_api_url / "api/v1/dist/plugins/manifest.json")
response = httpx.get(url, headers={"X-Dify-Version": dify_config.project.version}, timeout=30)
response.raise_for_status()
raw_json = response.json()
plugins_data = raw_json.get("plugins", [])
# Parse and cache all plugin snapshots
for plugin_data in plugins_data:
plugin_snapshot = MarketplacePluginSnapshot.model_validate(plugin_data)
redis_client.setex(
name=f"{cache_key_prefix}{plugin_snapshot.plugin_id}",
time=cache_ttl,
value=plugin_snapshot.model_dump_json(),
)

View File

@@ -1,4 +1,4 @@
from pydantic import BaseModel, Field, computed_field, model_validator
from pydantic import BaseModel, Field, model_validator
from core.model_runtime.entities.provider_entities import ProviderEntity
from core.plugin.entities.endpoint import EndpointProviderDeclaration
@@ -48,15 +48,3 @@ class MarketplacePluginDeclaration(BaseModel):
if "tool" in data and not data["tool"]:
del data["tool"]
return data
class MarketplacePluginSnapshot(BaseModel):
org: str
name: str
latest_version: str
latest_package_identifier: str
latest_package_url: str
@computed_field
def plugin_id(self) -> str:
return f"{self.org}/{self.name}"

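Worth noting: a pydantic `@computed_field` is included in serialization, which is why `plugin_id` travels with the cached snapshot JSON even though it is derived from `org` and `name`. A minimal sketch with illustrative sample values:

```python
from pydantic import BaseModel, computed_field

class Snapshot(BaseModel):
    org: str
    name: str

    @computed_field  # computed fields are serialized alongside declared ones
    def plugin_id(self) -> str:
        return f"{self.org}/{self.name}"

snap = Snapshot(org="some-org", name="some-plugin")
print(snap.model_dump_json())
# {"org":"some-org","name":"some-plugin","plugin_id":"some-org/some-plugin"}
```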
View File

@@ -112,7 +112,7 @@ class ArrayBooleanVariable(ArrayBooleanSegment, ArrayVariable):
class RAGPipelineVariable(BaseModel):
belong_to_node_id: str = Field(description="belong to which node id, shared means public")
type: str = Field(description="variable type, text-input, paragraph, select, number, file, file-list")
label: str = Field(description="label")
description: str | None = Field(description="description", default="")
variable: str = Field(description="variable key", default="")

View File

@@ -10,10 +10,6 @@ import models as models
import sqlalchemy as sa
from sqlalchemy.dialects import postgresql
def _is_pg(conn):
return conn.dialect.name == "postgresql"
# revision identifiers, used by Alembic.
revision = '7df29de0f6be'
down_revision = '03ea244985ce'
@@ -23,31 +19,16 @@ depends_on = None
def upgrade():
# ### commands auto generated by Alembic - please adjust! ###
conn = op.get_bind()
if _is_pg(conn):
op.create_table('tenant_credit_pools',
sa.Column('id', models.types.StringUUID(), server_default=sa.text('uuid_generate_v4()'), nullable=False),
sa.Column('tenant_id', models.types.StringUUID(), nullable=False),
sa.Column('pool_type', sa.String(length=40), server_default='trial', nullable=False),
sa.Column('quota_limit', sa.BigInteger(), nullable=False),
sa.Column('quota_used', sa.BigInteger(), nullable=False),
sa.Column('created_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP'), nullable=False),
sa.Column('updated_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP'), nullable=False),
sa.PrimaryKeyConstraint('id', name='tenant_credit_pool_pkey')
)
else:
# For MySQL and other databases, UUID should be generated at application level
op.create_table('tenant_credit_pools',
sa.Column('id', models.types.StringUUID(), nullable=False),
sa.Column('tenant_id', models.types.StringUUID(), nullable=False),
sa.Column('pool_type', sa.String(length=40), server_default='trial', nullable=False),
sa.Column('quota_limit', sa.BigInteger(), nullable=False),
sa.Column('quota_used', sa.BigInteger(), nullable=False),
sa.Column('created_at', sa.DateTime(), server_default=sa.func.current_timestamp(), nullable=False),
sa.Column('updated_at', sa.DateTime(), server_default=sa.func.current_timestamp(), nullable=False),
sa.PrimaryKeyConstraint('id', name='tenant_credit_pool_pkey')
)
op.create_table('tenant_credit_pools',
sa.Column('id', models.types.StringUUID(), server_default=sa.text('uuid_generate_v4()'), nullable=False),
sa.Column('tenant_id', models.types.StringUUID(), nullable=False),
sa.Column('pool_type', sa.String(length=40), server_default='trial', nullable=False),
sa.Column('quota_limit', sa.BigInteger(), nullable=False),
sa.Column('quota_used', sa.BigInteger(), nullable=False),
sa.Column('created_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP'), nullable=False),
sa.Column('updated_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP'), nullable=False),
sa.PrimaryKeyConstraint('id', name='tenant_credit_pool_pkey')
)
with op.batch_alter_table('tenant_credit_pools', schema=None) as batch_op:
batch_op.create_index('tenant_credit_pool_pool_type_idx', ['pool_type'], unique=False)
batch_op.create_index('tenant_credit_pool_tenant_id_idx', ['tenant_id'], unique=False)

View File

@@ -2166,9 +2166,7 @@ class TenantCreditPool(TypeBase):
sa.Index("tenant_credit_pool_pool_type_idx", "pool_type"),
)
id: Mapped[str] = mapped_column(
StringUUID, insert_default=lambda: str(uuid4()), default_factory=lambda: str(uuid4()), init=False
)
id: Mapped[str] = mapped_column(StringUUID, primary_key=True, server_default=text("uuid_generate_v4()"), init=False)
tenant_id: Mapped[str] = mapped_column(StringUUID, nullable=False)
pool_type: Mapped[str] = mapped_column(String(40), nullable=False, default="trial", server_default="trial")
quota_limit: Mapped[int] = mapped_column(BigInteger, nullable=False, default=0)

View File

@@ -16,6 +16,7 @@ class SavedMessage(TypeBase):
__table_args__ = (
sa.PrimaryKeyConstraint("id", name="saved_message_pkey"),
sa.Index("saved_message_message_idx", "app_id", "message_id", "created_by_role", "created_by"),
sa.Index("saved_message_message_id_idx", "message_id"),
)
id: Mapped[str] = mapped_column(

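The new `message_id` index plausibly serves the batch relation deletes in `MessagesCleanService._batch_delete_message_relations` (an inference from this compare, not stated in the diff). A stripped-down sketch of the delete shape it would accelerate:

```python
import sqlalchemy as sa
from sqlalchemy.orm import DeclarativeBase, Mapped, mapped_column

class Base(DeclarativeBase):
    pass

# Minimal stand-in for the model above, just enough to render the statement.
class SavedMessage(Base):
    __tablename__ = "saved_messages"
    id: Mapped[str] = mapped_column(sa.String(36), primary_key=True)
    message_id: Mapped[str] = mapped_column(sa.String(36))

# Deleting relation rows for a batch of message ids filters on message_id,
# which the new saved_message_message_id_idx index covers.
stmt = sa.delete(SavedMessage).where(SavedMessage.message_id.in_(["m1", "m2"]))
print(stmt)
```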
View File

@@ -1,24 +1,16 @@
import logging
import math
import time
import click
import app
from core.helper.marketplace import fetch_global_plugin_manifest
from extensions.ext_database import db
from models.account import TenantPluginAutoUpgradeStrategy
from tasks import process_tenant_plugin_autoupgrade_check_task as check_task
logger = logging.getLogger(__name__)
AUTO_UPGRADE_MINIMAL_CHECKING_INTERVAL = 15 * 60 # 15 minutes
MAX_CONCURRENT_CHECK_TASKS = 20
# Import cache constants from the task module
CACHE_REDIS_KEY_PREFIX = check_task.CACHE_REDIS_KEY_PREFIX
CACHE_REDIS_TTL = check_task.CACHE_REDIS_TTL
@app.celery.task(queue="plugin")
def check_upgradable_plugin_task():
@@ -48,22 +40,6 @@ def check_upgradable_plugin_task():
) # make sure all strategies are checked in this interval
batch_interval_time = (AUTO_UPGRADE_MINIMAL_CHECKING_INTERVAL / batch_chunk_count) if batch_chunk_count > 0 else 0
if total_strategies == 0:
click.echo(click.style("no strategies to process, skipping plugin manifest fetch.", fg="green"))
return
# Fetch and cache all plugin manifests before processing tenants
# This reduces load on marketplace from 300k requests to 1 request per check cycle
logger.info("fetching global plugin manifest from marketplace")
try:
fetch_global_plugin_manifest(CACHE_REDIS_KEY_PREFIX, CACHE_REDIS_TTL)
logger.info("successfully fetched and cached global plugin manifest")
except Exception as e:
logger.exception("failed to fetch global plugin manifest")
click.echo(click.style(f"failed to fetch global plugin manifest: {e}", fg="red"))
click.echo(click.style("skipping plugin upgrade check for this cycle", fg="yellow"))
return
for i in range(0, total_strategies, MAX_CONCURRENT_CHECK_TASKS):
batch_strategies = strategies[i : i + MAX_CONCURRENT_CHECK_TASKS]
for strategy in batch_strategies:

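A worked example of the pacing arithmetic above, assuming the ceil division that the `math` import suggests; the strategy count is a sample figure taken from the "300k requests" comment elsewhere in this compare:

```python
import math

AUTO_UPGRADE_MINIMAL_CHECKING_INTERVAL = 15 * 60  # seconds
MAX_CONCURRENT_CHECK_TASKS = 20

total_strategies = 300_000  # sample value
batch_chunk_count = math.ceil(total_strategies / MAX_CONCURRENT_CHECK_TASKS)
batch_interval_time = (
    AUTO_UPGRADE_MINIMAL_CHECKING_INTERVAL / batch_chunk_count if batch_chunk_count > 0 else 0
)
print(batch_chunk_count, batch_interval_time)  # 15000 batches, 0.06s apart
```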
View File

@@ -1,6 +1,8 @@
import datetime
import logging
import os
import random
import time
from collections.abc import Sequence
from typing import cast
@@ -193,11 +195,15 @@ class MessagesCleanService:
self._end_before,
)
max_batch_interval_ms = int(os.environ.get("SANDBOX_EXPIRED_RECORDS_CLEAN_BATCH_MAX_INTERVAL", 200))
while True:
stats["batches"] += 1
batch_start = time.monotonic()
# Step 1: Fetch a batch of messages using cursor
with Session(db.engine, expire_on_commit=False) as session:
fetch_messages_start = time.monotonic()
msg_stmt = (
select(Message.id, Message.app_id, Message.created_at)
.where(Message.created_at < self._end_before)
@@ -223,6 +229,12 @@ class MessagesCleanService:
SimpleMessage(id=msg_id, app_id=app_id, created_at=msg_created_at)
for msg_id, app_id, msg_created_at in raw_messages
]
logger.info(
"clean_messages (batch %s): fetched %s messages in %sms",
stats["batches"],
len(messages),
int((time.monotonic() - fetch_messages_start) * 1000),
)
# Track total messages fetched across all batches
stats["total_messages"] += len(messages)
@@ -241,8 +253,16 @@ class MessagesCleanService:
logger.info("clean_messages (batch %s): no app_ids found, skip", stats["batches"])
continue
fetch_apps_start = time.monotonic()
app_stmt = select(App.id, App.tenant_id).where(App.id.in_(app_ids))
apps = list(session.execute(app_stmt).all())
logger.info(
"clean_messages (batch %s): fetched %s apps for %s app_ids in %sms",
stats["batches"],
len(apps),
len(app_ids),
int((time.monotonic() - fetch_apps_start) * 1000),
)
if not apps:
logger.info("clean_messages (batch %s): no apps found, skip", stats["batches"])
@@ -252,7 +272,15 @@ class MessagesCleanService:
app_to_tenant: dict[str, str] = {app.id: app.tenant_id for app in apps}
# Step 3: Delegate to policy to determine which messages to delete
policy_start = time.monotonic()
message_ids_to_delete = self._policy.filter_message_ids(messages, app_to_tenant)
logger.info(
"clean_messages (batch %s): policy selected %s/%s messages in %sms",
stats["batches"],
len(message_ids_to_delete),
len(messages),
int((time.monotonic() - policy_start) * 1000),
)
if not message_ids_to_delete:
logger.info("clean_messages (batch %s): no messages to delete, skip", stats["batches"])
@@ -263,14 +291,20 @@ class MessagesCleanService:
# Step 4: Batch delete messages and their relations
if not self._dry_run:
with Session(db.engine, expire_on_commit=False) as session:
delete_relations_start = time.monotonic()
# Delete related records first
self._batch_delete_message_relations(session, message_ids_to_delete)
delete_relations_ms = int((time.monotonic() - delete_relations_start) * 1000)
# Delete messages
delete_messages_start = time.monotonic()
delete_stmt = delete(Message).where(Message.id.in_(message_ids_to_delete))
delete_result = cast(CursorResult, session.execute(delete_stmt))
messages_deleted = delete_result.rowcount
delete_messages_ms = int((time.monotonic() - delete_messages_start) * 1000)
commit_start = time.monotonic()
session.commit()
commit_ms = int((time.monotonic() - commit_start) * 1000)
stats["total_deleted"] += messages_deleted
@@ -280,6 +314,19 @@ class MessagesCleanService:
len(messages),
messages_deleted,
)
logger.info(
"clean_messages (batch %s): relations %sms, messages %sms, commit %sms, batch total %sms",
stats["batches"],
delete_relations_ms,
delete_messages_ms,
commit_ms,
int((time.monotonic() - batch_start) * 1000),
)
# Random sleep between batches to avoid overwhelming the database
sleep_ms = random.uniform(0, max_batch_interval_ms) # noqa: S311
logger.info("clean_messages (batch %s): sleeping for %.2fms", stats["batches"], sleep_ms)
time.sleep(sleep_ms / 1000)
else:
# Log random sample of message IDs that would be deleted (up to 10)
sample_size = min(10, len(message_ids_to_delete))

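The core pattern this commit introduces, condensed into a standalone sketch (`process_batch` is a stand-in for the real fetch/filter/delete body):

```python
import os
import random
import time

# Jittered pacing between delete batches: each worker sleeps a random
# 0..max interval so concurrent cleanups do not hit the database in lockstep.
max_batch_interval_ms = int(os.environ.get("SANDBOX_EXPIRED_RECORDS_CLEAN_BATCH_MAX_INTERVAL", 200))

def process_batch(n: int) -> None:
    print(f"batch {n}")  # stand-in for fetch/filter/delete

for batch in range(3):
    process_batch(batch)
    sleep_ms = random.uniform(0, max_batch_interval_ms)  # noqa: S311 - jitter, not crypto
    time.sleep(sleep_ms / 1000)
```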
View File

@@ -6,8 +6,8 @@ import typing
import click
from celery import shared_task
from core.helper.marketplace import record_install_plugin_event
from core.plugin.entities.marketplace import MarketplacePluginSnapshot
from core.helper import marketplace
from core.helper.marketplace import MarketplacePluginDeclaration
from core.plugin.entities.plugin import PluginInstallationSource
from core.plugin.impl.plugin import PluginInstaller
from extensions.ext_redis import redis_client
@@ -16,7 +16,7 @@ from models.account import TenantPluginAutoUpgradeStrategy
logger = logging.getLogger(__name__)
RETRY_TIMES_OF_ONE_PLUGIN_IN_ONE_TENANT = 3
CACHE_REDIS_KEY_PREFIX = "plugin_autoupgrade_check_task:cached_plugin_snapshot:"
CACHE_REDIS_KEY_PREFIX = "plugin_autoupgrade_check_task:cached_plugin_manifests:"
CACHE_REDIS_TTL = 60 * 60 # 1 hour
@@ -25,11 +25,11 @@ def _get_redis_cache_key(plugin_id: str) -> str:
return f"{CACHE_REDIS_KEY_PREFIX}{plugin_id}"
def _get_cached_manifest(plugin_id: str) -> typing.Union[MarketplacePluginSnapshot, None, bool]:
def _get_cached_manifest(plugin_id: str) -> typing.Union[MarketplacePluginDeclaration, None, bool]:
"""
Get cached plugin manifest from Redis.
Returns:
- MarketplacePluginSnapshot: if found in cache
- MarketplacePluginDeclaration: if found in cache
- None: if cached as not found (marketplace returned no result)
- False: if not in cache at all
"""
@@ -43,31 +43,76 @@ def _get_cached_manifest(plugin_id: str) -> typing.Union[MarketplacePluginSnapsh
if cached_json is None:
return None
return MarketplacePluginSnapshot.model_validate(cached_json)
return MarketplacePluginDeclaration.model_validate(cached_json)
except Exception:
logger.exception("Failed to get cached manifest for plugin %s", plugin_id)
return False
def _set_cached_manifest(plugin_id: str, manifest: typing.Union[MarketplacePluginDeclaration, None]) -> None:
"""
Cache plugin manifest in Redis.
Args:
plugin_id: The plugin ID
manifest: The manifest to cache, or None if not found in marketplace
"""
try:
key = _get_redis_cache_key(plugin_id)
if manifest is None:
# Cache the fact that this plugin was not found
redis_client.setex(key, CACHE_REDIS_TTL, json.dumps(None))
else:
# Cache the manifest data
redis_client.setex(key, CACHE_REDIS_TTL, manifest.model_dump_json())
except Exception:
# If Redis fails, continue without caching
# traceback.print_exc()
logger.exception("Failed to set cached manifest for plugin %s", plugin_id)
def marketplace_batch_fetch_plugin_manifests(
plugin_ids_plain_list: list[str],
) -> list[MarketplacePluginSnapshot]:
"""
Fetch plugin manifests from Redis cache only.
This function assumes fetch_global_plugin_manifest() has been called
to pre-populate the cache with all marketplace plugins.
"""
result: list[MarketplacePluginSnapshot] = []
) -> list[MarketplacePluginDeclaration]:
"""Fetch plugin manifests with Redis caching support."""
cached_manifests: dict[str, typing.Union[MarketplacePluginDeclaration, None]] = {}
not_cached_plugin_ids: list[str] = []
# Check Redis cache for each plugin
for plugin_id in plugin_ids_plain_list:
cached_result = _get_cached_manifest(plugin_id)
if not isinstance(cached_result, MarketplacePluginSnapshot):
# cached_result is False (not in cache) or None (cached as not found)
logger.warning("plugin %s not found in cache, skipping", plugin_id)
continue
if cached_result is False:
# Not in cache, need to fetch
not_cached_plugin_ids.append(plugin_id)
else:
# Either found manifest or cached as None (not found in marketplace)
# At this point, cached_result is either MarketplacePluginDeclaration or None
if isinstance(cached_result, bool):
# This should never happen due to the if condition above, but for type safety
continue
cached_manifests[plugin_id] = cached_result
result.append(cached_result)
# Fetch uncached plugins from marketplace
if not_cached_plugin_ids:
manifests = marketplace.batch_fetch_plugin_manifests_ignore_deserialization_error(not_cached_plugin_ids)
# Cache the fetched manifests
for manifest in manifests:
cached_manifests[manifest.plugin_id] = manifest
_set_cached_manifest(manifest.plugin_id, manifest)
# Cache plugins that were not found in marketplace
fetched_plugin_ids = {manifest.plugin_id for manifest in manifests}
for plugin_id in not_cached_plugin_ids:
if plugin_id not in fetched_plugin_ids:
cached_manifests[plugin_id] = None
_set_cached_manifest(plugin_id, None)
# Build result list from cached manifests
result: list[MarketplacePluginDeclaration] = []
for plugin_id in plugin_ids_plain_list:
cached_manifest: typing.Union[MarketplacePluginDeclaration, None] = cached_manifests.get(plugin_id)
if cached_manifest is not None:
result.append(cached_manifest)
return result
@@ -166,7 +211,7 @@ def process_tenant_plugin_autoupgrade_check_task(
# execute upgrade
new_unique_identifier = manifest.latest_package_identifier
record_install_plugin_event(new_unique_identifier)
marketplace.record_install_plugin_event(new_unique_identifier)
click.echo(
click.style(
f"Upgrade plugin: {original_unique_identifier} -> {new_unique_identifier}",

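For clarity, the tri-state cache above distinguishes a missing key from an explicit JSON `null` payload. A minimal sketch of that negative-caching round trip (key prefix and TTL mirror the task constants; the local Redis client is illustrative):

```python
import json

import redis

CACHE_REDIS_KEY_PREFIX = "plugin_autoupgrade_check_task:cached_plugin_manifests:"
CACHE_REDIS_TTL = 60 * 60  # 1 hour

client = redis.Redis()  # assumes a local Redis for this sketch
key = f"{CACHE_REDIS_KEY_PREFIX}some-org/some-plugin"

# Marketplace returned nothing: cache the *absence* as a JSON null.
client.setex(key, CACHE_REDIS_TTL, json.dumps(None))

cached = client.get(key)
if cached is None:
    print("not in cache at all -> fetch from marketplace")
elif json.loads(cached) is None:
    print("cached as not found -> skip this plugin")
else:
    print("cached manifest -> validate and use it")
```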
View File

@@ -1518,5 +1518,6 @@ AMPLITUDE_API_KEY=
# Sandbox expired records clean configuration
SANDBOX_EXPIRED_RECORDS_CLEAN_GRACEFUL_PERIOD=21
SANDBOX_EXPIRED_RECORDS_CLEAN_BATCH_SIZE=1000
SANDBOX_EXPIRED_RECORDS_CLEAN_BATCH_MAX_INTERVAL=200
SANDBOX_EXPIRED_RECORDS_RETENTION_DAYS=30
SANDBOX_EXPIRED_RECORDS_CLEAN_TASK_LOCK_TTL=90000

View File

@@ -682,6 +682,7 @@ x-shared-env: &shared-api-worker-env
AMPLITUDE_API_KEY: ${AMPLITUDE_API_KEY:-}
SANDBOX_EXPIRED_RECORDS_CLEAN_GRACEFUL_PERIOD: ${SANDBOX_EXPIRED_RECORDS_CLEAN_GRACEFUL_PERIOD:-21}
SANDBOX_EXPIRED_RECORDS_CLEAN_BATCH_SIZE: ${SANDBOX_EXPIRED_RECORDS_CLEAN_BATCH_SIZE:-1000}
SANDBOX_EXPIRED_RECORDS_CLEAN_BATCH_MAX_INTERVAL: ${SANDBOX_EXPIRED_RECORDS_CLEAN_BATCH_MAX_INTERVAL:-200}
SANDBOX_EXPIRED_RECORDS_RETENTION_DAYS: ${SANDBOX_EXPIRED_RECORDS_RETENTION_DAYS:-30}
SANDBOX_EXPIRED_RECORDS_CLEAN_TASK_LOCK_TTL: ${SANDBOX_EXPIRED_RECORDS_CLEAN_TASK_LOCK_TTL:-90000}

View File

@@ -1,3 +1,4 @@
/* eslint-disable tailwindcss/classnames-order */
import type { Meta, StoryObj } from '@storybook/nextjs-vite'
import Effect from '.'
@@ -28,8 +29,8 @@ type Story = StoryObj<typeof meta>
export const Playground: Story = {
render: () => (
<div className="relative h-40 w-72 overflow-hidden rounded-2xl border border-divider-subtle bg-background-default-subtle">
<Effect className="left-8 top-6" />
<Effect className="bg-util-colors-purple-brand-purple-brand-500 right-10 top-14" />
<Effect className="top-6 left-8" />
<Effect className="top-14 right-10 bg-util-colors-purple-brand-purple-brand-500" />
<div className="absolute inset-x-0 bottom-4 flex justify-center text-xs text-text-secondary">
Accent glow
</div>

View File

@@ -14,6 +14,7 @@ const ErrorMessage = ({
errorMsg,
}: ErrorMessageProps) => {
return (
// eslint-disable-next-line tailwindcss/migration-from-tailwind-2
<div className={cn(
'flex gap-x-0.5 rounded-xl border-[0.5px] border-components-panel-border bg-opacity-40 bg-toast-error-bg p-2 shadow-xs shadow-shadow-shadow-3',
className,

View File

@@ -1,11 +1,14 @@
import { spawnSync } from 'node:child_process'
import { randomUUID } from 'node:crypto'
import { createSerwistRoute } from '@serwist/turbopack'
const basePath = process.env.NEXT_PUBLIC_BASE_PATH || ''
const revision = spawnSync('git', ['rev-parse', 'HEAD'], { encoding: 'utf-8' }).stdout?.trim() || randomUUID()
export const { dynamic, dynamicParams, revalidate, generateStaticParams, GET } = createSerwistRoute({
additionalPrecacheEntries: [{ url: `${basePath}/_offline.html`, revision }],
swSrc: 'app/sw.ts',
nextConfig: {
basePath,
},
useNativeEsbuild: true,
})

View File

@@ -3,9 +3,7 @@
/// <reference lib="webworker" />
import type { PrecacheEntry, SerwistGlobalConfig } from 'serwist'
import { defaultCache } from '@serwist/turbopack/worker'
import { Serwist } from 'serwist'
import { withLeadingSlash } from 'ufo'
import { CacheableResponsePlugin, CacheFirst, ExpirationPlugin, NetworkFirst, Serwist, StaleWhileRevalidate } from 'serwist'
declare global {
// eslint-disable-next-line ts/consistent-type-definitions
@@ -20,30 +18,78 @@ const scopePathname = new URL(self.registration.scope).pathname
const basePath = scopePathname.replace(/\/serwist\/$/, '').replace(/\/$/, '')
const offlineUrl = `${basePath}/_offline.html`
const normalizeManifestUrl = (url: string): string => {
if (url.startsWith('/serwist/'))
return url.replace(/^\/serwist\//, '/')
return withLeadingSlash(url)
}
const manifest = self.__SW_MANIFEST?.map((entry) => {
if (typeof entry === 'string')
return normalizeManifestUrl(entry)
return {
...entry,
url: normalizeManifestUrl(entry.url),
}
})
const serwist = new Serwist({
precacheEntries: manifest,
precacheEntries: self.__SW_MANIFEST,
skipWaiting: true,
disableDevLogs: true,
clientsClaim: true,
navigationPreload: true,
runtimeCaching: defaultCache,
runtimeCaching: [
{
matcher: ({ url }) => url.origin === 'https://fonts.googleapis.com',
handler: new CacheFirst({
cacheName: 'google-fonts',
plugins: [
new CacheableResponsePlugin({ statuses: [0, 200] }),
new ExpirationPlugin({
maxEntries: 4,
maxAgeSeconds: 365 * 24 * 60 * 60,
}),
],
}),
},
{
matcher: ({ url }) => url.origin === 'https://fonts.gstatic.com',
handler: new CacheFirst({
cacheName: 'google-fonts-webfonts',
plugins: [
new CacheableResponsePlugin({ statuses: [0, 200] }),
new ExpirationPlugin({
maxEntries: 4,
maxAgeSeconds: 365 * 24 * 60 * 60,
}),
],
}),
},
{
matcher: ({ request }) => request.destination === 'image',
handler: new CacheFirst({
cacheName: 'images',
plugins: [
new CacheableResponsePlugin({ statuses: [0, 200] }),
new ExpirationPlugin({
maxEntries: 64,
maxAgeSeconds: 30 * 24 * 60 * 60,
}),
],
}),
},
{
matcher: ({ request }) => request.destination === 'script' || request.destination === 'style',
handler: new StaleWhileRevalidate({
cacheName: 'static-resources',
plugins: [
new ExpirationPlugin({
maxEntries: 32,
maxAgeSeconds: 24 * 60 * 60,
}),
],
}),
},
{
matcher: ({ url, sameOrigin }) => sameOrigin && url.pathname.startsWith('/api/'),
handler: new NetworkFirst({
cacheName: 'api-cache',
networkTimeoutSeconds: 10,
plugins: [
new ExpirationPlugin({
maxEntries: 16,
maxAgeSeconds: 60 * 60,
}),
],
}),
},
],
fallbacks: {
entries: [
{

View File

@@ -38,11 +38,6 @@ pnpm lint:tss
This command lints the entire project and is intended for final verification before committing or pushing changes.
### Introducing New Plugins or Rules
If a new rule flags many errors in existing code, or its automatic fixes would generate an overly large diff, do not use the `--fix` option.
Instead, introduce the rule first, use the `--suppress-all` option to temporarily suppress the existing violations, and fix them gradually in follow-up changes.
## Type Check
You should be able to see suggestions from TypeScript in your editor for all open files.

File diff suppressed because it is too large.

View File

@@ -1,9 +1,9 @@
// @ts-check
import antfu from '@antfu/eslint-config'
import pluginQuery from '@tanstack/eslint-plugin-query'
import tailwindcss from 'eslint-plugin-better-tailwindcss'
import sonar from 'eslint-plugin-sonarjs'
import storybook from 'eslint-plugin-storybook'
import tailwind from 'eslint-plugin-tailwindcss'
import dify from './eslint-rules/index.js'
export default antfu(
@@ -23,7 +23,7 @@ export default antfu(
},
},
nextjs: true,
ignores: ['public', 'types/doc-paths.ts', 'eslint-suppressions.json'],
ignores: ['public', 'types/doc-paths.ts'],
typescript: {
overrides: {
'ts/consistent-type-definitions': ['error', 'type'],
@@ -66,16 +66,42 @@ export default antfu(
sonarjs: sonar,
},
},
tailwind.configs['flat/recommended'],
{
files: ['**/*.{ts,tsx}'],
plugins: {
tailwindcss,
settings: {
tailwindcss: {
// These are the default values but feel free to customize
callees: ['classnames', 'clsx', 'ctl', 'cn', 'classNames'],
config: 'tailwind.config.js', // returned from `loadConfig()` utility if not provided
cssFiles: [
'**/*.css',
'!**/node_modules',
'!**/.*',
'!**/dist',
'!**/build',
'!**/.storybook',
'!**/.next',
'!**/.public',
],
cssFilesRefreshRate: 5_000,
removeDuplicates: true,
skipClassAttribute: false,
whitelist: [],
tags: [], // can be set to e.g. ['tw'] for use in tw`bg-blue`
classRegex: '^class(Name)?$', // can be modified to support custom attributes. E.g. "^tw$" for `twin.macro`
},
},
rules: {
'tailwindcss/enforce-consistent-class-order': 'error',
'tailwindcss/no-duplicate-classes': 'error',
'tailwindcss/no-unnecessary-whitespace': 'error',
'tailwindcss/no-unknown-classes': 'warn',
// due to the 1k-line tailwind config, these rules have performance issues
'tailwindcss/no-contradicting-classname': 'off',
'tailwindcss/enforces-shorthand': 'off',
'tailwindcss/no-custom-classname': 'off',
'tailwindcss/no-unnecessary-arbitrary-value': 'off',
'tailwindcss/no-arbitrary-value': 'off',
'tailwindcss/classnames-order': 'warn',
'tailwindcss/enforces-negative-arbitrary-values': 'warn',
'tailwindcss/migration-from-tailwind-2': 'warn',
},
},
{

View File

@@ -29,7 +29,7 @@ const remoteImageURLs = ([hasSetWebPrefix ? new URL(`${process.env.NEXT_PUBLIC_W
const nextConfig: NextConfig = {
basePath: process.env.NEXT_PUBLIC_BASE_PATH || '',
serverExternalPackages: ['esbuild'],
serverExternalPackages: ['esbuild-wasm'],
transpilePackages: ['echarts', 'zrender'],
turbopack: {
rules: codeInspectorPlugin({

View File

@@ -154,9 +154,8 @@
"sharp": "0.33.5",
"sortablejs": "1.15.6",
"string-ts": "2.3.1",
"tailwind-merge": "2.6.1",
"tailwind-merge": "2.6.0",
"tldts": "7.0.17",
"ufo": "1.6.3",
"use-context-selector": "2.0.0",
"uuid": "10.0.0",
"zod": "3.25.76",
@@ -166,21 +165,21 @@
"devDependencies": {
"@antfu/eslint-config": "7.2.0",
"@chromatic-com/storybook": "5.0.0",
"@eslint-react/eslint-plugin": "2.9.4",
"@eslint-react/eslint-plugin": "2.8.1",
"@mdx-js/loader": "3.1.1",
"@mdx-js/react": "3.1.1",
"@next/bundle-analyzer": "16.1.5",
"@next/eslint-plugin-next": "16.1.6",
"@next/mdx": "16.1.5",
"@rgrove/parse-xml": "4.2.0",
"@serwist/turbopack": "9.5.4",
"@serwist/turbopack": "9.5.0",
"@storybook/addon-docs": "10.2.0",
"@storybook/addon-links": "10.2.0",
"@storybook/addon-onboarding": "10.2.0",
"@storybook/addon-themes": "10.2.0",
"@storybook/nextjs-vite": "10.2.0",
"@storybook/react": "10.2.0",
"@tanstack/eslint-plugin-query": "5.91.4",
"@tanstack/eslint-plugin-query": "5.91.3",
"@tanstack/react-devtools": "0.9.2",
"@tanstack/react-form-devtools": "0.2.12",
"@tanstack/react-query-devtools": "5.90.2",
@@ -211,13 +210,13 @@
"autoprefixer": "10.4.21",
"code-inspector-plugin": "1.3.6",
"cross-env": "10.1.0",
"esbuild": "0.27.2",
"esbuild-wasm": "0.27.2",
"eslint": "9.39.2",
"eslint-plugin-better-tailwindcss": "4.1.1",
"eslint-plugin-react-hooks": "7.0.1",
"eslint-plugin-react-refresh": "0.5.0",
"eslint-plugin-react-refresh": "0.4.26",
"eslint-plugin-sonarjs": "3.0.6",
"eslint-plugin-storybook": "10.2.6",
"eslint-plugin-storybook": "10.2.1",
"eslint-plugin-tailwindcss": "3.18.2",
"husky": "9.1.7",
"jsdom": "27.3.0",
"jsdom-testing-mocks": "1.16.0",
@@ -227,9 +226,9 @@
"postcss": "8.5.6",
"react-scan": "0.4.3",
"sass": "1.93.2",
"serwist": "9.5.4",
"serwist": "9.5.0",
"storybook": "10.2.0",
"tailwindcss": "3.4.19",
"tailwindcss": "3.4.18",
"tsx": "4.21.0",
"typescript": "5.9.3",
"uglify-js": "3.19.3",

web/pnpm-lock.yaml (generated, 536 changed lines): file diff suppressed because it is too large.

View File

@@ -0,0 +1,157 @@
/**
* Test suite for the classnames utility function
* This utility combines the classnames library with tailwind-merge
* to handle conditional CSS classes and merge conflicting Tailwind classes
*/
import { cn } from './classnames'
describe('classnames', () => {
/**
* Tests basic classnames library features:
* - String concatenation
* - Array handling
* - Falsy value filtering
* - Object-based conditional classes
*/
it('classnames libs feature', () => {
expect(cn('foo')).toBe('foo')
expect(cn('foo', 'bar')).toBe('foo bar')
expect(cn(['foo', 'bar'])).toBe('foo bar')
expect(cn(undefined)).toBe('')
expect(cn(null)).toBe('')
expect(cn(false)).toBe('')
expect(cn({
foo: true,
bar: false,
baz: true,
})).toBe('foo baz')
})
/**
* Tests tailwind-merge functionality:
* - Conflicting class resolution (last one wins)
* - Modifier handling (hover, focus, etc.)
* - Important prefix (!)
* - Custom color classes
* - Arbitrary values
*/
it('tailwind-merge', () => {
/* eslint-disable tailwindcss/classnames-order */
expect(cn('p-0')).toBe('p-0')
expect(cn('text-right text-center text-left')).toBe('text-left')
expect(cn('pl-4 p-8')).toBe('p-8')
expect(cn('m-[2px] m-[4px]')).toBe('m-[4px]')
expect(cn('m-1 m-[4px]')).toBe('m-[4px]')
expect(cn('overflow-x-auto hover:overflow-x-hidden overflow-x-scroll')).toBe(
'hover:overflow-x-hidden overflow-x-scroll',
)
expect(cn('h-10 h-min')).toBe('h-min')
expect(cn('bg-grey-5 bg-hotpink')).toBe('bg-hotpink')
expect(cn('hover:block hover:inline')).toBe('hover:inline')
expect(cn('font-medium !font-bold')).toBe('font-medium !font-bold')
expect(cn('!font-medium !font-bold')).toBe('!font-bold')
expect(cn('text-gray-100 text-primary-200')).toBe('text-primary-200')
expect(cn('text-some-unknown-color text-components-input-bg-disabled text-primary-200')).toBe('text-primary-200')
expect(cn('bg-some-unknown-color bg-components-input-bg-disabled bg-primary-200')).toBe('bg-primary-200')
expect(cn('border-t border-white/10')).toBe('border-t border-white/10')
expect(cn('border-t border-white')).toBe('border-t border-white')
expect(cn('text-3.5xl text-black')).toBe('text-3.5xl text-black')
})
/**
* Tests the integration of classnames and tailwind-merge:
* - Object-based conditional classes with Tailwind conflict resolution
*/
it('classnames combined with tailwind-merge', () => {
expect(cn('text-right', {
'text-center': true,
})).toBe('text-center')
expect(cn('text-right', {
'text-center': false,
})).toBe('text-right')
})
/**
* Tests handling of multiple mixed argument types:
* - Strings, arrays, and objects in a single call
* - Tailwind merge working across different argument types
*/
it('multiple mixed argument types', () => {
expect(cn('foo', ['bar', 'baz'], { qux: true, quux: false })).toBe('foo bar baz qux')
expect(cn('p-4', ['p-2', 'm-4'], { 'text-left': true, 'text-right': true })).toBe('p-2 m-4 text-right')
})
/**
* Tests nested array handling:
* - Deep array flattening
* - Tailwind merge with nested structures
*/
it('nested arrays', () => {
expect(cn(['foo', ['bar', 'baz']])).toBe('foo bar baz')
expect(cn(['p-4', ['p-2', 'text-center']])).toBe('p-2 text-center')
})
/**
* Tests empty input handling:
* - Empty strings, arrays, and objects
* - Mixed empty and non-empty values
*/
it('empty inputs', () => {
expect(cn('')).toBe('')
expect(cn([])).toBe('')
expect(cn({})).toBe('')
expect(cn('', [], {})).toBe('')
expect(cn('foo', '', 'bar')).toBe('foo bar')
})
/**
* Tests number input handling:
* - Truthy numbers converted to strings
* - Zero treated as falsy
*/
it('numbers as inputs', () => {
expect(cn(1)).toBe('1')
expect(cn(0)).toBe('')
expect(cn('foo', 1, 'bar')).toBe('foo 1 bar')
})
/**
* Tests multiple object arguments:
* - Object merging
* - Tailwind conflict resolution across objects
*/
it('multiple objects', () => {
expect(cn({ foo: true }, { bar: true })).toBe('foo bar')
expect(cn({ foo: true, bar: false }, { bar: true, baz: true })).toBe('foo bar baz')
expect(cn({ 'p-4': true }, { 'p-2': true })).toBe('p-2')
})
/**
* Tests complex edge cases:
* - Mixed falsy values
* - Nested arrays with falsy values
* - Multiple conflicting Tailwind classes
*/
it('complex edge cases', () => {
expect(cn('foo', null, undefined, false, 'bar', 0, 1, '')).toBe('foo bar 1')
expect(cn(['foo', null, ['bar', undefined, 'baz']])).toBe('foo bar baz')
expect(cn('text-sm', { 'text-lg': false, 'text-xl': true }, 'text-2xl')).toBe('text-2xl')
})
/**
* Tests important (!) modifier behavior:
* - Important modifiers in objects
* - Conflict resolution with important prefix
*/
it('important modifier with objects', () => {
expect(cn({ '!font-medium': true }, { '!font-bold': true })).toBe('!font-bold')
expect(cn('font-normal', { '!font-bold': true })).toBe('font-normal !font-bold')
})
})