Compare commits

...

12 Commits

Author SHA1 Message Date
autofix-ci[bot]
344f6be7cd [autofix.ci] apply automated fixes 2026-03-13 10:10:00 +00:00
Yanli 盐粒
f169cf8654 Merge origin/main into yanli/fix-iter-log 2026-03-13 18:07:15 +08:00
dependabot[bot]
20e91990bf chore(deps): bump orjson from 3.11.4 to 3.11.6 in /api (#33380)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2026-03-13 17:33:33 +09:00
Asuka Minato
f38e8cca52 test: [Refactor/Chore] use Testcontainers to do sql test #32454 (#32460) 2026-03-13 17:32:39 +09:00
Coding On Star
00eda73ad1 test: enforce app/components coverage gates in web tests (#33395)
Co-authored-by: CodingOnStar <hanxujiang@dify.com>
2026-03-13 16:31:05 +08:00
Ethan T.
8b40a89add fix: with_debug_recipient() silently drops debug emails when user_id is None or empty string (#33373)
Co-authored-by: autofix-ci[bot] <114827586+autofix-ci[bot]@users.noreply.github.com>
Co-authored-by: Crazywoola <100913391+crazywoola@users.noreply.github.com>
2026-03-13 15:35:02 +08:00
Joel
97776eabff chore: add tracking info of in site message (#33394)
Co-authored-by: Copilot Autofix powered by AI <175728472+Copilot@users.noreply.github.com>
2026-03-13 15:29:24 +08:00
Yanli 盐粒
e76fbcb045 fix: guard loop child node starts 2026-03-10 20:34:07 +08:00
盐粒 Yanli
e6f00a2bf9 Update web/app/components/workflow/utils/top-level-tracing.ts
Co-authored-by: gemini-code-assist[bot] <176961590+gemini-code-assist[bot]@users.noreply.github.com>
2026-03-10 20:13:49 +08:00
Yanli 盐粒
715f3affe5 chore: address review feedback 2026-03-10 19:31:20 +08:00
autofix-ci[bot]
4f73766a21 [autofix.ci] apply automated fixes 2026-03-10 11:18:09 +00:00
Yanli 盐粒
fe90453eed fix: preserve workflow tracing by execution id 2026-03-10 19:14:14 +08:00
25 changed files with 1629 additions and 157 deletions

View File

@@ -62,6 +62,9 @@ jobs:
needs: check-changes
if: needs.check-changes.outputs.web-changed == 'true'
uses: ./.github/workflows/web-tests.yml
with:
base_sha: ${{ github.event_name == 'pull_request' && github.event.pull_request.base.sha || github.event.before }}
head_sha: ${{ github.event_name == 'pull_request' && github.event.pull_request.head.sha || github.sha }}
style-check:
name: Style Check

View File

@@ -2,6 +2,13 @@ name: Web Tests
on:
workflow_call:
inputs:
base_sha:
required: false
type: string
head_sha:
required: false
type: string
permissions:
contents: read
@@ -14,6 +21,8 @@ jobs:
test:
name: Web Tests (${{ matrix.shardIndex }}/${{ matrix.shardTotal }})
runs-on: ubuntu-latest
env:
VITEST_COVERAGE_SCOPE: app-components
strategy:
fail-fast: false
matrix:
@@ -50,6 +59,8 @@ jobs:
if: ${{ !cancelled() }}
needs: [test]
runs-on: ubuntu-latest
env:
VITEST_COVERAGE_SCOPE: app-components
defaults:
run:
shell: bash
@@ -59,6 +70,7 @@ jobs:
- name: Checkout code
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
with:
fetch-depth: 0
persist-credentials: false
- name: Setup web environment
@@ -74,6 +86,12 @@ jobs:
- name: Merge reports
run: pnpm vitest --merge-reports --reporter=json --reporter=agent --coverage
- name: Check app/components diff coverage
env:
BASE_SHA: ${{ inputs.base_sha }}
HEAD_SHA: ${{ inputs.head_sha }}
run: node ./scripts/check-components-diff-coverage.mjs
- name: Coverage Summary
if: always()
id: coverage-summary

View File

@@ -72,8 +72,8 @@ class EmailDeliveryConfig(BaseModel):
body: str
debug_mode: bool = False
def with_debug_recipient(self, user_id: str) -> "EmailDeliveryConfig":
if not user_id:
def with_debug_recipient(self, user_id: str | None) -> "EmailDeliveryConfig":
if user_id is None:
debug_recipients = EmailRecipients(whole_workspace=False, items=[])
return self.model_copy(update={"recipients": debug_recipients})
debug_recipients = EmailRecipients(whole_workspace=False, items=[MemberRecipient(user_id=user_id)])
@@ -141,7 +141,7 @@ def apply_debug_email_recipient(
method: DeliveryChannelConfig,
*,
enabled: bool,
user_id: str,
user_id: str | None,
) -> DeliveryChannelConfig:
if not enabled:
return method
@@ -149,7 +149,7 @@ def apply_debug_email_recipient(
return method
if not method.config.debug_mode:
return method
debug_config = method.config.with_debug_recipient(user_id or "")
debug_config = method.config.with_debug_recipient(user_id)
return method.model_copy(update={"config": debug_config})

View File

@@ -952,7 +952,7 @@ class WorkflowService:
delivery_method = apply_debug_email_recipient(
delivery_method,
enabled=True,
user_id=account.id or "",
user_id=account.id,
)
variable_pool = self._build_human_input_variable_pool(

View File

@@ -0,0 +1,32 @@
"""
Integration tests for AppModelConfig using testcontainers.
These tests validate database-backed model behavior without mocking SQLAlchemy queries.
"""
from uuid import uuid4
from sqlalchemy.orm import Session
from models.model import AppModelConfig
class TestAppModelConfig:
    """Integration tests for AppModelConfig against a real containerized database.

    Uses the ``db_session_with_containers`` fixture (Testcontainers-backed
    SQLAlchemy session) instead of mocking query internals.
    """

    def test_annotation_reply_dict_disabled_without_setting(self, db_session_with_containers: Session) -> None:
        """annotation_reply_dict reports disabled when no AppAnnotationSetting row exists.

        Args:
            db_session_with_containers: project-provided SQLAlchemy session bound
                to the test database container.
        """
        # Arrange: persist a config for an app that has no annotation setting.
        config = AppModelConfig(app_id=str(uuid4()))
        db_session_with_containers.add(config)
        db_session_with_containers.commit()
        try:
            # Act + Assert: with no AppAnnotationSetting present, the property
            # must fall back to the disabled payload.
            assert config.annotation_reply_dict == {"enabled": False}
        finally:
            # Cleanup must run even if the assertion fails; otherwise the row
            # leaks into the shared container database and can break later tests.
            db_session_with_containers.delete(config)
            db_session_with_containers.commit()

64
api/uv.lock generated
View File

@@ -4461,40 +4461,40 @@ wheels = [
[[package]]
name = "orjson"
version = "3.11.4"
version = "3.11.6"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/c6/fe/ed708782d6709cc60eb4c2d8a361a440661f74134675c72990f2c48c785f/orjson-3.11.4.tar.gz", hash = "sha256:39485f4ab4c9b30a3943cfe99e1a213c4776fb69e8abd68f66b83d5a0b0fdc6d", size = 5945188, upload-time = "2025-10-24T15:50:38.027Z" }
sdist = { url = "https://files.pythonhosted.org/packages/70/a3/4e09c61a5f0c521cba0bb433639610ae037437669f1a4cbc93799e731d78/orjson-3.11.6.tar.gz", hash = "sha256:0a54c72259f35299fd033042367df781c2f66d10252955ca1efb7db309b954cb", size = 6175856, upload-time = "2026-01-29T15:13:07.942Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/63/1d/1ea6005fffb56715fd48f632611e163d1604e8316a5bad2288bee9a1c9eb/orjson-3.11.4-cp311-cp311-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:5e59d23cd93ada23ec59a96f215139753fbfe3a4d989549bcb390f8c00370b39", size = 243498, upload-time = "2025-10-24T15:48:48.101Z" },
{ url = "https://files.pythonhosted.org/packages/37/d7/ffed10c7da677f2a9da307d491b9eb1d0125b0307019c4ad3d665fd31f4f/orjson-3.11.4-cp311-cp311-macosx_15_0_arm64.whl", hash = "sha256:5c3aedecfc1beb988c27c79d52ebefab93b6c3921dbec361167e6559aba2d36d", size = 128961, upload-time = "2025-10-24T15:48:49.571Z" },
{ url = "https://files.pythonhosted.org/packages/a2/96/3e4d10a18866d1368f73c8c44b7fe37cc8a15c32f2a7620be3877d4c55a3/orjson-3.11.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:da9e5301f1c2caa2a9a4a303480d79c9ad73560b2e7761de742ab39fe59d9175", size = 130321, upload-time = "2025-10-24T15:48:50.713Z" },
{ url = "https://files.pythonhosted.org/packages/eb/1f/465f66e93f434f968dd74d5b623eb62c657bdba2332f5a8be9f118bb74c7/orjson-3.11.4-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:8873812c164a90a79f65368f8f96817e59e35d0cc02786a5356f0e2abed78040", size = 129207, upload-time = "2025-10-24T15:48:52.193Z" },
{ url = "https://files.pythonhosted.org/packages/28/43/d1e94837543321c119dff277ae8e348562fe8c0fafbb648ef7cb0c67e521/orjson-3.11.4-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5d7feb0741ebb15204e748f26c9638e6665a5fa93c37a2c73d64f1669b0ddc63", size = 136323, upload-time = "2025-10-24T15:48:54.806Z" },
{ url = "https://files.pythonhosted.org/packages/bf/04/93303776c8890e422a5847dd012b4853cdd88206b8bbd3edc292c90102d1/orjson-3.11.4-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:01ee5487fefee21e6910da4c2ee9eef005bee568a0879834df86f888d2ffbdd9", size = 137440, upload-time = "2025-10-24T15:48:56.326Z" },
{ url = "https://files.pythonhosted.org/packages/1e/ef/75519d039e5ae6b0f34d0336854d55544ba903e21bf56c83adc51cd8bf82/orjson-3.11.4-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3d40d46f348c0321df01507f92b95a377240c4ec31985225a6668f10e2676f9a", size = 136680, upload-time = "2025-10-24T15:48:57.476Z" },
{ url = "https://files.pythonhosted.org/packages/b5/18/bf8581eaae0b941b44efe14fee7b7862c3382fbc9a0842132cfc7cf5ecf4/orjson-3.11.4-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:95713e5fc8af84d8edc75b785d2386f653b63d62b16d681687746734b4dfc0be", size = 136160, upload-time = "2025-10-24T15:48:59.631Z" },
{ url = "https://files.pythonhosted.org/packages/c4/35/a6d582766d351f87fc0a22ad740a641b0a8e6fc47515e8614d2e4790ae10/orjson-3.11.4-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:ad73ede24f9083614d6c4ca9a85fe70e33be7bf047ec586ee2363bc7418fe4d7", size = 140318, upload-time = "2025-10-24T15:49:00.834Z" },
{ url = "https://files.pythonhosted.org/packages/76/b3/5a4801803ab2e2e2d703bce1a56540d9f99a9143fbec7bf63d225044fef8/orjson-3.11.4-cp311-cp311-musllinux_1_2_armv7l.whl", hash = "sha256:842289889de515421f3f224ef9c1f1efb199a32d76d8d2ca2706fa8afe749549", size = 406330, upload-time = "2025-10-24T15:49:02.327Z" },
{ url = "https://files.pythonhosted.org/packages/80/55/a8f682f64833e3a649f620eafefee175cbfeb9854fc5b710b90c3bca45df/orjson-3.11.4-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:3b2427ed5791619851c52a1261b45c233930977e7de8cf36de05636c708fa905", size = 149580, upload-time = "2025-10-24T15:49:03.517Z" },
{ url = "https://files.pythonhosted.org/packages/ad/e4/c132fa0c67afbb3eb88274fa98df9ac1f631a675e7877037c611805a4413/orjson-3.11.4-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:3c36e524af1d29982e9b190573677ea02781456b2e537d5840e4538a5ec41907", size = 139846, upload-time = "2025-10-24T15:49:04.761Z" },
{ url = "https://files.pythonhosted.org/packages/54/06/dc3491489efd651fef99c5908e13951abd1aead1257c67f16135f95ce209/orjson-3.11.4-cp311-cp311-win32.whl", hash = "sha256:87255b88756eab4a68ec61837ca754e5d10fa8bc47dc57f75cedfeaec358d54c", size = 135781, upload-time = "2025-10-24T15:49:05.969Z" },
{ url = "https://files.pythonhosted.org/packages/79/b7/5e5e8d77bd4ea02a6ac54c42c818afb01dd31961be8a574eb79f1d2cfb1e/orjson-3.11.4-cp311-cp311-win_amd64.whl", hash = "sha256:e2d5d5d798aba9a0e1fede8d853fa899ce2cb930ec0857365f700dffc2c7af6a", size = 131391, upload-time = "2025-10-24T15:49:07.355Z" },
{ url = "https://files.pythonhosted.org/packages/0f/dc/9484127cc1aa213be398ed735f5f270eedcb0c0977303a6f6ddc46b60204/orjson-3.11.4-cp311-cp311-win_arm64.whl", hash = "sha256:6bb6bb41b14c95d4f2702bce9975fda4516f1db48e500102fc4d8119032ff045", size = 126252, upload-time = "2025-10-24T15:49:08.869Z" },
{ url = "https://files.pythonhosted.org/packages/63/51/6b556192a04595b93e277a9ff71cd0cc06c21a7df98bcce5963fa0f5e36f/orjson-3.11.4-cp312-cp312-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:d4371de39319d05d3f482f372720b841c841b52f5385bd99c61ed69d55d9ab50", size = 243571, upload-time = "2025-10-24T15:49:10.008Z" },
{ url = "https://files.pythonhosted.org/packages/1c/2c/2602392ddf2601d538ff11848b98621cd465d1a1ceb9db9e8043181f2f7b/orjson-3.11.4-cp312-cp312-macosx_15_0_arm64.whl", hash = "sha256:e41fd3b3cac850eaae78232f37325ed7d7436e11c471246b87b2cd294ec94853", size = 128891, upload-time = "2025-10-24T15:49:11.297Z" },
{ url = "https://files.pythonhosted.org/packages/4e/47/bf85dcf95f7a3a12bf223394a4f849430acd82633848d52def09fa3f46ad/orjson-3.11.4-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:600e0e9ca042878c7fdf189cf1b028fe2c1418cc9195f6cb9824eb6ed99cb938", size = 130137, upload-time = "2025-10-24T15:49:12.544Z" },
{ url = "https://files.pythonhosted.org/packages/b4/4d/a0cb31007f3ab6f1fd2a1b17057c7c349bc2baf8921a85c0180cc7be8011/orjson-3.11.4-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:7bbf9b333f1568ef5da42bc96e18bf30fd7f8d54e9ae066d711056add508e415", size = 129152, upload-time = "2025-10-24T15:49:13.754Z" },
{ url = "https://files.pythonhosted.org/packages/f7/ef/2811def7ce3d8576b19e3929fff8f8f0d44bc5eb2e0fdecb2e6e6cc6c720/orjson-3.11.4-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4806363144bb6e7297b8e95870e78d30a649fdc4e23fc84daa80c8ebd366ce44", size = 136834, upload-time = "2025-10-24T15:49:15.307Z" },
{ url = "https://files.pythonhosted.org/packages/00/d4/9aee9e54f1809cec8ed5abd9bc31e8a9631d19460e3b8470145d25140106/orjson-3.11.4-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ad355e8308493f527d41154e9053b86a5be892b3b359a5c6d5d95cda23601cb2", size = 137519, upload-time = "2025-10-24T15:49:16.557Z" },
{ url = "https://files.pythonhosted.org/packages/db/ea/67bfdb5465d5679e8ae8d68c11753aaf4f47e3e7264bad66dc2f2249e643/orjson-3.11.4-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c8a7517482667fb9f0ff1b2f16fe5829296ed7a655d04d68cd9711a4d8a4e708", size = 136749, upload-time = "2025-10-24T15:49:17.796Z" },
{ url = "https://files.pythonhosted.org/packages/01/7e/62517dddcfce6d53a39543cd74d0dccfcbdf53967017c58af68822100272/orjson-3.11.4-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:97eb5942c7395a171cbfecc4ef6701fc3c403e762194683772df4c54cfbb2210", size = 136325, upload-time = "2025-10-24T15:49:19.347Z" },
{ url = "https://files.pythonhosted.org/packages/18/ae/40516739f99ab4c7ec3aaa5cc242d341fcb03a45d89edeeaabc5f69cb2cf/orjson-3.11.4-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:149d95d5e018bdd822e3f38c103b1a7c91f88d38a88aada5c4e9b3a73a244241", size = 140204, upload-time = "2025-10-24T15:49:20.545Z" },
{ url = "https://files.pythonhosted.org/packages/82/18/ff5734365623a8916e3a4037fcef1cd1782bfc14cf0992afe7940c5320bf/orjson-3.11.4-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:624f3951181eb46fc47dea3d221554e98784c823e7069edb5dbd0dc826ac909b", size = 406242, upload-time = "2025-10-24T15:49:21.884Z" },
{ url = "https://files.pythonhosted.org/packages/e1/43/96436041f0a0c8c8deca6a05ebeaf529bf1de04839f93ac5e7c479807aec/orjson-3.11.4-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:03bfa548cf35e3f8b3a96c4e8e41f753c686ff3d8e182ce275b1751deddab58c", size = 150013, upload-time = "2025-10-24T15:49:23.185Z" },
{ url = "https://files.pythonhosted.org/packages/1b/48/78302d98423ed8780479a1e682b9aecb869e8404545d999d34fa486e573e/orjson-3.11.4-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:525021896afef44a68148f6ed8a8bf8375553d6066c7f48537657f64823565b9", size = 139951, upload-time = "2025-10-24T15:49:24.428Z" },
{ url = "https://files.pythonhosted.org/packages/4a/7b/ad613fdcdaa812f075ec0875143c3d37f8654457d2af17703905425981bf/orjson-3.11.4-cp312-cp312-win32.whl", hash = "sha256:b58430396687ce0f7d9eeb3dd47761ca7d8fda8e9eb92b3077a7a353a75efefa", size = 136049, upload-time = "2025-10-24T15:49:25.973Z" },
{ url = "https://files.pythonhosted.org/packages/b9/3c/9cf47c3ff5f39b8350fb21ba65d789b6a1129d4cbb3033ba36c8a9023520/orjson-3.11.4-cp312-cp312-win_amd64.whl", hash = "sha256:c6dbf422894e1e3c80a177133c0dda260f81428f9de16d61041949f6a2e5c140", size = 131461, upload-time = "2025-10-24T15:49:27.259Z" },
{ url = "https://files.pythonhosted.org/packages/c6/3b/e2425f61e5825dc5b08c2a5a2b3af387eaaca22a12b9c8c01504f8614c36/orjson-3.11.4-cp312-cp312-win_arm64.whl", hash = "sha256:d38d2bc06d6415852224fcc9c0bfa834c25431e466dc319f0edd56cca81aa96e", size = 126167, upload-time = "2025-10-24T15:49:28.511Z" },
{ url = "https://files.pythonhosted.org/packages/f3/fd/d6b0a36854179b93ed77839f107c4089d91cccc9f9ba1b752b6e3bac5f34/orjson-3.11.6-cp311-cp311-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:e259e85a81d76d9665f03d6129e09e4435531870de5961ddcd0bf6e3a7fde7d7", size = 250029, upload-time = "2026-01-29T15:11:35.942Z" },
{ url = "https://files.pythonhosted.org/packages/a3/bb/22902619826641cf3b627c24aab62e2ad6b571bdd1d34733abb0dd57f67a/orjson-3.11.6-cp311-cp311-macosx_15_0_arm64.whl", hash = "sha256:52263949f41b4a4822c6b1353bcc5ee2f7109d53a3b493501d3369d6d0e7937a", size = 134518, upload-time = "2026-01-29T15:11:37.347Z" },
{ url = "https://files.pythonhosted.org/packages/72/90/7a818da4bba1de711a9653c420749c0ac95ef8f8651cbc1dca551f462fe0/orjson-3.11.6-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6439e742fa7834a24698d358a27346bb203bff356ae0402e7f5df8f749c621a8", size = 137917, upload-time = "2026-01-29T15:11:38.511Z" },
{ url = "https://files.pythonhosted.org/packages/59/0f/02846c1cac8e205cb3822dd8aa8f9114acda216f41fd1999ace6b543418d/orjson-3.11.6-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:b81ffd68f084b4e993e3867acb554a049fa7787cc8710bbcc1e26965580d99be", size = 134923, upload-time = "2026-01-29T15:11:39.711Z" },
{ url = "https://files.pythonhosted.org/packages/94/cf/aeaf683001b474bb3c3c757073a4231dfdfe8467fceaefa5bfd40902c99f/orjson-3.11.6-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a5a5468e5e60f7ef6d7f9044b06c8f94a3c56ba528c6e4f7f06ae95164b595ec", size = 140752, upload-time = "2026-01-29T15:11:41.347Z" },
{ url = "https://files.pythonhosted.org/packages/fc/fe/dad52d8315a65f084044a0819d74c4c9daf9ebe0681d30f525b0d29a31f0/orjson-3.11.6-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:72c5005eb45bd2535632d4f3bec7ad392832cfc46b62a3021da3b48a67734b45", size = 144201, upload-time = "2026-01-29T15:11:42.537Z" },
{ url = "https://files.pythonhosted.org/packages/36/bc/ab070dd421565b831801077f1e390c4d4af8bfcecafc110336680a33866b/orjson-3.11.6-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0b14dd49f3462b014455a28a4d810d3549bf990567653eb43765cd847df09145", size = 142380, upload-time = "2026-01-29T15:11:44.309Z" },
{ url = "https://files.pythonhosted.org/packages/e6/d8/4b581c725c3a308717f28bf45a9fdac210bca08b67e8430143699413ff06/orjson-3.11.6-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6e0bb2c1ea30ef302f0f89f9bf3e7f9ab5e2af29dc9f80eb87aa99788e4e2d65", size = 145582, upload-time = "2026-01-29T15:11:45.506Z" },
{ url = "https://files.pythonhosted.org/packages/5b/a2/09aab99b39f9a7f175ea8fa29adb9933a3d01e7d5d603cdee7f1c40c8da2/orjson-3.11.6-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:825e0a85d189533c6bff7e2fc417a28f6fcea53d27125c4551979aecd6c9a197", size = 147270, upload-time = "2026-01-29T15:11:46.782Z" },
{ url = "https://files.pythonhosted.org/packages/b8/2f/5ef8eaf7829dc50da3bf497c7775b21ee88437bc8c41f959aa3504ca6631/orjson-3.11.6-cp311-cp311-musllinux_1_2_armv7l.whl", hash = "sha256:b04575417a26530637f6ab4b1f7b4f666eb0433491091da4de38611f97f2fcf3", size = 421222, upload-time = "2026-01-29T15:11:48.106Z" },
{ url = "https://files.pythonhosted.org/packages/3b/b0/dd6b941294c2b5b13da5fdc7e749e58d0c55a5114ab37497155e83050e95/orjson-3.11.6-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:b83eb2e40e8c4da6d6b340ee6b1d6125f5195eb1b0ebb7eac23c6d9d4f92d224", size = 155562, upload-time = "2026-01-29T15:11:49.408Z" },
{ url = "https://files.pythonhosted.org/packages/8e/09/43924331a847476ae2f9a16bd6d3c9dab301265006212ba0d3d7fd58763a/orjson-3.11.6-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:1f42da604ee65a6b87eef858c913ce3e5777872b19321d11e6fc6d21de89b64f", size = 147432, upload-time = "2026-01-29T15:11:50.635Z" },
{ url = "https://files.pythonhosted.org/packages/5d/e9/d9865961081816909f6b49d880749dbbd88425afd7c5bbce0549e2290d77/orjson-3.11.6-cp311-cp311-win32.whl", hash = "sha256:5ae45df804f2d344cffb36c43fdf03c82fb6cd247f5faa41e21891b40dfbf733", size = 139623, upload-time = "2026-01-29T15:11:51.82Z" },
{ url = "https://files.pythonhosted.org/packages/b4/f9/6836edb92f76eec1082919101eb1145d2f9c33c8f2c5e6fa399b82a2aaa8/orjson-3.11.6-cp311-cp311-win_amd64.whl", hash = "sha256:f4295948d65ace0a2d8f2c4ccc429668b7eb8af547578ec882e16bf79b0050b2", size = 136647, upload-time = "2026-01-29T15:11:53.454Z" },
{ url = "https://files.pythonhosted.org/packages/b3/0c/4954082eea948c9ae52ee0bcbaa2f99da3216a71bcc314ab129bde22e565/orjson-3.11.6-cp311-cp311-win_arm64.whl", hash = "sha256:314e9c45e0b81b547e3a1cfa3df3e07a815821b3dac9fe8cb75014071d0c16a4", size = 135327, upload-time = "2026-01-29T15:11:56.616Z" },
{ url = "https://files.pythonhosted.org/packages/14/ba/759f2879f41910b7e5e0cdbd9cf82a4f017c527fb0e972e9869ca7fe4c8e/orjson-3.11.6-cp312-cp312-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:6f03f30cd8953f75f2a439070c743c7336d10ee940da918d71c6f3556af3ddcf", size = 249988, upload-time = "2026-01-29T15:11:58.294Z" },
{ url = "https://files.pythonhosted.org/packages/f0/70/54cecb929e6c8b10104fcf580b0cc7dc551aa193e83787dd6f3daba28bb5/orjson-3.11.6-cp312-cp312-macosx_15_0_arm64.whl", hash = "sha256:af44baae65ef386ad971469a8557a0673bb042b0b9fd4397becd9c2dfaa02588", size = 134445, upload-time = "2026-01-29T15:11:59.819Z" },
{ url = "https://files.pythonhosted.org/packages/f2/6f/ec0309154457b9ba1ad05f11faa4441f76037152f75e1ac577db3ce7ca96/orjson-3.11.6-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c310a48542094e4f7dbb6ac076880994986dda8ca9186a58c3cb70a3514d3231", size = 137708, upload-time = "2026-01-29T15:12:01.488Z" },
{ url = "https://files.pythonhosted.org/packages/20/52/3c71b80840f8bab9cb26417302707b7716b7d25f863f3a541bcfa232fe6e/orjson-3.11.6-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:d8dfa7a5d387f15ecad94cb6b2d2d5f4aeea64efd8d526bfc03c9812d01e1cc0", size = 134798, upload-time = "2026-01-29T15:12:02.705Z" },
{ url = "https://files.pythonhosted.org/packages/30/51/b490a43b22ff736282360bd02e6bded455cf31dfc3224e01cd39f919bbd2/orjson-3.11.6-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ba8daee3e999411b50f8b50dbb0a3071dd1845f3f9a1a0a6fa6de86d1689d84d", size = 140839, upload-time = "2026-01-29T15:12:03.956Z" },
{ url = "https://files.pythonhosted.org/packages/95/bc/4bcfe4280c1bc63c5291bb96f98298845b6355da2226d3400e17e7b51e53/orjson-3.11.6-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f89d104c974eafd7436d7a5fdbc57f7a1e776789959a2f4f1b2eab5c62a339f4", size = 144080, upload-time = "2026-01-29T15:12:05.151Z" },
{ url = "https://files.pythonhosted.org/packages/01/74/22970f9ead9ab1f1b5f8c227a6c3aa8d71cd2c5acd005868a1d44f2362fa/orjson-3.11.6-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b2e2e2456788ca5ea75616c40da06fc885a7dc0389780e8a41bf7c5389ba257b", size = 142435, upload-time = "2026-01-29T15:12:06.641Z" },
{ url = "https://files.pythonhosted.org/packages/29/34/d564aff85847ab92c82ee43a7a203683566c2fca0723a5f50aebbe759603/orjson-3.11.6-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2a42efebc45afabb1448001e90458c4020d5c64fbac8a8dc4045b777db76cb5a", size = 145631, upload-time = "2026-01-29T15:12:08.351Z" },
{ url = "https://files.pythonhosted.org/packages/e7/ef/016957a3890752c4aa2368326ea69fa53cdc1fdae0a94a542b6410dbdf52/orjson-3.11.6-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:71b7cbef8471324966c3738c90ba38775563ef01b512feb5ad4805682188d1b9", size = 147058, upload-time = "2026-01-29T15:12:10.023Z" },
{ url = "https://files.pythonhosted.org/packages/56/cc/9a899c3972085645b3225569f91a30e221f441e5dc8126e6d060b971c252/orjson-3.11.6-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:f8515e5910f454fe9a8e13c2bb9dc4bae4c1836313e967e72eb8a4ad874f0248", size = 421161, upload-time = "2026-01-29T15:12:11.308Z" },
{ url = "https://files.pythonhosted.org/packages/21/a8/767d3fbd6d9b8fdee76974db40619399355fd49bf91a6dd2c4b6909ccf05/orjson-3.11.6-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:300360edf27c8c9bf7047345a94fddf3a8b8922df0ff69d71d854a170cb375cf", size = 155757, upload-time = "2026-01-29T15:12:12.776Z" },
{ url = "https://files.pythonhosted.org/packages/ad/0b/205cd69ac87e2272e13ef3f5f03a3d4657e317e38c1b08aaa2ef97060bbc/orjson-3.11.6-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:caaed4dad39e271adfadc106fab634d173b2bb23d9cf7e67bd645f879175ebfc", size = 147446, upload-time = "2026-01-29T15:12:14.166Z" },
{ url = "https://files.pythonhosted.org/packages/de/c5/dd9f22aa9f27c54c7d05cc32f4580c9ac9b6f13811eeb81d6c4c3f50d6b1/orjson-3.11.6-cp312-cp312-win32.whl", hash = "sha256:955368c11808c89793e847830e1b1007503a5923ddadc108547d3b77df761044", size = 139717, upload-time = "2026-01-29T15:12:15.7Z" },
{ url = "https://files.pythonhosted.org/packages/23/a1/e62fc50d904486970315a1654b8cfb5832eb46abb18cd5405118e7e1fc79/orjson-3.11.6-cp312-cp312-win_amd64.whl", hash = "sha256:2c68de30131481150073d90a5d227a4a421982f42c025ecdfb66157f9579e06f", size = 136711, upload-time = "2026-01-29T15:12:17.055Z" },
{ url = "https://files.pythonhosted.org/packages/04/3d/b4fefad8bdf91e0fe212eb04975aeb36ea92997269d68857efcc7eb1dda3/orjson-3.11.6-cp312-cp312-win_arm64.whl", hash = "sha256:65dfa096f4e3a5e02834b681f539a87fbe85adc82001383c0db907557f666bfc", size = 135212, upload-time = "2026-01-29T15:12:18.3Z" },
]
[[package]]

View File

@@ -0,0 +1,115 @@
import fs from 'node:fs'
import os from 'node:os'
import path from 'node:path'
import { afterEach, describe, expect, it } from 'vitest'
import {
collectComponentCoverageExcludedFiles,
COMPONENT_COVERAGE_EXCLUDE_LABEL,
getComponentCoverageExclusionReasons,
} from '../scripts/component-coverage-filters.mjs'
// Unit tests for the coverage-exclusion heuristics used by the web test
// coverage gate. Each test feeds a (path, file-content) pair to the classifier
// and checks which exclusion reason(s) it returns.
describe('component coverage filters', () => {
describe('getComponentCoverageExclusionReasons', () => {
// A file whose basename marks it as declarations-only (types.ts) is excluded.
it('should exclude type-only files by basename', () => {
expect(
getComponentCoverageExclusionReasons(
'web/app/components/share/text-generation/types.ts',
'export type ShareMode = "run-once" | "run-batch"',
),
).toContain('type-only')
})
// A file consisting solely of re-exports carries no executable logic.
it('should exclude pure barrel files', () => {
expect(
getComponentCoverageExclusionReasons(
'web/app/components/base/amplitude/index.ts',
[
'export { default } from "./AmplitudeProvider"',
'export { resetUser, trackEvent } from "./utils"',
].join('\n'),
),
).toContain('pure-barrel')
})
// Marker comments emitted by code generators (typos included — they match
// the real generated headers verbatim) trigger the 'generated' reason.
it('should exclude generated files from marker comments', () => {
expect(
getComponentCoverageExclusionReasons(
'web/app/components/base/icons/src/vender/workflow/Answer.tsx',
[
'// GENERATE BY script',
'// DON NOT EDIT IT MANUALLY',
'export default function Icon() {',
' return null',
'}',
].join('\n'),
),
).toContain('generated')
})
// Constant-only modules (no function calls at module scope) are static data.
it('should exclude pure static files with exported constants only', () => {
expect(
getComponentCoverageExclusionReasons(
'web/app/components/workflow/note-node/constants.ts',
[
'import { NoteTheme } from "./types"',
'export const CUSTOM_NOTE_NODE = "custom-note"',
'export const THEME_MAP = {',
' [NoteTheme.blue]: { title: "bg-blue-100" },',
'}',
].join('\n'),
),
).toContain('pure-static')
})
// Negative case: a module that runs code (validate("x") executes at import
// time) must yield no exclusion reasons and stay covered.
it('should keep runtime logic files tracked', () => {
expect(
getComponentCoverageExclusionReasons(
'web/app/components/workflow/nodes/trigger-schedule/default.ts',
[
'const validate = (value: string) => value.trim()',
'export const nodeDefault = {',
' value: validate("x"),',
'}',
].join('\n'),
),
).toEqual([])
})
})
// Filesystem-level integration: walk a temp directory tree and collect the
// relative paths of all excludable files.
describe('collectComponentCoverageExcludedFiles', () => {
const tempDirs: string[] = []
// Remove every temp tree created by a test, even on failure.
afterEach(() => {
for (const dir of tempDirs)
fs.rmSync(dir, { recursive: true, force: true })
tempDirs.length = 0
})
it('should collect excluded files for coverage config and keep runtime files out', () => {
const rootDir = fs.mkdtempSync(path.join(os.tmpdir(), 'component-coverage-filters-'))
tempDirs.push(rootDir)
// One subdirectory per exclusion category, plus 'runtime' for the
// file that must NOT be excluded (runtime/config.ts calls makeConfig()).
fs.mkdirSync(path.join(rootDir, 'barrel'), { recursive: true })
fs.mkdirSync(path.join(rootDir, 'icons'), { recursive: true })
fs.mkdirSync(path.join(rootDir, 'static'), { recursive: true })
fs.mkdirSync(path.join(rootDir, 'runtime'), { recursive: true })
fs.writeFileSync(path.join(rootDir, 'barrel', 'index.ts'), 'export { default } from "./Button"\n')
fs.writeFileSync(path.join(rootDir, 'icons', 'generated-icon.tsx'), '// @generated\nexport default function Icon() { return null }\n')
fs.writeFileSync(path.join(rootDir, 'static', 'constants.ts'), 'export const COLORS = { primary: "#fff" }\n')
fs.writeFileSync(path.join(rootDir, 'runtime', 'config.ts'), 'export const config = makeConfig()\n')
fs.writeFileSync(path.join(rootDir, 'runtime', 'types.ts'), 'export type Config = { value: string }\n')
// Expected list is sorted by rewritten path and omits runtime/config.ts;
// paths are rewritten under the given pathPrefix.
expect(collectComponentCoverageExcludedFiles(rootDir, { pathPrefix: 'app/components' })).toEqual([
'app/components/barrel/index.ts',
'app/components/icons/generated-icon.tsx',
'app/components/runtime/types.ts',
'app/components/static/constants.ts',
])
})
})
// Human-readable label must enumerate every category handled above.
it('should describe the excluded coverage categories', () => {
expect(COMPONENT_COVERAGE_EXCLUDE_LABEL).toBe('type-only files, pure barrel files, generated files, pure static files')
})
})

View File

@@ -1,7 +1,13 @@
import type { ComponentProps } from 'react'
import type { InSiteMessageActionItem } from './index'
import { fireEvent, render, screen } from '@testing-library/react'
import { afterEach, beforeEach, describe, expect, it, vi } from 'vitest'
import InSiteMessage from './index'
vi.mock('@/app/components/base/amplitude', () => ({
trackEvent: vi.fn(),
}))
describe('InSiteMessage', () => {
const originalLocation = window.location
@@ -18,9 +24,10 @@ describe('InSiteMessage', () => {
vi.unstubAllGlobals()
})
const renderComponent = (actions: InSiteMessageActionItem[], props?: Partial<React.ComponentProps<typeof InSiteMessage>>) => {
const renderComponent = (actions: InSiteMessageActionItem[], props?: Partial<ComponentProps<typeof InSiteMessage>>) => {
return render(
<InSiteMessage
notificationId="test-notification-id"
title="Title\\nLine"
subtitle="Subtitle\\nLine"
main="Main content"
@@ -34,8 +41,8 @@ describe('InSiteMessage', () => {
describe('Rendering', () => {
it('should render title, subtitle, markdown content, and action buttons', () => {
const actions: InSiteMessageActionItem[] = [
{ action: 'close', text: 'Close', type: 'default' },
{ action: 'link', text: 'Learn more', type: 'primary', data: 'https://example.com' },
{ action: 'close', action_name: 'dismiss', text: 'Close', type: 'default' },
{ action: 'link', action_name: 'learn_more', text: 'Learn more', type: 'primary', data: 'https://example.com' },
]
renderComponent(actions, { className: 'custom-message' })
@@ -56,7 +63,7 @@ describe('InSiteMessage', () => {
})
it('should fallback to default header background when headerBgUrl is empty string', () => {
const actions: InSiteMessageActionItem[] = [{ action: 'close', text: 'Close', type: 'default' }]
const actions: InSiteMessageActionItem[] = [{ action: 'close', action_name: 'dismiss', text: 'Close', type: 'default' }]
const { container } = renderComponent(actions, { headerBgUrl: '' })
const header = container.querySelector('div[style]')
@@ -68,7 +75,7 @@ describe('InSiteMessage', () => {
describe('Actions', () => {
it('should call onAction and hide component when close action is clicked', () => {
const onAction = vi.fn()
const closeAction: InSiteMessageActionItem = { action: 'close', text: 'Close', type: 'default' }
const closeAction: InSiteMessageActionItem = { action: 'close', action_name: 'dismiss', text: 'Close', type: 'default' }
renderComponent([closeAction], { onAction })
fireEvent.click(screen.getByRole('button', { name: 'Close' }))
@@ -80,6 +87,7 @@ describe('InSiteMessage', () => {
it('should open a new tab when link action data is a string', () => {
const linkAction: InSiteMessageActionItem = {
action: 'link',
action_name: 'confirm',
text: 'Open link',
type: 'primary',
data: 'https://example.com',
@@ -103,6 +111,7 @@ describe('InSiteMessage', () => {
const linkAction: InSiteMessageActionItem = {
action: 'link',
action_name: 'confirm',
text: 'Open self',
type: 'primary',
data: { href: 'https://example.com/self', target: '_self' },
@@ -118,6 +127,7 @@ describe('InSiteMessage', () => {
it('should not trigger navigation when link data is invalid', () => {
const linkAction: InSiteMessageActionItem = {
action: 'link',
action_name: 'confirm',
text: 'Broken link',
type: 'primary',
data: { rel: 'noopener' },

View File

@@ -1,6 +1,7 @@
'use client'
import { useMemo, useState } from 'react'
import { useEffect, useMemo, useState } from 'react'
import { trackEvent } from '@/app/components/base/amplitude'
import Button from '@/app/components/base/button'
import { MarkdownWithDirective } from '@/app/components/base/markdown-with-directive'
import { cn } from '@/utils/classnames'
@@ -10,12 +11,14 @@ type InSiteMessageButtonType = 'primary' | 'default'
export type InSiteMessageActionItem = {
action: InSiteMessageAction
action_name: string // for tracing and analytics
data?: unknown
text: string
type: InSiteMessageButtonType
}
type InSiteMessageProps = {
notificationId: string
actions: InSiteMessageActionItem[]
className?: string
headerBgUrl?: string
@@ -52,6 +55,7 @@ function normalizeLinkData(data: unknown): { href: string, rel?: string, target?
const DEFAULT_HEADER_BG_URL = '/in-site-message/header-bg.svg'
function InSiteMessage({
notificationId,
actions,
className,
headerBgUrl = DEFAULT_HEADER_BG_URL,
@@ -70,7 +74,17 @@ function InSiteMessage({
}
}, [headerBgUrl])
useEffect(() => {
trackEvent('in_site_message_show', {
notification_id: notificationId,
})
}, [notificationId])
const handleAction = (item: InSiteMessageActionItem) => {
trackEvent('in_site_message_action', {
notification_id: notificationId,
action: item.action_name,
})
onAction?.(item)
if (item.action === 'close') {

View File

@@ -15,11 +15,16 @@ const {
mockNotificationDismiss: vi.fn(),
}))
vi.mock('@/config', () => ({
get IS_CLOUD_EDITION() {
return mockConfig.isCloudEdition
},
}))
vi.mock(import('@/config'), async (importOriginal) => {
const actual = await importOriginal()
return {
...actual,
get IS_CLOUD_EDITION() {
return mockConfig.isCloudEdition
},
}
})
vi.mock('@/service/client', () => ({
consoleQuery: {

View File

@@ -75,6 +75,7 @@ function InSiteMessageNotification() {
const fallbackActions: InSiteMessageActionItem[] = [
{
type: 'default',
action_name: 'dismiss',
text: t('operation.close', { ns: 'common' }),
action: 'close',
},
@@ -96,6 +97,7 @@ function InSiteMessageNotification() {
return (
<InSiteMessage
key={notification.notification_id}
notificationId={notification.notification_id}
title={notification.title}
subtitle={notification.subtitle}
headerBgUrl={notification.title_pic_url}

View File

@@ -449,6 +449,66 @@ describe('useChat', () => {
expect(lastResponse.workflowProcess?.status).toBe('failed')
})
it('should keep separate iteration traces for repeated executions of the same iteration node', async () => {
let callbacks: HookCallbacks
vi.mocked(ssePost).mockImplementation(async (_url, _params, options) => {
callbacks = options as HookCallbacks
})
const { result } = renderHook(() => useChat())
act(() => {
result.current.handleSend('test-url', { query: 'iteration trace test' }, {})
})
act(() => {
callbacks.onWorkflowStarted({ workflow_run_id: 'wr-1', task_id: 't-1' })
callbacks.onIterationStart({ data: { id: 'iter-run-1', node_id: 'iter-1' } })
callbacks.onIterationStart({ data: { id: 'iter-run-2', node_id: 'iter-1' } })
callbacks.onIterationFinish({ data: { id: 'iter-run-1', node_id: 'iter-1', status: 'succeeded' } })
callbacks.onIterationFinish({ data: { id: 'iter-run-2', node_id: 'iter-1', status: 'succeeded' } })
})
const tracing = result.current.chatList[1].workflowProcess?.tracing ?? []
expect(tracing).toHaveLength(2)
expect(tracing).toEqual(expect.arrayContaining([
expect.objectContaining({ id: 'iter-run-1', status: 'succeeded' }),
expect.objectContaining({ id: 'iter-run-2', status: 'succeeded' }),
]))
})
it('should keep separate top-level traces for repeated executions of the same node', async () => {
let callbacks: HookCallbacks
vi.mocked(ssePost).mockImplementation(async (_url, _params, options) => {
callbacks = options as HookCallbacks
})
const { result } = renderHook(() => useChat())
act(() => {
result.current.handleSend('test-url', { query: 'top-level trace test' }, {})
})
act(() => {
callbacks.onWorkflowStarted({ workflow_run_id: 'wr-1', task_id: 't-1' })
callbacks.onNodeStarted({ data: { id: 'node-run-1', node_id: 'node-1', title: 'Node 1' } })
callbacks.onNodeStarted({ data: { id: 'node-run-2', node_id: 'node-1', title: 'Node 1 retry' } })
callbacks.onNodeFinished({ data: { id: 'node-run-1', node_id: 'node-1', status: 'succeeded' } })
callbacks.onNodeFinished({ data: { id: 'node-run-2', node_id: 'node-1', status: 'succeeded' } })
})
const tracing = result.current.chatList[1].workflowProcess?.tracing ?? []
expect(tracing).toHaveLength(2)
expect(tracing).toEqual(expect.arrayContaining([
expect.objectContaining({ id: 'node-run-1', status: 'succeeded' }),
expect.objectContaining({ id: 'node-run-2', status: 'succeeded' }),
]))
})
it('should handle early exits in tracing events during iteration or loop', async () => {
let callbacks: HookCallbacks
@@ -484,7 +544,7 @@ describe('useChat', () => {
callbacks.onNodeFinished({ data: { id: 'n-1', iteration_id: 'iter-1' } })
})
const traceLen1 = result.current.chatList[result.current.chatList.length - 1].workflowProcess?.tracing?.length
const traceLen1 = result.current.chatList.at(-1)!.workflowProcess?.tracing?.length
expect(traceLen1).toBe(0) // None added due to iteration early hits
})
@@ -568,7 +628,7 @@ describe('useChat', () => {
expect(result.current.chatList.some(item => item.id === 'question-m-child')).toBe(true)
expect(result.current.chatList.some(item => item.id === 'm-child')).toBe(true)
expect(result.current.chatList[result.current.chatList.length - 1].content).toBe('child answer')
expect(result.current.chatList.at(-1)!.content).toBe('child answer')
})
it('should strip local file urls before sending payload', () => {
@@ -666,7 +726,7 @@ describe('useChat', () => {
})
expect(onGetConversationMessages).toHaveBeenCalled()
expect(result.current.chatList[result.current.chatList.length - 1].content).toBe('streamed content')
expect(result.current.chatList.at(-1)!.content).toBe('streamed content')
})
it('should clear suggested questions when suggestion fetch fails after completion', async () => {
@@ -712,7 +772,7 @@ describe('useChat', () => {
callbacks.onNodeFinished({ data: { node_id: 'n-loop', id: 'n-loop' } })
})
const latestResponse = result.current.chatList[result.current.chatList.length - 1]
const latestResponse = result.current.chatList.at(-1)!
expect(latestResponse.workflowProcess?.tracing).toHaveLength(0)
})
@@ -739,7 +799,7 @@ describe('useChat', () => {
callbacks.onTTSChunk('m-th-bind', '')
})
const latestResponse = result.current.chatList[result.current.chatList.length - 1]
const latestResponse = result.current.chatList.at(-1)!
expect(latestResponse.id).toBe('m-th-bind')
expect(latestResponse.conversationId).toBe('c-th-bind')
expect(latestResponse.workflowProcess?.status).toBe('succeeded')
@@ -832,7 +892,7 @@ describe('useChat', () => {
callbacks.onCompleted()
})
const lastResponse = result.current.chatList[result.current.chatList.length - 1]
const lastResponse = result.current.chatList.at(-1)!
expect(lastResponse.agent_thoughts![0].thought).toContain('resumed')
expect(lastResponse.workflowProcess?.tracing?.length).toBeGreaterThan(0)

View File

@@ -32,6 +32,7 @@ import {
} from '@/app/components/base/file-uploader/utils'
import { useToastContext } from '@/app/components/base/toast/context'
import { NodeRunningStatus, WorkflowRunningStatus } from '@/app/components/workflow/types'
import { upsertTopLevelTracingNodeOnStart } from '@/app/components/workflow/utils/top-level-tracing'
import useTimestamp from '@/hooks/use-timestamp'
import {
sseGet,
@@ -395,8 +396,7 @@ export const useChat = (
if (!responseItem.workflowProcess?.tracing)
return
const tracing = responseItem.workflowProcess.tracing
const iterationIndex = tracing.findIndex(item => item.node_id === iterationFinishedData.node_id
&& (item.execution_metadata?.parallel_id === iterationFinishedData.execution_metadata?.parallel_id || item.parallel_id === iterationFinishedData.execution_metadata?.parallel_id))!
const iterationIndex = tracing.findIndex(item => item.id === iterationFinishedData.id)!
if (iterationIndex > -1) {
tracing[iterationIndex] = {
...tracing[iterationIndex],
@@ -408,38 +408,34 @@ export const useChat = (
},
onNodeStarted: ({ data: nodeStartedData }) => {
updateChatTreeNode(messageId, (responseItem) => {
if (params.loop_id)
return
if (!responseItem.workflowProcess)
return
if (!responseItem.workflowProcess.tracing)
responseItem.workflowProcess.tracing = []
const currentIndex = responseItem.workflowProcess.tracing.findIndex(item => item.node_id === nodeStartedData.node_id)
// if the node is already started, update the node
if (currentIndex > -1) {
responseItem.workflowProcess.tracing[currentIndex] = {
...nodeStartedData,
status: NodeRunningStatus.Running,
}
}
else {
if (nodeStartedData.iteration_id)
return
responseItem.workflowProcess.tracing.push({
...nodeStartedData,
status: WorkflowRunningStatus.Running,
})
}
upsertTopLevelTracingNodeOnStart(responseItem.workflowProcess.tracing, {
...nodeStartedData,
status: WorkflowRunningStatus.Running,
})
})
},
onNodeFinished: ({ data: nodeFinishedData }) => {
updateChatTreeNode(messageId, (responseItem) => {
if (params.loop_id)
return
if (!responseItem.workflowProcess?.tracing)
return
if (nodeFinishedData.iteration_id)
return
if (nodeFinishedData.loop_id)
return
const currentIndex = responseItem.workflowProcess.tracing.findIndex((item) => {
if (!item.execution_metadata?.parallel_id)
return item.id === nodeFinishedData.id
@@ -481,8 +477,7 @@ export const useChat = (
if (!responseItem.workflowProcess?.tracing)
return
const tracing = responseItem.workflowProcess.tracing
const loopIndex = tracing.findIndex(item => item.node_id === loopFinishedData.node_id
&& (item.execution_metadata?.parallel_id === loopFinishedData.execution_metadata?.parallel_id || item.parallel_id === loopFinishedData.execution_metadata?.parallel_id))!
const loopIndex = tracing.findIndex(item => item.id === loopFinishedData.id)!
if (loopIndex > -1) {
tracing[loopIndex] = {
...tracing[loopIndex],
@@ -558,7 +553,7 @@ export const useChat = (
{},
otherOptions,
)
}, [updateChatTreeNode, handleResponding, createAudioPlayerManager, config?.suggested_questions_after_answer])
}, [updateChatTreeNode, handleResponding, createAudioPlayerManager, config?.suggested_questions_after_answer, params.loop_id])
const updateCurrentQAOnTree = useCallback(({
parentId,
@@ -948,12 +943,13 @@ export const useChat = (
},
onIterationFinish: ({ data: iterationFinishedData }) => {
const tracing = responseItem.workflowProcess!.tracing!
const iterationIndex = tracing.findIndex(item => item.node_id === iterationFinishedData.node_id
&& (item.execution_metadata?.parallel_id === iterationFinishedData.execution_metadata?.parallel_id || item.parallel_id === iterationFinishedData.execution_metadata?.parallel_id))!
tracing[iterationIndex] = {
...tracing[iterationIndex],
...iterationFinishedData,
status: WorkflowRunningStatus.Succeeded,
const iterationIndex = tracing.findIndex(item => item.id === iterationFinishedData.id)!
if (iterationIndex > -1) {
tracing[iterationIndex] = {
...tracing[iterationIndex],
...iterationFinishedData,
status: WorkflowRunningStatus.Succeeded,
}
}
updateCurrentQAOnTree({
@@ -964,30 +960,19 @@ export const useChat = (
})
},
onNodeStarted: ({ data: nodeStartedData }) => {
// `data` is the outer send payload for this request; loop child runs should not emit top-level node traces here.
if (data.loop_id)
return
if (!responseItem.workflowProcess)
return
if (!responseItem.workflowProcess.tracing)
responseItem.workflowProcess.tracing = []
const currentIndex = responseItem.workflowProcess.tracing.findIndex(item => item.node_id === nodeStartedData.node_id)
if (currentIndex > -1) {
responseItem.workflowProcess.tracing[currentIndex] = {
...nodeStartedData,
status: NodeRunningStatus.Running,
}
}
else {
if (nodeStartedData.iteration_id)
return
if (data.loop_id)
return
responseItem.workflowProcess.tracing.push({
...nodeStartedData,
status: WorkflowRunningStatus.Running,
})
}
upsertTopLevelTracingNodeOnStart(responseItem.workflowProcess.tracing, {
...nodeStartedData,
status: WorkflowRunningStatus.Running,
})
updateCurrentQAOnTree({
placeholderQuestionId,
questionItem,
@@ -996,10 +981,14 @@ export const useChat = (
})
},
onNodeFinished: ({ data: nodeFinishedData }) => {
// Use the outer request payload here as well so loop child runs skip top-level finish handling entirely.
if (data.loop_id)
return
if (nodeFinishedData.iteration_id)
return
if (data.loop_id)
if (nodeFinishedData.loop_id)
return
const currentIndex = responseItem.workflowProcess!.tracing!.findIndex((item) => {
@@ -1045,12 +1034,13 @@ export const useChat = (
},
onLoopFinish: ({ data: loopFinishedData }) => {
const tracing = responseItem.workflowProcess!.tracing!
const loopIndex = tracing.findIndex(item => item.node_id === loopFinishedData.node_id
&& (item.execution_metadata?.parallel_id === loopFinishedData.execution_metadata?.parallel_id || item.parallel_id === loopFinishedData.execution_metadata?.parallel_id))!
tracing[loopIndex] = {
...tracing[loopIndex],
...loopFinishedData,
status: WorkflowRunningStatus.Succeeded,
const loopIndex = tracing.findIndex(item => item.id === loopFinishedData.id)!
if (loopIndex > -1) {
tracing[loopIndex] = {
...tracing[loopIndex],
...loopFinishedData,
status: WorkflowRunningStatus.Succeeded,
}
}
updateCurrentQAOnTree({

View File

@@ -337,11 +337,12 @@ const Result: FC<IResultProps> = ({
onIterationFinish: ({ data }) => {
setWorkflowProcessData(produce(getWorkflowProcessData()!, (draft) => {
draft.expand = true
const iterationsIndex = draft.tracing.findIndex(item => item.node_id === data.node_id
&& (item.execution_metadata?.parallel_id === data.execution_metadata?.parallel_id || item.parallel_id === data.execution_metadata?.parallel_id))!
draft.tracing[iterationsIndex] = {
...data,
expand: !!data.error,
const iterationsIndex = draft.tracing.findIndex(item => item.id === data.id)
if (iterationsIndex > -1) {
draft.tracing[iterationsIndex] = {
...data,
expand: !!data.error,
}
}
}))
},
@@ -366,11 +367,12 @@ const Result: FC<IResultProps> = ({
onLoopFinish: ({ data }) => {
setWorkflowProcessData(produce(getWorkflowProcessData()!, (draft) => {
draft.expand = true
const loopsIndex = draft.tracing.findIndex(item => item.node_id === data.node_id
&& (item.execution_metadata?.parallel_id === data.execution_metadata?.parallel_id || item.parallel_id === data.execution_metadata?.parallel_id))!
draft.tracing[loopsIndex] = {
...data,
expand: !!data.error,
const loopsIndex = draft.tracing.findIndex(item => item.id === data.id)
if (loopsIndex > -1) {
draft.tracing[loopsIndex] = {
...data,
expand: !!data.error,
}
}
}))
},

View File

@@ -178,6 +178,28 @@ describe('useWorkflowAgentLog', () => {
expect(store.getState().workflowRunningData!.tracing![0].execution_metadata!.agent_log).toHaveLength(1)
})
it('should attach the log to the matching execution id when a node runs multiple times', () => {
const { result, store } = renderWorkflowHook(() => useWorkflowAgentLog(), {
initialStoreState: {
workflowRunningData: baseRunningData({
tracing: [
{ id: 'trace-1', node_id: 'n1', execution_metadata: {} },
{ id: 'trace-2', node_id: 'n1', execution_metadata: {} },
],
}),
},
})
result.current.handleWorkflowAgentLog({
data: { node_id: 'n1', node_execution_id: 'trace-2', message_id: 'm2' },
} as AgentLogResponse)
const tracing = store.getState().workflowRunningData!.tracing!
expect(tracing[0].execution_metadata!.agent_log).toBeUndefined()
expect(tracing[1].execution_metadata!.agent_log).toHaveLength(1)
expect(tracing[1].execution_metadata!.agent_log![0].message_id).toBe('m2')
})
})
describe('useWorkflowNodeHumanInputFormFilled', () => {

View File

@@ -77,15 +77,15 @@ describe('useWorkflowNodeStarted', () => {
initialStoreState: {
workflowRunningData: baseRunningData({
tracing: [
{ node_id: 'n0', status: NodeRunningStatus.Succeeded },
{ node_id: 'n1', status: NodeRunningStatus.Succeeded },
{ id: 'trace-0', node_id: 'n0', status: NodeRunningStatus.Succeeded },
{ id: 'trace-1', node_id: 'n1', status: NodeRunningStatus.Succeeded },
],
}),
},
})
result.current.handleWorkflowNodeStarted(
{ data: { node_id: 'n1' } } as NodeStartedResponse,
{ data: { id: 'trace-1', node_id: 'n1' } } as NodeStartedResponse,
containerParams,
)
@@ -93,6 +93,30 @@ describe('useWorkflowNodeStarted', () => {
expect(tracing).toHaveLength(2)
expect(tracing[1].status).toBe(NodeRunningStatus.Running)
})
it('should append a new tracing entry when the same node starts a new execution id', () => {
const { result, store } = renderWorkflowHook(() => useWorkflowNodeStarted(), {
initialStoreState: {
workflowRunningData: baseRunningData({
tracing: [
{ id: 'trace-0', node_id: 'n0', status: NodeRunningStatus.Succeeded },
{ id: 'trace-1', node_id: 'n1', status: NodeRunningStatus.Succeeded },
],
}),
},
})
result.current.handleWorkflowNodeStarted(
{ data: { id: 'trace-2', node_id: 'n1' } } as NodeStartedResponse,
containerParams,
)
const tracing = store.getState().workflowRunningData!.tracing!
expect(tracing).toHaveLength(3)
expect(tracing[2].id).toBe('trace-2')
expect(tracing[2].node_id).toBe('n1')
expect(tracing[2].status).toBe(NodeRunningStatus.Running)
})
})
describe('useWorkflowNodeIterationStarted', () => {

View File

@@ -14,7 +14,12 @@ export const useWorkflowAgentLog = () => {
} = workflowStore.getState()
setWorkflowRunningData(produce(workflowRunningData!, (draft) => {
const currentIndex = draft.tracing!.findIndex(item => item.node_id === data.node_id)
const currentIndex = draft.tracing!.findIndex((item) => {
if (data.node_execution_id)
return item.id === data.node_execution_id
return item.node_id === data.node_id
})
if (currentIndex > -1) {
const current = draft.tracing![currentIndex]

View File

@@ -33,8 +33,8 @@ export const useWorkflowNodeStarted = () => {
transform,
} = store.getState()
const nodes = getNodes()
const currentIndex = workflowRunningData?.tracing?.findIndex(item => item.node_id === data.node_id)
if (currentIndex && currentIndex > -1) {
const currentIndex = workflowRunningData?.tracing?.findIndex(item => item.id === data.id)
if (currentIndex !== undefined && currentIndex > -1) {
setWorkflowRunningData(produce(workflowRunningData!, (draft) => {
draft.tracing![currentIndex] = {
...data,

View File

@@ -42,6 +42,7 @@ import {
import { useHooksStore } from '../../hooks-store'
import { useWorkflowStore } from '../../store'
import { NodeRunningStatus, WorkflowRunningStatus } from '../../types'
import { upsertTopLevelTracingNodeOnStart } from '../../utils/top-level-tracing'
type GetAbortController = (abortController: AbortController) => void
type SendCallback = {
@@ -486,19 +487,13 @@ export const useChat = (
}
},
onNodeStarted: ({ data }) => {
const currentIndex = responseItem.workflowProcess!.tracing!.findIndex(item => item.node_id === data.node_id)
if (currentIndex > -1) {
responseItem.workflowProcess!.tracing![currentIndex] = {
...data,
status: NodeRunningStatus.Running,
}
}
else {
responseItem.workflowProcess!.tracing!.push({
...data,
status: NodeRunningStatus.Running,
})
}
if (params.loop_id)
return
upsertTopLevelTracingNodeOnStart(responseItem.workflowProcess!.tracing!, {
...data,
status: NodeRunningStatus.Running,
})
updateCurrentQAOnTree({
placeholderQuestionId,
questionItem,
@@ -517,6 +512,9 @@ export const useChat = (
})
},
onNodeFinished: ({ data }) => {
if (params.loop_id)
return
const currentTracingIndex = responseItem.workflowProcess!.tracing!.findIndex(item => item.id === data.id)
if (currentTracingIndex > -1) {
responseItem.workflowProcess!.tracing[currentTracingIndex] = {
@@ -758,8 +756,7 @@ export const useChat = (
if (!responseItem.workflowProcess?.tracing)
return
const tracing = responseItem.workflowProcess.tracing
const iterationIndex = tracing.findIndex(item => item.node_id === iterationFinishedData.node_id
&& (item.execution_metadata?.parallel_id === iterationFinishedData.execution_metadata?.parallel_id || item.parallel_id === iterationFinishedData.execution_metadata?.parallel_id))!
const iterationIndex = tracing.findIndex(item => item.id === iterationFinishedData.id)!
if (iterationIndex > -1) {
tracing[iterationIndex] = {
...tracing[iterationIndex],
@@ -776,22 +773,10 @@ export const useChat = (
if (!responseItem.workflowProcess.tracing)
responseItem.workflowProcess.tracing = []
const currentIndex = responseItem.workflowProcess.tracing.findIndex(item => item.node_id === nodeStartedData.node_id)
if (currentIndex > -1) {
responseItem.workflowProcess.tracing[currentIndex] = {
...nodeStartedData,
status: NodeRunningStatus.Running,
}
}
else {
if (nodeStartedData.iteration_id)
return
responseItem.workflowProcess.tracing.push({
...nodeStartedData,
status: WorkflowRunningStatus.Running,
})
}
upsertTopLevelTracingNodeOnStart(responseItem.workflowProcess.tracing, {
...nodeStartedData,
status: WorkflowRunningStatus.Running,
})
})
},
onNodeFinished: ({ data: nodeFinishedData }) => {
@@ -802,6 +787,9 @@ export const useChat = (
if (nodeFinishedData.iteration_id)
return
if (nodeFinishedData.loop_id)
return
const currentIndex = responseItem.workflowProcess.tracing.findIndex((item) => {
if (!item.execution_metadata?.parallel_id)
return item.id === nodeFinishedData.id
@@ -829,8 +817,7 @@ export const useChat = (
if (!responseItem.workflowProcess?.tracing)
return
const tracing = responseItem.workflowProcess.tracing
const loopIndex = tracing.findIndex(item => item.node_id === loopFinishedData.node_id
&& (item.execution_metadata?.parallel_id === loopFinishedData.execution_metadata?.parallel_id || item.parallel_id === loopFinishedData.execution_metadata?.parallel_id))!
const loopIndex = tracing.findIndex(item => item.id === loopFinishedData.id)!
if (loopIndex > -1) {
tracing[loopIndex] = {
...tracing[loopIndex],

View File

@@ -0,0 +1,133 @@
import type { NodeTracing } from '@/types/workflow'
import { NodeRunningStatus } from '@/app/components/workflow/types'
import { upsertTopLevelTracingNodeOnStart } from './top-level-tracing'
const createTrace = (overrides: Partial<NodeTracing> = {}): NodeTracing => {
  // Fully-populated "succeeded" baseline trace; tests override only the
  // fields they actually assert on.
  const baseTrace: NodeTracing = {
    id: 'trace-1',
    index: 0,
    predecessor_node_id: '',
    node_id: 'node-1',
    node_type: 'llm' as NodeTracing['node_type'],
    title: 'Node 1',
    inputs: {},
    inputs_truncated: false,
    process_data: {},
    process_data_truncated: false,
    outputs: {},
    outputs_truncated: false,
    status: NodeRunningStatus.Succeeded,
    elapsed_time: 0,
    metadata: {
      iterator_length: 0,
      iterator_index: 0,
      loop_length: 0,
      loop_index: 0,
    },
    created_at: 0,
    created_by: {
      id: 'user-1',
      name: 'User',
      email: 'user@example.com',
    },
    finished_at: 0,
  }
  // Shallow merge, identical to `{ ...baseTrace, ...overrides }`.
  return Object.assign({}, baseTrace, overrides)
}
describe('upsertTopLevelTracingNodeOnStart', () => {
  // NOTE: this suite previously ran `vi.clearAllMocks()` in a `beforeEach`
  // hook, but it defines no mocks or spies at all, so the hook was dead
  // setup and has been removed.

  it('should append a new top-level node when no matching trace exists', () => {
    const tracing: NodeTracing[] = []
    const startedNode = createTrace({
      id: 'trace-2',
      node_id: 'node-2',
      status: NodeRunningStatus.Running,
    })

    const updated = upsertTopLevelTracingNodeOnStart(tracing, startedNode)

    expect(updated).toBe(true)
    expect(tracing).toEqual([startedNode])
  })

  it('should update an existing top-level node when the execution id matches', () => {
    const tracing: NodeTracing[] = [
      createTrace({
        id: 'trace-1',
        node_id: 'node-1',
        status: NodeRunningStatus.Succeeded,
      }),
    ]
    const startedNode = createTrace({
      id: 'trace-1',
      node_id: 'node-1',
      status: NodeRunningStatus.Running,
    })

    const updated = upsertTopLevelTracingNodeOnStart(tracing, startedNode)

    // The started event replaces the stale entry wholesale rather than merging.
    expect(updated).toBe(true)
    expect(tracing).toEqual([startedNode])
  })

  it('should append a new top-level node when the same node starts with a new execution id', () => {
    const existingTrace = createTrace({
      id: 'trace-1',
      node_id: 'node-1',
      status: NodeRunningStatus.Succeeded,
    })
    const tracing: NodeTracing[] = [existingTrace]
    const startedNode = createTrace({
      id: 'trace-2',
      node_id: 'node-1',
      status: NodeRunningStatus.Running,
    })

    const updated = upsertTopLevelTracingNodeOnStart(tracing, startedNode)

    // Re-executions of the same node id must keep distinct trace entries.
    expect(updated).toBe(true)
    expect(tracing).toEqual([existingTrace, startedNode])
  })

  it('should ignore nested iteration node starts even when the node id matches a top-level trace', () => {
    const existingTrace = createTrace({
      id: 'top-level-trace',
      node_id: 'node-1',
      status: NodeRunningStatus.Succeeded,
    })
    const tracing: NodeTracing[] = [existingTrace]
    const nestedIterationTrace = createTrace({
      id: 'iteration-trace',
      node_id: 'node-1',
      iteration_id: 'iteration-1',
      status: NodeRunningStatus.Running,
    })

    const updated = upsertTopLevelTracingNodeOnStart(tracing, nestedIterationTrace)

    expect(updated).toBe(false)
    expect(tracing).toEqual([existingTrace])
  })

  it('should ignore nested loop node starts even when the node id matches a top-level trace', () => {
    const existingTrace = createTrace({
      id: 'top-level-trace',
      node_id: 'node-1',
      status: NodeRunningStatus.Succeeded,
    })
    const tracing: NodeTracing[] = [existingTrace]
    const nestedLoopTrace = createTrace({
      id: 'loop-trace',
      node_id: 'node-1',
      loop_id: 'loop-1',
      status: NodeRunningStatus.Running,
    })

    const updated = upsertTopLevelTracingNodeOnStart(tracing, nestedLoopTrace)

    expect(updated).toBe(false)
    expect(tracing).toEqual([existingTrace])
  })
})

View File

@@ -0,0 +1,22 @@
import type { NodeTracing } from '@/types/workflow'
// A trace is "nested" when it ran inside an iteration or loop parent node;
// such traces must never surface as top-level workflow tracing entries.
const isNestedTracingNode = (trace: Pick<NodeTracing, 'iteration_id' | 'loop_id'>) => {
  const { iteration_id, loop_id } = trace
  return !!iteration_id || !!loop_id
}
/**
 * Insert or replace a top-level tracing entry for a node-started event.
 *
 * Entries are keyed by execution id (`startedNode.id`), so repeated runs of
 * the same node id stay as separate traces. Nested iteration/loop child runs
 * are rejected outright.
 *
 * @returns `true` when the tracing list was modified, `false` when the
 *          started node was nested and therefore ignored.
 */
export const upsertTopLevelTracingNodeOnStart = (
  tracing: NodeTracing[],
  startedNode: NodeTracing,
) => {
  if (isNestedTracingNode(startedNode))
    return false

  const existingIndex = tracing.findIndex(trace => trace.id === startedNode.id)
  if (existingIndex === -1) {
    tracing.push(startedNode)
  }
  else {
    // Started events are the authoritative snapshot for an execution; merging
    // would retain stale client-side fields.
    tracing[existingIndex] = startedNode
  }
  return true
}

View File

@@ -0,0 +1,560 @@
import { execFileSync } from 'node:child_process'
import fs from 'node:fs'
import path from 'node:path'
import {
collectComponentCoverageExcludedFiles,
COMPONENT_COVERAGE_EXCLUDE_LABEL,
} from './component-coverage-filters.mjs'
import {
COMPONENTS_GLOBAL_THRESHOLDS,
EXCLUDED_COMPONENT_MODULES,
getComponentModuleThreshold,
} from './components-coverage-thresholds.mjs'
// Path prefixes used to classify changed files (repo-relative vs coverage-report-relative).
const APP_COMPONENTS_PREFIX = 'web/app/components/'
const APP_COMPONENTS_COVERAGE_PREFIX = 'app/components/'
const SHARED_TEST_PREFIX = 'web/__tests__/'
// When true, a PR that changes component sources without touching any test file fails the gate.
const STRICT_TEST_FILE_TOUCH = process.env.STRICT_COMPONENT_TEST_TOUCH === 'true'
const EXCLUDED_MODULES_LABEL = [...EXCLUDED_COMPONENT_MODULES].sort().join(', ')

const repoRoot = repoRootFromCwd()
const webRoot = path.join(repoRoot, 'web')
// Files under app/components that are explicitly opted out of coverage tracking.
const excludedComponentCoverageFiles = new Set(
  collectComponentCoverageExcludedFiles(path.join(webRoot, 'app/components'), { pathPrefix: 'web/app/components' }),
)

// Diff range: BASE_SHA..HEAD_SHA (HEAD_SHA defaults to the current HEAD).
const baseSha = process.env.BASE_SHA?.trim()
const headSha = process.env.HEAD_SHA?.trim() || 'HEAD'
const coverageFinalPath = path.join(webRoot, 'coverage', 'coverage-final.json')

// No usable base commit (unset, or an all-zero SHA as sent for new branches):
// record a skip note in the job summary and exit successfully.
if (!baseSha || /^0+$/.test(baseSha)) {
  appendSummary([
    '### app/components Diff Coverage',
    '',
    'Skipped diff coverage check because `BASE_SHA` was not available.',
  ])
  process.exit(0)
}
// A missing coverage report is a hard failure — the test job should have produced it.
if (!fs.existsSync(coverageFinalPath)) {
  console.error(`Coverage report not found at ${coverageFinalPath}`)
  process.exit(1)
}

const coverage = JSON.parse(fs.readFileSync(coverageFinalPath, 'utf8'))
const changedFiles = getChangedFiles(baseSha, headSha)
// Partition changed files: component sources, the tracked subset, the excluded subset, and test files.
const changedComponentSourceFiles = changedFiles.filter(isAnyComponentSourceFile)
const changedSourceFiles = changedComponentSourceFiles.filter(isTrackedComponentSourceFile)
const changedExcludedSourceFiles = changedComponentSourceFiles.filter(isExcludedComponentSourceFile)
const changedTestFiles = changedFiles.filter(isRelevantTestFile)

// Nothing tracked changed — summarize why (possibly only excluded files changed) and pass.
if (changedSourceFiles.length === 0) {
  appendSummary(buildSkipSummary(changedExcludedSourceFiles))
  process.exit(0)
}

// Index coverage entries by repo-relative path, keeping only tracked component sources.
const coverageEntries = new Map()
for (const [file, entry] of Object.entries(coverage)) {
  const repoRelativePath = normalizeToRepoRelative(entry.path ?? file)
  if (!isTrackedComponentSourceFile(repoRelativePath))
    continue
  coverageEntries.set(repoRelativePath, entry)
}

// Per-file coverage rows plus per-module aggregates.
const fileCoverageRows = []
const moduleCoverageMap = new Map()
for (const [file, entry] of coverageEntries.entries()) {
  const stats = getCoverageStats(entry)
  const moduleName = getModuleName(file)
  fileCoverageRows.push({ file, moduleName, ...stats })
  mergeCoverageStats(moduleCoverageMap, moduleName, stats)
}
const overallCoverage = sumCoverageStats(fileCoverageRows)

// Diff coverage: for each changed tracked file, count which changed executable
// lines are covered. Lines without a hit entry are treated as non-executable.
const diffChanges = getChangedLineMap(baseSha, headSha)
const diffRows = []
for (const [file, changedLines] of diffChanges.entries()) {
  if (!isTrackedComponentSourceFile(file))
    continue
  const entry = coverageEntries.get(file)
  const lineHits = entry ? getLineHits(entry) : {}
  const executableChangedLines = [...changedLines]
    .filter(line => !entry || lineHits[line] !== undefined)
    .sort((a, b) => a - b)
  if (executableChangedLines.length === 0) {
    // File changed, but none of the changed lines are executable — counts as fully covered.
    diffRows.push({
      file,
      moduleName: getModuleName(file),
      total: 0,
      covered: 0,
      uncoveredLines: [],
    })
    continue
  }
  const uncoveredLines = executableChangedLines.filter(line => (lineHits[line] ?? 0) === 0)
  diffRows.push({
    file,
    moduleName: getModuleName(file),
    total: executableChangedLines.length,
    covered: executableChangedLines.length - uncoveredLines.length,
    uncoveredLines,
  })
}

const diffTotals = diffRows.reduce((acc, row) => {
  acc.total += row.total
  acc.covered += row.covered
  return acc
}, { total: 0, covered: 0 })
const diffCoveragePct = percentage(diffTotals.covered, diffTotals.total)
const diffFailures = diffRows.filter(row => row.uncoveredLines.length > 0)

// Threshold evaluation: global thresholds over all tracked files, then
// per-module thresholds where a module has one configured.
const overallThresholdFailures = getThresholdFailures(overallCoverage, COMPONENTS_GLOBAL_THRESHOLDS)
const moduleCoverageRows = [...moduleCoverageMap.entries()]
  .map(([moduleName, stats]) => ({
    moduleName,
    stats,
    thresholds: getComponentModuleThreshold(moduleName),
  }))
  .map(row => ({
    ...row,
    failures: row.thresholds ? getThresholdFailures(row.stats, row.thresholds) : [],
  }))
const moduleThresholdFailures = moduleCoverageRows
  .filter(row => row.failures.length > 0)
  .flatMap(row => row.failures.map(failure => ({
    moduleName: row.moduleName,
    ...failure,
  })))

const hasRelevantTestChanges = changedTestFiles.length > 0
const missingTestTouch = !hasRelevantTestChanges

// Always publish the report to the job summary, pass or fail.
appendSummary(buildSummary({
  overallCoverage,
  overallThresholdFailures,
  moduleCoverageRows,
  moduleThresholdFailures,
  diffRows,
  diffFailures,
  diffCoveragePct,
  changedSourceFiles,
  changedTestFiles,
  missingTestTouch,
}))

// On CI, surface up to 20 uncovered-line failures as GitHub error annotations.
if (diffFailures.length > 0 && process.env.CI) {
  for (const failure of diffFailures.slice(0, 20)) {
    const firstLine = failure.uncoveredLines[0] ?? 1
    console.log(`::error file=${failure.file},line=${firstLine}::Uncovered changed lines: ${formatLineRanges(failure.uncoveredLines)}`)
  }
}

// Fail the job when any gate is violated; missing test-file changes only fail in strict mode.
if (
  overallThresholdFailures.length > 0
  || moduleThresholdFailures.length > 0
  || diffFailures.length > 0
  || (STRICT_TEST_FILE_TOUCH && missingTestTouch)
) {
  process.exit(1)
}
/**
 * Build the markdown lines for the GitHub step summary.
 *
 * All arguments are precomputed by the top-level script; the function also
 * reads `baseSha`, `headSha`, `diffTotals`, label constants, and the
 * threshold constants from the enclosing scope.
 * Returns an array of markdown lines (not yet joined).
 */
function buildSummary({
  overallCoverage,
  overallThresholdFailures,
  moduleCoverageRows,
  moduleThresholdFailures,
  diffRows,
  diffFailures,
  diffCoveragePct,
  changedSourceFiles,
  changedTestFiles,
  missingTestTouch,
}) {
  const lines = [
    '### app/components Diff Coverage',
    '',
    `Compared \`${baseSha.slice(0, 12)}\` -> \`${headSha.slice(0, 12)}\``,
    '',
    `Excluded modules: \`${EXCLUDED_MODULES_LABEL}\``,
    `Excluded file kinds: \`${COMPONENT_COVERAGE_EXCLUDE_LABEL}\``,
    '',
    '| Check | Result | Details |',
    '|---|---:|---|',
    `| Overall tracked lines | ${formatPercent(overallCoverage.lines)} | ${overallCoverage.lines.covered}/${overallCoverage.lines.total}; threshold ${COMPONENTS_GLOBAL_THRESHOLDS.lines}% |`,
    `| Overall tracked statements | ${formatPercent(overallCoverage.statements)} | ${overallCoverage.statements.covered}/${overallCoverage.statements.total}; threshold ${COMPONENTS_GLOBAL_THRESHOLDS.statements}% |`,
    `| Overall tracked functions | ${formatPercent(overallCoverage.functions)} | ${overallCoverage.functions.covered}/${overallCoverage.functions.total}; threshold ${COMPONENTS_GLOBAL_THRESHOLDS.functions}% |`,
    `| Overall tracked branches | ${formatPercent(overallCoverage.branches)} | ${overallCoverage.branches.covered}/${overallCoverage.branches.total}; threshold ${COMPONENTS_GLOBAL_THRESHOLDS.branches}% |`,
    // NOTE(review): reads outer `diffTotals` rather than a parameter — confirm
    // this is intentional (it matches the `diffCoveragePct` passed in).
    `| Changed executable lines | ${formatPercent({ covered: diffTotals.covered, total: diffTotals.total })} | ${diffTotals.covered}/${diffTotals.total} |`,
    '',
  ]
  if (overallThresholdFailures.length > 0) {
    lines.push('Overall thresholds failed:')
    for (const failure of overallThresholdFailures)
      lines.push(`- ${failure.metric}: ${failure.actual.toFixed(2)}% < ${failure.expected}%`)
    lines.push('')
  }
  if (moduleThresholdFailures.length > 0) {
    lines.push('Module thresholds failed:')
    for (const failure of moduleThresholdFailures)
      lines.push(`- ${failure.moduleName} ${failure.metric}: ${failure.actual.toFixed(2)}% < ${failure.expected}%`)
    lines.push('')
  }
  // Module table: failing modules first, then ascending line coverage.
  const moduleRows = moduleCoverageRows
    .map(({ moduleName, stats, thresholds, failures }) => ({
      moduleName,
      lines: percentage(stats.lines.covered, stats.lines.total),
      statements: percentage(stats.statements.covered, stats.statements.total),
      functions: percentage(stats.functions.covered, stats.functions.total),
      branches: percentage(stats.branches.covered, stats.branches.total),
      thresholds,
      failures,
    }))
    .sort((a, b) => {
      if (a.failures.length !== b.failures.length)
        return b.failures.length - a.failures.length
      return a.lines - b.lines || a.moduleName.localeCompare(b.moduleName)
    })
  lines.push('<details><summary>Module coverage</summary>')
  lines.push('')
  lines.push('| Module | Lines | Statements | Functions | Branches | Thresholds | Status |')
  lines.push('|---|---:|---:|---:|---:|---|---|')
  for (const row of moduleRows) {
    const thresholdLabel = row.thresholds
      ? `L${row.thresholds.lines}/S${row.thresholds.statements}/F${row.thresholds.functions}/B${row.thresholds.branches}`
      : 'n/a'
    const status = row.thresholds ? (row.failures.length > 0 ? 'fail' : 'pass') : 'info'
    lines.push(`| ${row.moduleName} | ${row.lines.toFixed(2)}% | ${row.statements.toFixed(2)}% | ${row.functions.toFixed(2)}% | ${row.branches.toFixed(2)}% | ${thresholdLabel} | ${status} |`)
  }
  lines.push('</details>')
  lines.push('')
  // Changed-file table: worst coverage first.
  const changedRows = diffRows
    .filter(row => row.total > 0)
    .sort((a, b) => {
      const aPct = percentage(rowCovered(a), rowTotal(a))
      const bPct = percentage(rowCovered(b), rowTotal(b))
      return aPct - bPct || a.file.localeCompare(b.file)
    })
  lines.push('<details><summary>Changed file coverage</summary>')
  lines.push('')
  lines.push('| File | Module | Changed executable lines | Coverage | Uncovered lines |')
  lines.push('|---|---|---:|---:|---|')
  for (const row of changedRows) {
    const rowPct = percentage(row.covered, row.total)
    lines.push(`| ${row.file.replace('web/', '')} | ${row.moduleName} | ${row.total} | ${rowPct.toFixed(2)}% | ${formatLineRanges(row.uncoveredLines)} |`)
  }
  lines.push('</details>')
  lines.push('')
  if (missingTestTouch) {
    lines.push(`Warning: tracked source files changed under \`web/app/components/\`, but no test files changed under \`web/app/components/**\` or \`web/__tests__/\`.`)
    if (STRICT_TEST_FILE_TOUCH)
      lines.push('`STRICT_COMPONENT_TEST_TOUCH=true` is enabled, so this warning fails the check.')
    lines.push('')
  }
  else {
    lines.push(`Relevant test files changed: ${changedTestFiles.length}`)
    lines.push('')
  }
  if (diffFailures.length > 0) {
    lines.push('Uncovered changed lines:')
    for (const row of diffFailures) {
      lines.push(`- ${row.file.replace('web/', '')}: ${formatLineRanges(row.uncoveredLines)}`)
    }
    lines.push('')
  }
  lines.push(`Changed source files checked: ${changedSourceFiles.length}`)
  lines.push(`Changed executable line coverage: ${diffCoveragePct.toFixed(2)}%`)
  return lines
}
/**
 * Build the markdown summary used when the diff coverage check is skipped.
 * Returns an array of markdown lines.
 */
function buildSkipSummary(changedExcludedSourceFiles) {
  const summary = []
  summary.push('### app/components Diff Coverage')
  summary.push('')
  summary.push(`Excluded modules: \`${EXCLUDED_MODULES_LABEL}\``)
  summary.push(`Excluded file kinds: \`${COMPONENT_COVERAGE_EXCLUDE_LABEL}\``)
  summary.push('')
  if (changedExcludedSourceFiles.length === 0) {
    summary.push('No source changes under tracked `web/app/components/`. Diff coverage check skipped.')
  }
  else {
    summary.push('Only excluded component modules or type-only files changed, so diff coverage check was skipped.')
    summary.push(`Skipped files: ${changedExcludedSourceFiles.length}`)
  }
  return summary
}
/**
 * List added/copied/modified/renamed files between base and head that touch
 * the tracked web paths. Returns trimmed, non-empty path strings.
 */
function getChangedFiles(base, head) {
  const raw = execGit(['diff', '--name-only', '--diff-filter=ACMR', `${base}...${head}`, '--', 'web/app/components', 'web/__tests__'])
  const files = []
  for (const line of raw.split('\n')) {
    const trimmed = line.trim()
    if (trimmed)
      files.push(trimmed)
  }
  return files
}
/**
 * Map each tracked changed component file to the Set of head-side line
 * numbers added or modified between base and head, parsed from a
 * zero-context unified diff.
 */
function getChangedLineMap(base, head) {
  const unifiedDiff = execGit(['diff', '--unified=0', '--no-color', '--diff-filter=ACMR', `${base}...${head}`, '--', 'web/app/components'])
  const changedLinesByFile = new Map()
  let activeFile = null
  for (const diffLine of unifiedDiff.split('\n')) {
    // "+++ b/<path>" introduces the head-side file for subsequent hunks.
    if (diffLine.startsWith('+++ b/')) {
      activeFile = diffLine.slice(6).trim()
      continue
    }
    if (!activeFile || !isTrackedComponentSourceFile(activeFile))
      continue
    const hunkHeader = diffLine.match(/^@@ -\d+(?:,\d+)? \+(\d+)(?:,(\d+))? @@/)
    if (!hunkHeader)
      continue
    const startLine = Number(hunkHeader[1])
    // A hunk header without a count means exactly one line.
    const lineCount = hunkHeader[2] ? Number(hunkHeader[2]) : 1
    if (lineCount === 0)
      continue
    const lineSet = changedLinesByFile.get(activeFile) ?? new Set()
    for (let i = 0; i < lineCount; i += 1)
      lineSet.add(startLine + i)
    changedLinesByFile.set(activeFile, lineSet)
  }
  return changedLinesByFile
}
// A .ts/.tsx file under app/components that is not test-like.
function isAnyComponentSourceFile(filePath) {
  if (!filePath.startsWith(APP_COMPONENTS_PREFIX))
    return false
  if (!/\.(?:ts|tsx)$/.test(filePath))
    return false
  return !isTestLikePath(filePath)
}
// Tracked = component source minus the explicit exclusions.
function isTrackedComponentSourceFile(filePath) {
  if (!isAnyComponentSourceFile(filePath))
    return false
  return !isExcludedComponentSourceFile(filePath)
}
// Excluded when the module is opted out or the file was collected as excluded.
function isExcludedComponentSourceFile(filePath) {
  if (!isAnyComponentSourceFile(filePath))
    return false
  if (EXCLUDED_COMPONENT_MODULES.has(getModuleName(filePath)))
    return true
  return excludedComponentCoverageFiles.has(filePath)
}
// Shared tests always count; component tests count unless their module is excluded.
function isRelevantTestFile(filePath) {
  if (filePath.startsWith(SHARED_TEST_PREFIX))
    return true
  return filePath.startsWith(APP_COMPONENTS_PREFIX)
    && isTestLikePath(filePath)
    && !isExcludedComponentTestFile(filePath)
}
// Only component-module tests can be excluded, and only by module name.
function isExcludedComponentTestFile(filePath) {
  return filePath.startsWith(APP_COMPONENTS_PREFIX)
    && EXCLUDED_COMPONENT_MODULES.has(getModuleName(filePath))
}
// Tests, mocks, stories, and .d.ts declaration files are never coverage targets.
function isTestLikePath(filePath) {
  const testLikePatterns = [
    /(?:^|\/)__tests__\//,
    /(?:^|\/)__mocks__\//,
    /\.(?:spec|test)\.(?:ts|tsx)$/,
    /\.stories\.(?:ts|tsx)$/,
    /\.d\.ts$/,
  ]
  return testLikePatterns.some(pattern => pattern.test(filePath))
}
/**
 * Fold one istanbul coverage entry into covered/total counts per metric.
 * Line hits come from getLineHits; statements/functions/branches from the
 * entry's `s`/`f`/`b` hit maps (branch arrays are flattened).
 */
function getCoverageStats(entry) {
  const countCovered = hits => hits.filter(count => count > 0).length
  const lineValues = Object.values(getLineHits(entry))
  const statementValues = Object.values(entry.s ?? {})
  const functionValues = Object.values(entry.f ?? {})
  const branchValues = Object.values(entry.b ?? {}).flat()
  return {
    lines: { covered: countCovered(lineValues), total: lineValues.length },
    statements: { covered: countCovered(statementValues), total: statementValues.length },
    functions: { covered: countCovered(functionValues), total: functionValues.length },
    branches: { covered: countCovered(branchValues), total: branchValues.length },
  }
}
/**
 * Return a line-number -> hit-count map for an istanbul coverage entry.
 * Prefers the direct `l` map; otherwise derives one from statementMap + `s`,
 * keeping the highest hit count when several statements share a line.
 */
function getLineHits(entry) {
  const direct = entry.l
  if (direct && Object.keys(direct).length > 0)
    return direct
  const derived = {}
  for (const [statementId, statement] of Object.entries(entry.statementMap ?? {})) {
    const startLine = statement?.start?.line
    if (!startLine)
      continue
    const hitCount = entry.s?.[statementId] ?? 0
    const previous = derived[startLine]
    derived[startLine] = previous === undefined ? hitCount : Math.max(previous, hitCount)
  }
  return derived
}
// Accumulate every row into a fresh zeroed stats object.
function sumCoverageStats(rows) {
  return rows.reduce((acc, row) => {
    addCoverageStats(acc, row)
    return acc
  }, createEmptyCoverageStats())
}
// Add stats into the module's running total, creating the entry on first sight.
function mergeCoverageStats(map, moduleName, stats) {
  if (!map.has(moduleName))
    map.set(moduleName, createEmptyCoverageStats())
  addCoverageStats(map.get(moduleName), stats)
}
// Mutate `target` in place, adding `source`'s counts metric by metric.
function addCoverageStats(target, source) {
  const metrics = ['lines', 'statements', 'functions', 'branches']
  for (const metric of metrics) {
    const bucket = target[metric]
    bucket.covered += source[metric].covered
    bucket.total += source[metric].total
  }
}
// Zeroed covered/total pairs for every tracked metric.
function createEmptyCoverageStats() {
  const stats = {}
  for (const metric of ['lines', 'statements', 'functions', 'branches'])
    stats[metric] = { covered: 0, total: 0 }
  return stats
}
// One failure record per metric whose coverage percentage is below its floor.
function getThresholdFailures(stats, thresholds) {
  return ['lines', 'statements', 'functions', 'branches'].flatMap((metric) => {
    const actual = percentage(stats[metric].covered, stats[metric].total)
    const expected = thresholds[metric]
    return actual < expected ? [{ metric, actual, expected }] : []
  })
}
// First directory below app/components, or '(root)' for direct children.
function getModuleName(filePath) {
  const relativePath = filePath.slice(APP_COMPONENTS_PREFIX.length)
  if (!relativePath)
    return '(root)'
  const slashIndex = relativePath.indexOf('/')
  return slashIndex === -1 ? '(root)' : relativePath.slice(0, slashIndex)
}
/**
 * Normalize any coverage-reported path to a repo-relative POSIX path.
 * Empty input stays empty; already-relative tracked paths pass through.
 */
function normalizeToRepoRelative(filePath) {
  if (!filePath)
    return ''
  const alreadyRepoRelative = filePath.startsWith(APP_COMPONENTS_PREFIX)
    || filePath.startsWith(SHARED_TEST_PREFIX)
  if (alreadyRepoRelative)
    return filePath
  // Coverage-relative paths are rooted at web/.
  if (filePath.startsWith(APP_COMPONENTS_COVERAGE_PREFIX))
    return `web/${filePath}`
  const absolutePath = path.isAbsolute(filePath) ? filePath : path.resolve(webRoot, filePath)
  return path.relative(repoRoot, absolutePath).split(path.sep).join('/')
}
// Collapse a sorted list of line numbers into "a, b-c" range notation.
function formatLineRanges(lines) {
  if (!lines || lines.length === 0)
    return ''
  const ranges = []
  let rangeStart = lines[0]
  let rangeEnd = lines[0]
  const flush = () => {
    ranges.push(rangeStart === rangeEnd ? `${rangeStart}` : `${rangeStart}-${rangeEnd}`)
  }
  for (const current of lines.slice(1)) {
    if (current === rangeEnd + 1) {
      rangeEnd = current
    }
    else {
      flush()
      rangeStart = current
      rangeEnd = current
    }
  }
  flush()
  return ranges.join(', ')
}
// An empty denominator counts as fully covered (100%).
function percentage(covered, total) {
  return total === 0 ? 100 : (covered / total) * 100
}
// Render a covered/total pair as "NN.NN%".
function formatPercent(metric) {
  const pct = percentage(metric.covered, metric.total)
  return `${pct.toFixed(2)}%`
}
// Append to the GitHub step summary when available, and always echo to stdout.
function appendSummary(lines) {
  const content = `${lines.join('\n')}\n`
  const summaryPath = process.env.GITHUB_STEP_SUMMARY
  if (summaryPath)
    fs.appendFileSync(summaryPath, content)
  console.log(content)
}
// Run git from the repo root and capture stdout as UTF-8 text.
function execGit(args) {
  const options = { cwd: repoRoot, encoding: 'utf8' }
  return execFileSync('git', args, options)
}
// Resolve the repository root regardless of where the script was invoked.
function repoRootFromCwd() {
  const output = execFileSync('git', ['rev-parse', '--show-toplevel'], {
    cwd: process.cwd(),
    encoding: 'utf8',
  })
  return output.trim()
}
// Named accessor used when computing a row's coverage percentage for sorting.
function rowCovered(row) {
  const { covered } = row
  return covered
}
// Named accessor used when computing a row's coverage percentage for sorting.
function rowTotal(row) {
  const { total } = row
  return total
}

View File

@@ -0,0 +1,316 @@
import fs from 'node:fs'
import path from 'node:path'
import tsParser from '@typescript-eslint/parser'
// Matches .ts/.tsx extensions (used both to test paths and to strip extensions).
const TS_TSX_FILE_PATTERN = /\.(?:ts|tsx)$/
// Basenames (without extension) treated as type-only files.
const TYPE_COVERAGE_EXCLUDE_BASENAMES = new Set([
  'type',
  'types',
  'declarations',
])
// Header markers that identify generated files; only the first few lines of a
// file are scanned. Patterns deliberately include common misspellings found
// in generator banners (e.g. "generate by", "don not edit") — presumably
// matching real headers in the codebase; verify before tightening.
const GENERATED_FILE_COMMENT_PATTERNS = [
  /@generated/i,
  /\bauto-?generated\b/i,
  /\bgenerated by\b/i,
  /\bgenerate by\b/i,
  /\bdo not edit\b/i,
  /\bdon not edit\b/i,
]
// Options handed to @typescript-eslint/parser for best-effort AST parsing.
const PARSER_OPTIONS = {
  ecmaVersion: 'latest',
  sourceType: 'module',
  ecmaFeatures: { jsx: true },
}
// Memoizes collectComponentCoverageExcludedFiles per (rootDir, pathPrefix).
const collectedExcludedFilesCache = new Map()
export const COMPONENT_COVERAGE_EXCLUDE_LABEL = 'type-only files, pure barrel files, generated files, pure static files'
// True when the basename (without .ts/.tsx) is a known type-only name.
export function isTypeCoverageExcludedComponentFile(filePath) {
  const baseName = getPathBaseNameWithoutExtension(filePath)
  return TYPE_COVERAGE_EXCLUDE_BASENAMES.has(baseName)
}
/**
 * Collect every reason this file qualifies for coverage exclusion
 * ('type-only', 'generated', 'pure-barrel', 'pure-static').
 * Ineligible paths (tests, non-TS) never get reasons; AST-based reasons
 * require a non-empty, parseable source string.
 */
export function getComponentCoverageExclusionReasons(filePath, sourceCode) {
  if (!isEligibleComponentSourceFilePath(filePath))
    return []
  const reasons = []
  if (isTypeCoverageExcludedComponentFile(filePath))
    reasons.push('type-only')
  const hasSource = typeof sourceCode === 'string' && sourceCode.length > 0
  if (!hasSource)
    return reasons
  if (isGeneratedComponentFile(sourceCode))
    reasons.push('generated')
  const ast = parseComponentFile(sourceCode)
  if (!ast)
    return reasons
  // A file is classified as at most one of barrel/static, barrel first.
  if (isPureBarrelComponentFile(ast))
    reasons.push('pure-barrel')
  else if (isPureStaticComponentFile(ast))
    reasons.push('pure-static')
  return reasons
}
/**
 * Walk rootDir and return the sorted list of component source files that
 * qualify for coverage exclusion. `options.pathPrefix` (e.g. 'app/components')
 * is prepended to each returned path. Results are cached per
 * (resolved rootDir, prefix) pair so repeated config evaluations stay cheap.
 */
export function collectComponentCoverageExcludedFiles(rootDir, options = {}) {
  const normalizedRootDir = path.resolve(rootDir)
  const pathPrefix = normalizePathPrefix(options.pathPrefix ?? '')
  const cacheKey = `${normalizedRootDir}::${pathPrefix}`
  const cached = collectedExcludedFilesCache.get(cacheKey)
  if (cached)
    return cached
  const files = []
  walkComponentFiles(normalizedRootDir, (absolutePath) => {
    // Exclusion checks operate on the prefixed, POSIX-style relative path.
    const relativePath = path.relative(normalizedRootDir, absolutePath).split(path.sep).join('/')
    const prefixedPath = pathPrefix ? `${pathPrefix}/${relativePath}` : relativePath
    const sourceCode = fs.readFileSync(absolutePath, 'utf8')
    if (getComponentCoverageExclusionReasons(prefixedPath, sourceCode).length > 0)
      files.push(prefixedPath)
  })
  files.sort((a, b) => a.localeCompare(b))
  collectedExcludedFilesCache.set(cacheKey, files)
  return files
}
// Convert backslashes to forward slashes and drop a single trailing slash.
function normalizePathPrefix(pathPrefix) {
  const forwardSlashes = pathPrefix.replace(/\\/g, '/')
  return forwardSlashes.replace(/\/$/, '')
}
/**
 * Depth-first walk of currentDir, invoking onFile(absolutePath) for every
 * eligible component source file. Test/mock directories are skipped entirely;
 * a missing directory is a no-op.
 */
function walkComponentFiles(currentDir, onFile) {
  if (!fs.existsSync(currentDir))
    return
  for (const entry of fs.readdirSync(currentDir, { withFileTypes: true })) {
    const entryPath = path.join(currentDir, entry.name)
    if (entry.isDirectory()) {
      const isTestDir = entry.name === '__tests__' || entry.name === '__mocks__'
      if (!isTestDir)
        walkComponentFiles(entryPath, onFile)
      continue
    }
    if (isEligibleComponentSourceFilePath(entry.name))
      onFile(entryPath)
  }
}
// Eligible = TS/TSX source that is not test-like.
function isEligibleComponentSourceFilePath(filePath) {
  if (!TS_TSX_FILE_PATTERN.test(filePath))
    return false
  return !isTestLikePath(filePath)
}
// Tests, mocks, stories, and .d.ts files never count as component sources.
function isTestLikePath(filePath) {
  if (/(?:^|\/)__tests__\//.test(filePath))
    return true
  if (/(?:^|\/)__mocks__\//.test(filePath))
    return true
  if (/\.(?:spec|test)\.(?:ts|tsx)$/.test(filePath))
    return true
  if (/\.stories\.(?:ts|tsx)$/.test(filePath))
    return true
  return /\.d\.ts$/.test(filePath)
}
// Basename of the path with any .ts/.tsx extension removed; '' for empty input.
function getPathBaseNameWithoutExtension(filePath) {
  if (!filePath)
    return ''
  const segments = filePath.replace(/\\/g, '/').split('/')
  const fileName = segments[segments.length - 1] ?? ''
  return fileName.replace(TS_TSX_FILE_PATTERN, '')
}
// Only inspect the first five lines: generation markers live in the header.
function isGeneratedComponentFile(sourceCode) {
  const headerLines = sourceCode.split('\n').slice(0, 5)
  const leadingText = headerLines.join('\n')
  return GENERATED_FILE_COMMENT_PATTERNS.some(pattern => pattern.test(leadingText))
}
// Parse with @typescript-eslint/parser; unparseable sources yield null so
// callers can skip AST-based checks (exclusion is best-effort).
function parseComponentFile(sourceCode) {
  let ast = null
  try {
    ast = tsParser.parse(sourceCode, PARSER_OPTIONS)
  }
  catch {
    // Deliberately swallowed: a parse failure just disables AST heuristics.
  }
  return ast
}
/**
 * A pure barrel contains only re-exports and type declarations, and must
 * re-export at least one runtime value (type-only re-exports alone don't count).
 */
function isPureBarrelComponentFile(ast) {
  let reExportsRuntimeValue = false
  for (const statement of ast.body) {
    switch (statement.type) {
      case 'ExportAllDeclaration':
        reExportsRuntimeValue = true
        break
      case 'TSInterfaceDeclaration':
      case 'TSTypeAliasDeclaration':
        break
      case 'ExportNamedDeclaration':
        // Only `export ... from '...'` re-exports qualify.
        if (!statement.source)
          return false
        if (statement.exportKind !== 'type')
          reExportsRuntimeValue = true
        break
      default:
        return false
    }
  }
  return reExportsRuntimeValue
}
/**
 * True when every top-level statement is either type-only or a `const`
 * binding / export whose initializer is a static expression, and the file
 * contributes at least one runtime value. Statement order matters: a binding
 * must be accepted before later statements may reference it.
 */
function isPureStaticComponentFile(ast) {
  const importedStaticBindings = collectImportedStaticBindings(ast.body)
  const staticBindings = new Set()
  let hasRuntimeValue = false
  for (const statement of ast.body) {
    if (statement.type === 'ImportDeclaration')
      continue
    if (statement.type === 'TSInterfaceDeclaration' || statement.type === 'TSTypeAliasDeclaration')
      continue
    // Re-exports belong to the barrel classification, not to static files.
    if (statement.type === 'ExportAllDeclaration')
      return false
    if (statement.type === 'ExportNamedDeclaration' && statement.source)
      return false
    if (statement.type === 'ExportDefaultDeclaration') {
      if (!isStaticExpression(statement.declaration, staticBindings, importedStaticBindings))
        return false
      hasRuntimeValue = true
      continue
    }
    if (statement.type === 'ExportNamedDeclaration' && statement.declaration) {
      if (!handleStaticDeclaration(statement.declaration, staticBindings, importedStaticBindings))
        return false
      hasRuntimeValue = true
      continue
    }
    if (statement.type === 'ExportNamedDeclaration' && statement.specifiers.length > 0) {
      // `export { a, b }` is static only if every exported local is a
      // previously-accepted static binding; type-only specifiers don't count.
      const allStaticSpecifiers = statement.specifiers.every((specifier) => {
        if (specifier.type !== 'ExportSpecifier' || specifier.exportKind === 'type')
          return false
        return specifier.local.type === 'Identifier' && staticBindings.has(specifier.local.name)
      })
      if (!allStaticSpecifiers)
        return false
      hasRuntimeValue = true
      continue
    }
    // Any other statement must itself be a static `const` declaration.
    if (!handleStaticDeclaration(statement, staticBindings, importedStaticBindings))
      return false
    hasRuntimeValue = true
  }
  return hasRuntimeValue
}
// Accept only `const` declarations whose every declarator is a plain
// identifier with a fully static initializer; accepted names are added to
// staticBindings for use by later expressions.
function handleStaticDeclaration(statement, staticBindings, importedStaticBindings) {
  const isConstDeclaration = statement.type === 'VariableDeclaration' && statement.kind === 'const'
  if (!isConstDeclaration)
    return false
  for (const declarator of statement.declarations) {
    const isSimpleBinding = declarator.id.type === 'Identifier' && !!declarator.init
    if (!isSimpleBinding)
      return false
    if (!isStaticExpression(declarator.init, staticBindings, importedStaticBindings))
      return false
    staticBindings.add(declarator.id.name)
  }
  return true
}
// Names brought in via `import type` — or from a type-like module per
// isTypeCoverageExcludedComponentFile — are treated as static bindings.
function collectImportedStaticBindings(statements) {
  const importedBindings = new Set()
  for (const statement of statements) {
    if (statement.type !== 'ImportDeclaration')
      continue
    const importSource = String(statement.source.value ?? '')
    const staticImport = statement.importKind === 'type'
      || isTypeCoverageExcludedComponentFile(importSource)
    if (!staticImport)
      continue
    for (const specifier of statement.specifiers) {
      const local = specifier.local
      if (local?.type === 'Identifier')
        importedBindings.add(local.name)
    }
  }
  return importedBindings
}
/**
 * Recursively decide whether an AST expression is "static": computable from
 * literals and already-accepted static bindings without running any code.
 * Calls, `new`, functions, classes, and any unknown node type are not static.
 */
function isStaticExpression(node, staticBindings, importedStaticBindings) {
  switch (node.type) {
    case 'Literal':
      return true
    case 'Identifier':
      return staticBindings.has(node.name) || importedStaticBindings.has(node.name)
    case 'TemplateLiteral':
      return node.expressions.every(expression => isStaticExpression(expression, staticBindings, importedStaticBindings))
    case 'ArrayExpression':
      // Elided elements (holes) are null and count as static.
      return node.elements.every(element => !element || isStaticExpression(element, staticBindings, importedStaticBindings))
    case 'ObjectExpression':
      return node.properties.every((property) => {
        if (property.type === 'SpreadElement')
          return isStaticExpression(property.argument, staticBindings, importedStaticBindings)
        // Methods (and anything that isn't a plain property) are runtime code.
        if (property.type !== 'Property' || property.method)
          return false
        if (property.computed && !isStaticExpression(property.key, staticBindings, importedStaticBindings))
          return false
        // Shorthand `{ name }` only references local static bindings.
        if (property.shorthand)
          return property.value.type === 'Identifier' && staticBindings.has(property.value.name)
        return isStaticExpression(property.value, staticBindings, importedStaticBindings)
      })
    case 'UnaryExpression':
      return isStaticExpression(node.argument, staticBindings, importedStaticBindings)
    case 'BinaryExpression':
    case 'LogicalExpression':
      return isStaticExpression(node.left, staticBindings, importedStaticBindings)
        && isStaticExpression(node.right, staticBindings, importedStaticBindings)
    case 'ConditionalExpression':
      return isStaticExpression(node.test, staticBindings, importedStaticBindings)
        && isStaticExpression(node.consequent, staticBindings, importedStaticBindings)
        && isStaticExpression(node.alternate, staticBindings, importedStaticBindings)
    case 'MemberExpression':
      return isStaticMemberExpression(node, staticBindings, importedStaticBindings)
    case 'ChainExpression':
      return isStaticExpression(node.expression, staticBindings, importedStaticBindings)
    // TypeScript wrapper nodes are transparent: staticness is decided by the
    // inner expression.
    case 'TSAsExpression':
    case 'TSSatisfiesExpression':
    case 'TSTypeAssertion':
    case 'TSNonNullExpression':
      return isStaticExpression(node.expression, staticBindings, importedStaticBindings)
    case 'ParenthesizedExpression':
      return isStaticExpression(node.expression, staticBindings, importedStaticBindings)
    default:
      return false
  }
}
// The object must itself be static; a computed key must be a static
// expression, while plain `.name` access only needs an identifier property.
function isStaticMemberExpression(node, staticBindings, importedStaticBindings) {
  if (!isStaticExpression(node.object, staticBindings, importedStaticBindings))
    return false
  if (node.computed)
    return isStaticExpression(node.property, staticBindings, importedStaticBindings)
  return node.property.type === 'Identifier'
}

View File

@@ -0,0 +1,128 @@
// Floors were set from the app/components baseline captured on 2026-03-13,
// with a small buffer to avoid CI noise on existing code.
// Modules whose coverage is not tracked at all (excluded from the gate).
export const EXCLUDED_COMPONENT_MODULES = new Set([
  'devtools',
  'provider',
])
// Floor (percent) applied to the aggregate of all tracked app/components code.
export const COMPONENTS_GLOBAL_THRESHOLDS = {
  lines: 58,
  statements: 58,
  functions: 58,
  branches: 54,
}
// Per-module floors (percent). Modules absent from this map have no module
// gate. Low floors (e.g. 'share', 'workflow') reflect the captured baseline,
// not a target — raise them as coverage improves.
export const COMPONENT_MODULE_THRESHOLDS = {
  'app': {
    lines: 45,
    statements: 45,
    functions: 50,
    branches: 35,
  },
  'app-sidebar': {
    lines: 95,
    statements: 95,
    functions: 95,
    branches: 90,
  },
  'apps': {
    lines: 90,
    statements: 90,
    functions: 85,
    branches: 80,
  },
  'base': {
    lines: 95,
    statements: 95,
    functions: 90,
    branches: 95,
  },
  'billing': {
    lines: 95,
    statements: 95,
    functions: 95,
    branches: 95,
  },
  'custom': {
    lines: 70,
    statements: 70,
    functions: 70,
    branches: 80,
  },
  'datasets': {
    lines: 95,
    statements: 95,
    functions: 95,
    branches: 90,
  },
  'develop': {
    lines: 95,
    statements: 95,
    functions: 95,
    branches: 90,
  },
  'explore': {
    lines: 95,
    statements: 95,
    functions: 95,
    branches: 85,
  },
  'goto-anything': {
    lines: 90,
    statements: 90,
    functions: 90,
    branches: 90,
  },
  'header': {
    lines: 95,
    statements: 95,
    functions: 95,
    branches: 95,
  },
  'plugins': {
    lines: 90,
    statements: 90,
    functions: 90,
    branches: 85,
  },
  'rag-pipeline': {
    lines: 95,
    statements: 95,
    functions: 95,
    branches: 90,
  },
  'share': {
    lines: 15,
    statements: 15,
    functions: 20,
    branches: 20,
  },
  'signin': {
    lines: 95,
    statements: 95,
    functions: 95,
    branches: 95,
  },
  'tools': {
    lines: 95,
    statements: 95,
    functions: 90,
    branches: 90,
  },
  'workflow': {
    lines: 15,
    statements: 15,
    functions: 10,
    branches: 10,
  },
  'workflow-app': {
    lines: 20,
    statements: 20,
    functions: 25,
    branches: 15,
  },
}
/**
 * Return the threshold config for a module, or null when the module has no
 * module-level gate.
 *
 * Uses an own-property check instead of a bare bracket lookup: module names
 * come from directory names, so a module named e.g. "toString" or
 * "constructor" would otherwise resolve through the prototype chain to an
 * inherited function (truthy, so `?? null` would not catch it) rather than
 * returning null.
 */
export function getComponentModuleThreshold(moduleName) {
  if (!Object.hasOwn(COMPONENT_MODULE_THRESHOLDS, moduleName))
    return null
  return COMPONENT_MODULE_THRESHOLDS[moduleName]
}

View File

@@ -8,15 +8,24 @@ import { defineConfig } from 'vite'
import Inspect from 'vite-plugin-inspect'
import { createCodeInspectorPlugin, createForceInspectorClientInjectionPlugin } from './plugins/vite/code-inspector'
import { customI18nHmrPlugin } from './plugins/vite/custom-i18n-hmr'
import { collectComponentCoverageExcludedFiles } from './scripts/component-coverage-filters.mjs'
import { EXCLUDED_COMPONENT_MODULES } from './scripts/components-coverage-thresholds.mjs'
const projectRoot = path.dirname(fileURLToPath(import.meta.url))
const isCI = !!process.env.CI
const coverageScope = process.env.VITEST_COVERAGE_SCOPE
const browserInitializerInjectTarget = path.resolve(projectRoot, 'app/components/browser-initializer.tsx')
const excludedAppComponentsCoveragePaths = [...EXCLUDED_COMPONENT_MODULES]
.map(moduleName => `app/components/${moduleName}/**`)
export default defineConfig(({ mode }) => {
const isTest = mode === 'test'
const isStorybook = process.env.STORYBOOK === 'true'
|| process.argv.some(arg => arg.toLowerCase().includes('storybook'))
const isAppComponentsCoverage = coverageScope === 'app-components'
const excludedComponentCoverageFiles = isAppComponentsCoverage
? collectComponentCoverageExcludedFiles(path.join(projectRoot, 'app/components'), { pathPrefix: 'app/components' })
: []
return {
plugins: isTest
@@ -82,6 +91,21 @@ export default defineConfig(({ mode }) => {
coverage: {
provider: 'v8',
reporter: isCI ? ['json', 'json-summary'] : ['text', 'json', 'json-summary'],
...(isAppComponentsCoverage
? {
include: ['app/components/**/*.{ts,tsx}'],
exclude: [
'app/components/**/*.d.ts',
'app/components/**/*.spec.{ts,tsx}',
'app/components/**/*.test.{ts,tsx}',
'app/components/**/__tests__/**',
'app/components/**/__mocks__/**',
'app/components/**/*.stories.{ts,tsx}',
...excludedComponentCoverageFiles,
...excludedAppComponentsCoveragePaths,
],
}
: {}),
},
},
}