Compare commits

..

4 Commits

Author SHA1 Message Date
Stephen Zhou
4493a41c50 trigger ci 2026-03-31 16:46:30 +08:00
Stephen Zhou
26d0778da2 Merge branch 'main' into 3-31-vite-task-cache 2026-03-31 14:53:38 +08:00
Stephen Zhou
bf470471f7 trigger ci 2026-03-31 11:17:32 +08:00
Stephen Zhou
f833701a4a ci: enable vite task cache 2026-03-31 11:07:44 +08:00
285 changed files with 3625 additions and 17753 deletions

View File

@@ -1,10 +1,11 @@
name: Setup Web Environment
description: A GitHub Action to set up the web environment using Vite+.
runs:
using: composite
steps:
- name: Setup Vite+
uses: voidzero-dev/setup-vp@20553a7a7429c429a74894104a2835d7fed28a72 # v1.3.0
uses: hyoban/setup-vp@96511aa421048609564ade4427c73d0078d4afc1 # v1.3.0
with:
node-version-file: .nvmrc
cache: true

View File

@@ -106,12 +106,12 @@ jobs:
- name: Web tsslint
if: steps.changed-files.outputs.any_changed == 'true'
working-directory: ./web
run: vp run lint:tss
run: vp run lint:tss --cache
- name: Web type check
if: steps.changed-files.outputs.any_changed == 'true'
working-directory: ./web
run: vp run type-check
run: vp run type-check --cache
- name: Web dead code check
if: steps.changed-files.outputs.any_changed == 'true'

View File

@@ -1,10 +1,10 @@
name: Translate i18n Files with Claude Code
# Note: claude-code-action doesn't support push events directly.
# Push events are bridged by trigger-i18n-sync.yml via repository_dispatch.
on:
repository_dispatch:
types: [i18n-sync]
push:
branches: [main]
paths:
- 'web/i18n/en-US/*.json'
workflow_dispatch:
inputs:
files:
@@ -30,7 +30,7 @@ permissions:
concurrency:
group: translate-i18n-${{ github.event_name }}-${{ github.ref }}
cancel-in-progress: false
cancel-in-progress: ${{ github.event_name == 'push' }}
jobs:
translate:
@@ -67,113 +67,19 @@ jobs:
}
" web/i18n-config/languages.ts | sed 's/[[:space:]]*$//')
generate_changes_json() {
node <<'NODE'
const { execFileSync } = require('node:child_process')
const fs = require('node:fs')
const path = require('node:path')
const repoRoot = process.cwd()
const baseSha = process.env.BASE_SHA || ''
const headSha = process.env.HEAD_SHA || ''
const files = (process.env.CHANGED_FILES || '').split(/\s+/).filter(Boolean)
const englishPath = fileStem => path.join(repoRoot, 'web', 'i18n', 'en-US', `${fileStem}.json`)
const readCurrentJson = (fileStem) => {
const filePath = englishPath(fileStem)
if (!fs.existsSync(filePath))
return null
return JSON.parse(fs.readFileSync(filePath, 'utf8'))
}
const readBaseJson = (fileStem) => {
if (!baseSha)
return null
try {
const relativePath = `web/i18n/en-US/${fileStem}.json`
const content = execFileSync('git', ['show', `${baseSha}:${relativePath}`], { encoding: 'utf8' })
return JSON.parse(content)
}
catch (error) {
return null
}
}
const compareJson = (beforeValue, afterValue) => JSON.stringify(beforeValue) === JSON.stringify(afterValue)
const changes = {}
for (const fileStem of files) {
const currentJson = readCurrentJson(fileStem)
const beforeJson = readBaseJson(fileStem) || {}
const afterJson = currentJson || {}
const added = {}
const updated = {}
const deleted = []
for (const [key, value] of Object.entries(afterJson)) {
if (!(key in beforeJson)) {
added[key] = value
continue
}
if (!compareJson(beforeJson[key], value)) {
updated[key] = {
before: beforeJson[key],
after: value,
}
}
}
for (const key of Object.keys(beforeJson)) {
if (!(key in afterJson))
deleted.push(key)
}
changes[fileStem] = {
fileDeleted: currentJson === null,
added,
updated,
deleted,
}
}
fs.writeFileSync(
'/tmp/i18n-changes.json',
JSON.stringify({
baseSha,
headSha,
files,
changes,
})
)
NODE
}
if [ "${{ github.event_name }}" = "repository_dispatch" ]; then
BASE_SHA="${{ github.event.client_payload.base_sha }}"
HEAD_SHA="${{ github.event.client_payload.head_sha }}"
CHANGED_FILES="${{ github.event.client_payload.changed_files }}"
TARGET_LANGS="$DEFAULT_TARGET_LANGS"
SYNC_MODE="${{ github.event.client_payload.sync_mode || 'incremental' }}"
if [ -n "${{ github.event.client_payload.changes_base64 }}" ]; then
printf '%s' '${{ github.event.client_payload.changes_base64 }}' | base64 -d > /tmp/i18n-changes.json
CHANGES_AVAILABLE="true"
CHANGES_SOURCE="embedded"
elif [ -n "$BASE_SHA" ] && [ -n "$CHANGED_FILES" ]; then
export BASE_SHA HEAD_SHA CHANGED_FILES
generate_changes_json
CHANGES_AVAILABLE="true"
CHANGES_SOURCE="recomputed"
else
printf '%s' '{"baseSha":"","headSha":"","files":[],"changes":{}}' > /tmp/i18n-changes.json
CHANGES_AVAILABLE="false"
CHANGES_SOURCE="unavailable"
if [ "${{ github.event_name }}" = "push" ]; then
BASE_SHA="${{ github.event.before }}"
if [ -z "$BASE_SHA" ] || [ "$BASE_SHA" = "0000000000000000000000000000000000000000" ]; then
BASE_SHA=$(git rev-parse HEAD~1 2>/dev/null || true)
fi
HEAD_SHA="${{ github.sha }}"
if [ -n "$BASE_SHA" ]; then
CHANGED_FILES=$(git diff --name-only "$BASE_SHA" "$HEAD_SHA" -- 'web/i18n/en-US/*.json' 2>/dev/null | sed -n 's@^.*/@@p' | sed 's/\.json$//' | tr '\n' ' ' | sed 's/[[:space:]]*$//')
else
CHANGED_FILES=$(find web/i18n/en-US -maxdepth 1 -type f -name '*.json' -print | sed -n 's@^.*/@@p' | sed 's/\.json$//' | sort | tr '\n' ' ' | sed 's/[[:space:]]*$//')
fi
TARGET_LANGS="$DEFAULT_TARGET_LANGS"
SYNC_MODE="incremental"
else
BASE_SHA=""
HEAD_SHA=$(git rev-parse HEAD)
@@ -198,17 +104,6 @@ jobs:
else
CHANGED_FILES=""
fi
if [ "$SYNC_MODE" = "incremental" ] && [ -n "$CHANGED_FILES" ]; then
export BASE_SHA HEAD_SHA CHANGED_FILES
generate_changes_json
CHANGES_AVAILABLE="true"
CHANGES_SOURCE="local"
else
printf '%s' '{"baseSha":"","headSha":"","files":[],"changes":{}}' > /tmp/i18n-changes.json
CHANGES_AVAILABLE="false"
CHANGES_SOURCE="unavailable"
fi
fi
FILE_ARGS=""
@@ -228,8 +123,6 @@ jobs:
echo "CHANGED_FILES=$CHANGED_FILES"
echo "TARGET_LANGS=$TARGET_LANGS"
echo "SYNC_MODE=$SYNC_MODE"
echo "CHANGES_AVAILABLE=$CHANGES_AVAILABLE"
echo "CHANGES_SOURCE=$CHANGES_SOURCE"
echo "FILE_ARGS=$FILE_ARGS"
echo "LANG_ARGS=$LANG_ARGS"
} >> "$GITHUB_OUTPUT"
@@ -248,7 +141,7 @@ jobs:
show_full_output: ${{ github.event_name == 'workflow_dispatch' }}
prompt: |
You are the i18n sync agent for the Dify repository.
Your job is to keep translations synchronized with the English source files under `${{ github.workspace }}/web/i18n/en-US/`.
Your job is to keep translations synchronized with the English source files under `${{ github.workspace }}/web/i18n/en-US/`, then open a PR with the result.
Use absolute paths at all times:
- Repo root: `${{ github.workspace }}`
@@ -263,15 +156,12 @@ jobs:
- Head SHA: `${{ steps.context.outputs.HEAD_SHA }}`
- Scoped file args: `${{ steps.context.outputs.FILE_ARGS }}`
- Scoped language args: `${{ steps.context.outputs.LANG_ARGS }}`
- Structured change set available: `${{ steps.context.outputs.CHANGES_AVAILABLE }}`
- Structured change set source: `${{ steps.context.outputs.CHANGES_SOURCE }}`
- Structured change set file: `/tmp/i18n-changes.json`
Tool rules:
- Use Read for repository files.
- Use Edit for JSON updates.
- Use Bash only for `pnpm`.
- Do not use Bash for `git`, `gh`, or branch management.
- Use Bash only for `git`, `gh`, `pnpm`, and `date`.
- Run Bash commands one by one. Do not combine commands with `&&`, `||`, pipes, or command substitution.
Required execution plan:
1. Resolve target languages.
@@ -282,25 +172,27 @@ jobs:
- Only process the resolved target languages, never `en-US`.
- Do not touch unrelated i18n files.
- Do not modify `${{ github.workspace }}/web/i18n/en-US/`.
3. Resolve source changes.
- If `Structured change set available` is `true`, read `/tmp/i18n-changes.json` and use it as the source of truth for file-level and key-level changes.
- For each file entry:
- `added` contains new English keys that need translations.
- `updated` contains stale keys whose English source changed; re-translate using the `after` value.
- `deleted` contains keys that should be removed from locale files.
- `fileDeleted: true` means the English file no longer exists; remove the matching locale file if present.
- Read the current English JSON file for any file that still exists so wording, placeholders, and surrounding terminology stay accurate.
- If `Structured change set available` is `false`, treat this as a scoped full sync and use the current English files plus scoped checks as the source of truth.
3. Detect English changes per file.
- Read the current English JSON file for each file in scope.
- If sync mode is `incremental` and `Base SHA` is not empty, run:
`git -C ${{ github.workspace }} show <Base SHA>:web/i18n/en-US/<file>.json`
- If sync mode is `full` or `Base SHA` is empty, skip historical comparison and treat the current English file as the only source of truth for structural sync.
- If the file did not exist at Base SHA, treat all current keys as ADD.
- Compare previous and current English JSON to identify:
- ADD: key only in current
- UPDATE: key exists in both and the English value changed
- DELETE: key only in previous
- Do not rely on a truncated diff file.
4. Run a scoped pre-check before editing:
- `pnpm --dir ${{ github.workspace }}/web run i18n:check ${{ steps.context.outputs.FILE_ARGS }} ${{ steps.context.outputs.LANG_ARGS }}`
- Use this command as the source of truth for missing and extra keys inside the current scope.
5. Apply translations.
- For every target language and scoped file:
- If `fileDeleted` is `true`, remove the locale file if it exists and skip the rest of that file.
- If the locale file does not exist yet, create it with `Write` and then continue with `Edit` as needed.
- ADD missing keys.
- UPDATE stale translations when the English value changed.
- DELETE removed keys. Prefer `pnpm --dir ${{ github.workspace }}/web run i18n:check ${{ steps.context.outputs.FILE_ARGS }} ${{ steps.context.outputs.LANG_ARGS }} --auto-remove` for extra keys so deletions stay in scope.
- For `zh-Hans` and `ja-JP`, if the locale file also changed between Base SHA and Head SHA, preserve manual translations unless they are clearly wrong for the new English value. If in doubt, keep the manual translation.
- Preserve placeholders exactly: `{{variable}}`, `${variable}`, HTML tags, component tags, and variable names.
- Match the existing terminology and register used by each locale.
- Prefer one Edit per file when stable, but prioritize correctness over batching.
@@ -308,119 +200,14 @@ jobs:
- Run `pnpm --dir ${{ github.workspace }}/web lint:fix --quiet -- <relative edited i18n file paths>`
- Run `pnpm --dir ${{ github.workspace }}/web run i18n:check ${{ steps.context.outputs.FILE_ARGS }} ${{ steps.context.outputs.LANG_ARGS }}`
- If verification fails, fix the remaining problems before continuing.
7. Stop after the scoped locale files are updated and verification passes.
- Do not create branches, commits, or pull requests.
7. Create a PR only when there are changes in `web/i18n/`.
- Check `git -C ${{ github.workspace }} status --porcelain -- web/i18n/`
- Create branch `chore/i18n-sync-<timestamp>`
- Commit message: `chore(i18n): sync translations with en-US`
- Push the branch and open a PR against `main`
- PR title: `chore(i18n): sync translations with en-US`
- PR body: summarize files, languages, sync mode, and verification commands
8. If there are no translation changes after verification, do not create a branch, commit, or PR.
claude_args: |
--max-turns 120
--allowedTools "Read,Write,Edit,Bash(pnpm *),Bash(pnpm:*),Glob,Grep"
- name: Prepare branch metadata
id: pr_meta
if: steps.context.outputs.CHANGED_FILES != ''
shell: bash
run: |
if [ -z "$(git -C "${{ github.workspace }}" status --porcelain -- web/i18n/)" ]; then
echo "has_changes=false" >> "$GITHUB_OUTPUT"
exit 0
fi
SCOPE_HASH=$(printf '%s|%s|%s' "${{ steps.context.outputs.CHANGED_FILES }}" "${{ steps.context.outputs.TARGET_LANGS }}" "${{ steps.context.outputs.SYNC_MODE }}" | sha256sum | cut -c1-8)
HEAD_SHORT=$(printf '%s' "${{ steps.context.outputs.HEAD_SHA }}" | cut -c1-12)
BRANCH_NAME="chore/i18n-sync-${HEAD_SHORT}-${SCOPE_HASH}"
{
echo "has_changes=true"
echo "branch_name=$BRANCH_NAME"
} >> "$GITHUB_OUTPUT"
- name: Commit translation changes
if: steps.pr_meta.outputs.has_changes == 'true'
shell: bash
run: |
git -C "${{ github.workspace }}" checkout -B "${{ steps.pr_meta.outputs.branch_name }}"
git -C "${{ github.workspace }}" add web/i18n/
git -C "${{ github.workspace }}" commit -m "chore(i18n): sync translations with en-US"
- name: Push translation branch
if: steps.pr_meta.outputs.has_changes == 'true'
shell: bash
run: |
if git -C "${{ github.workspace }}" ls-remote --exit-code --heads origin "${{ steps.pr_meta.outputs.branch_name }}" >/dev/null 2>&1; then
git -C "${{ github.workspace }}" push --force-with-lease origin "${{ steps.pr_meta.outputs.branch_name }}"
else
git -C "${{ github.workspace }}" push --set-upstream origin "${{ steps.pr_meta.outputs.branch_name }}"
fi
- name: Create or update translation PR
if: steps.pr_meta.outputs.has_changes == 'true'
env:
BRANCH_NAME: ${{ steps.pr_meta.outputs.branch_name }}
FILES_IN_SCOPE: ${{ steps.context.outputs.CHANGED_FILES }}
TARGET_LANGS: ${{ steps.context.outputs.TARGET_LANGS }}
SYNC_MODE: ${{ steps.context.outputs.SYNC_MODE }}
CHANGES_SOURCE: ${{ steps.context.outputs.CHANGES_SOURCE }}
BASE_SHA: ${{ steps.context.outputs.BASE_SHA }}
HEAD_SHA: ${{ steps.context.outputs.HEAD_SHA }}
REPO_NAME: ${{ github.repository }}
shell: bash
run: |
PR_BODY_FILE=/tmp/i18n-pr-body.md
LANG_COUNT=$(printf '%s\n' "$TARGET_LANGS" | wc -w | tr -d ' ')
if [ "$LANG_COUNT" = "0" ]; then
LANG_COUNT="0"
fi
export LANG_COUNT
node <<'NODE' > "$PR_BODY_FILE"
const fs = require('node:fs')
const changesPath = '/tmp/i18n-changes.json'
const changes = fs.existsSync(changesPath)
? JSON.parse(fs.readFileSync(changesPath, 'utf8'))
: { changes: {} }
const filesInScope = (process.env.FILES_IN_SCOPE || '').split(/\s+/).filter(Boolean)
const lines = [
'## Summary',
'',
`- **Files synced**: \`${process.env.FILES_IN_SCOPE || '<none>'}\``,
`- **Languages updated**: ${process.env.TARGET_LANGS || '<none>'} (${process.env.LANG_COUNT} languages)`,
`- **Sync mode**: ${process.env.SYNC_MODE}${process.env.BASE_SHA ? ` (base: \`${process.env.BASE_SHA.slice(0, 10)}\`, head: \`${process.env.HEAD_SHA.slice(0, 10)}\`)` : ` (head: \`${process.env.HEAD_SHA.slice(0, 10)}\`)`}`,
'',
'### Key changes',
]
for (const fileName of filesInScope) {
const fileChange = changes.changes?.[fileName] || { added: {}, updated: {}, deleted: [], fileDeleted: false }
const addedKeys = Object.keys(fileChange.added || {})
const updatedKeys = Object.keys(fileChange.updated || {})
const deletedKeys = fileChange.deleted || []
lines.push(`- \`${fileName}\`: +${addedKeys.length} / ~${updatedKeys.length} / -${deletedKeys.length}${fileChange.fileDeleted ? ' (file deleted in en-US)' : ''}`)
}
lines.push(
'',
'## Verification',
'',
`- \`pnpm --dir web run i18n:check --file ${process.env.FILES_IN_SCOPE} --lang ${process.env.TARGET_LANGS}\``,
`- \`pnpm --dir web lint:fix --quiet -- <edited i18n files>\``,
'',
'## Notes',
'',
'- This PR was generated from structured en-US key changes produced by `trigger-i18n-sync.yml`.',
`- Structured change source: ${process.env.CHANGES_SOURCE || 'unknown'}.`,
'- Branch name is deterministic for the head SHA and scope, so reruns update the same PR instead of opening duplicates.',
'',
'🤖 Generated with [Claude Code](https://claude.com/claude-code)'
)
process.stdout.write(lines.join('\n'))
NODE
EXISTING_PR_NUMBER=$(gh pr list --repo "$REPO_NAME" --head "$BRANCH_NAME" --state open --json number --jq '.[0].number')
if [ -n "$EXISTING_PR_NUMBER" ] && [ "$EXISTING_PR_NUMBER" != "null" ]; then
gh pr edit "$EXISTING_PR_NUMBER" --repo "$REPO_NAME" --title "chore(i18n): sync translations with en-US" --body-file "$PR_BODY_FILE"
else
gh pr create --repo "$REPO_NAME" --head "$BRANCH_NAME" --base main --title "chore(i18n): sync translations with en-US" --body-file "$PR_BODY_FILE"
fi
--max-turns 80
--allowedTools "Read,Write,Edit,Bash(git *),Bash(git:*),Bash(gh *),Bash(gh:*),Bash(pnpm *),Bash(pnpm:*),Bash(date *),Bash(date:*),Glob,Grep"

View File

@@ -1,171 +0,0 @@
name: Trigger i18n Sync on Push
on:
push:
branches: [main]
paths:
- 'web/i18n/en-US/*.json'
permissions:
contents: write
concurrency:
group: trigger-i18n-sync-${{ github.ref }}
cancel-in-progress: true
jobs:
trigger:
if: github.repository == 'langgenius/dify'
runs-on: ubuntu-latest
timeout-minutes: 5
steps:
- name: Checkout repository
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
with:
fetch-depth: 0
- name: Detect changed files and build structured change set
id: detect
shell: bash
run: |
BASE_SHA="${{ github.event.before }}"
if [ -z "$BASE_SHA" ] || [ "$BASE_SHA" = "0000000000000000000000000000000000000000" ]; then
BASE_SHA=$(git rev-parse HEAD~1 2>/dev/null || true)
fi
HEAD_SHA="${{ github.sha }}"
if [ -n "$BASE_SHA" ]; then
CHANGED_FILES=$(git diff --name-only "$BASE_SHA" "$HEAD_SHA" -- 'web/i18n/en-US/*.json' 2>/dev/null | sed -n 's@^.*/@@p' | sed 's/\.json$//' | tr '\n' ' ' | sed 's/[[:space:]]*$//')
else
CHANGED_FILES=$(find web/i18n/en-US -maxdepth 1 -type f -name '*.json' -print | sed -n 's@^.*/@@p' | sed 's/\.json$//' | sort | tr '\n' ' ' | sed 's/[[:space:]]*$//')
fi
export BASE_SHA HEAD_SHA CHANGED_FILES
node <<'NODE'
const { execFileSync } = require('node:child_process')
const fs = require('node:fs')
const path = require('node:path')
const repoRoot = process.cwd()
const baseSha = process.env.BASE_SHA || ''
const headSha = process.env.HEAD_SHA || ''
const files = (process.env.CHANGED_FILES || '').split(/\s+/).filter(Boolean)
const englishPath = fileStem => path.join(repoRoot, 'web', 'i18n', 'en-US', `${fileStem}.json`)
const readCurrentJson = (fileStem) => {
const filePath = englishPath(fileStem)
if (!fs.existsSync(filePath))
return null
return JSON.parse(fs.readFileSync(filePath, 'utf8'))
}
const readBaseJson = (fileStem) => {
if (!baseSha)
return null
try {
const relativePath = `web/i18n/en-US/${fileStem}.json`
const content = execFileSync('git', ['show', `${baseSha}:${relativePath}`], { encoding: 'utf8' })
return JSON.parse(content)
}
catch (error) {
return null
}
}
const compareJson = (beforeValue, afterValue) => JSON.stringify(beforeValue) === JSON.stringify(afterValue)
const changes = {}
for (const fileStem of files) {
const beforeJson = readBaseJson(fileStem) || {}
const afterJson = readCurrentJson(fileStem) || {}
const added = {}
const updated = {}
const deleted = []
for (const [key, value] of Object.entries(afterJson)) {
if (!(key in beforeJson)) {
added[key] = value
continue
}
if (!compareJson(beforeJson[key], value)) {
updated[key] = {
before: beforeJson[key],
after: value,
}
}
}
for (const key of Object.keys(beforeJson)) {
if (!(key in afterJson))
deleted.push(key)
}
changes[fileStem] = {
fileDeleted: readCurrentJson(fileStem) === null,
added,
updated,
deleted,
}
}
fs.writeFileSync(
'/tmp/i18n-changes.json',
JSON.stringify({
baseSha,
headSha,
files,
changes,
})
)
NODE
if [ -n "$CHANGED_FILES" ]; then
echo "has_changes=true" >> "$GITHUB_OUTPUT"
else
echo "has_changes=false" >> "$GITHUB_OUTPUT"
fi
echo "base_sha=$BASE_SHA" >> "$GITHUB_OUTPUT"
echo "head_sha=$HEAD_SHA" >> "$GITHUB_OUTPUT"
echo "changed_files=$CHANGED_FILES" >> "$GITHUB_OUTPUT"
- name: Trigger i18n sync workflow
if: steps.detect.outputs.has_changes == 'true'
uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8.0.0
env:
BASE_SHA: ${{ steps.detect.outputs.base_sha }}
HEAD_SHA: ${{ steps.detect.outputs.head_sha }}
CHANGED_FILES: ${{ steps.detect.outputs.changed_files }}
with:
github-token: ${{ secrets.GITHUB_TOKEN }}
script: |
const fs = require('fs')
const changesJson = fs.readFileSync('/tmp/i18n-changes.json', 'utf8')
const changesBase64 = Buffer.from(changesJson).toString('base64')
const maxEmbeddedChangesChars = 48000
const changesEmbedded = changesBase64.length <= maxEmbeddedChangesChars
if (!changesEmbedded) {
console.log(`Structured change set too large to embed safely (${changesBase64.length} chars). Downstream workflow will regenerate it from git history.`)
}
await github.rest.repos.createDispatchEvent({
owner: context.repo.owner,
repo: context.repo.repo,
event_type: 'i18n-sync',
client_payload: {
changed_files: process.env.CHANGED_FILES,
changes_base64: changesEmbedded ? changesBase64 : '',
changes_embedded: changesEmbedded,
sync_mode: 'incremental',
base_sha: process.env.BASE_SHA,
head_sha: process.env.HEAD_SHA,
},
})

View File

@@ -7,16 +7,15 @@ UUID_NIL = "00000000-0000-0000-0000-000000000000"
DEFAULT_FILE_NUMBER_LIMITS = 3
_IMAGE_EXTENSION_BASE: frozenset[str] = frozenset(("jpg", "jpeg", "png", "webp", "gif", "svg"))
_VIDEO_EXTENSION_BASE: frozenset[str] = frozenset(("mp4", "mov", "mpeg", "webm"))
_AUDIO_EXTENSION_BASE: frozenset[str] = frozenset(("mp3", "m4a", "wav", "amr", "mpga"))
IMAGE_EXTENSIONS = convert_to_lower_and_upper_set({"jpg", "jpeg", "png", "webp", "gif", "svg"})
IMAGE_EXTENSIONS: frozenset[str] = frozenset(convert_to_lower_and_upper_set(_IMAGE_EXTENSION_BASE))
VIDEO_EXTENSIONS: frozenset[str] = frozenset(convert_to_lower_and_upper_set(_VIDEO_EXTENSION_BASE))
AUDIO_EXTENSIONS: frozenset[str] = frozenset(convert_to_lower_and_upper_set(_AUDIO_EXTENSION_BASE))
VIDEO_EXTENSIONS = convert_to_lower_and_upper_set({"mp4", "mov", "mpeg", "webm"})
_UNSTRUCTURED_DOCUMENT_EXTENSION_BASE: frozenset[str] = frozenset(
(
AUDIO_EXTENSIONS = convert_to_lower_and_upper_set({"mp3", "m4a", "wav", "amr", "mpga"})
_doc_extensions: set[str]
if dify_config.ETL_TYPE == "Unstructured":
_doc_extensions = {
"txt",
"markdown",
"md",
@@ -36,10 +35,11 @@ _UNSTRUCTURED_DOCUMENT_EXTENSION_BASE: frozenset[str] = frozenset(
"pptx",
"xml",
"epub",
)
)
_DEFAULT_DOCUMENT_EXTENSION_BASE: frozenset[str] = frozenset(
(
}
if dify_config.UNSTRUCTURED_API_URL:
_doc_extensions.add("ppt")
else:
_doc_extensions = {
"txt",
"markdown",
"md",
@@ -53,17 +53,8 @@ _DEFAULT_DOCUMENT_EXTENSION_BASE: frozenset[str] = frozenset(
"csv",
"vtt",
"properties",
)
)
_doc_extensions: set[str]
if dify_config.ETL_TYPE == "Unstructured":
_doc_extensions = set(_UNSTRUCTURED_DOCUMENT_EXTENSION_BASE)
if dify_config.UNSTRUCTURED_API_URL:
_doc_extensions.add("ppt")
else:
_doc_extensions = set(_DEFAULT_DOCUMENT_EXTENSION_BASE)
DOCUMENT_EXTENSIONS: frozenset[str] = frozenset(convert_to_lower_and_upper_set(_doc_extensions))
}
DOCUMENT_EXTENSIONS: set[str] = convert_to_lower_and_upper_set(_doc_extensions)
# console
COOKIE_NAME_ACCESS_TOKEN = "access_token"

View File

@@ -4,8 +4,8 @@ from urllib.parse import quote
from flask import Response
HTML_MIME_TYPES: frozenset[str] = frozenset(("text/html", "application/xhtml+xml"))
HTML_EXTENSIONS: frozenset[str] = frozenset(("html", "htm"))
HTML_MIME_TYPES = frozenset({"text/html", "application/xhtml+xml"})
HTML_EXTENSIONS = frozenset({"html", "htm"})
def _normalize_mime_type(mime_type: str | None) -> str:

View File

@@ -2,7 +2,7 @@ import flask_restx
from flask_restx import Resource, fields, marshal_with
from flask_restx._http import HTTPStatus
from sqlalchemy import delete, func, select
from sqlalchemy.orm import sessionmaker
from sqlalchemy.orm import Session
from werkzeug.exceptions import Forbidden
from extensions.ext_database import db
@@ -34,7 +34,7 @@ api_key_list_model = console_ns.model(
def _get_resource(resource_id, tenant_id, resource_model):
with sessionmaker(db.engine).begin() as session:
with Session(db.engine) as session:
resource = session.execute(
select(resource_model).filter_by(id=resource_id, tenant_id=tenant_id)
).scalar_one_or_none()

View File

@@ -9,7 +9,7 @@ from graphon.enums import WorkflowExecutionStatus
from graphon.file import helpers as file_helpers
from pydantic import AliasChoices, BaseModel, ConfigDict, Field, computed_field, field_validator
from sqlalchemy import select
from sqlalchemy.orm import sessionmaker
from sqlalchemy.orm import Session
from werkzeug.exceptions import BadRequest
from controllers.common.helpers import FileInfo
@@ -642,7 +642,7 @@ class AppCopyApi(Resource):
args = CopyAppPayload.model_validate(console_ns.payload or {})
with sessionmaker(db.engine, expire_on_commit=False).begin() as session:
with Session(db.engine) as session:
import_service = AppDslService(session)
yaml_content = import_service.export_dsl(app_model=app_model, include_secret=True)
result = import_service.import_app(
@@ -655,6 +655,7 @@ class AppCopyApi(Resource):
icon=args.icon,
icon_background=args.icon_background,
)
session.commit()
# Inherit web app permission from original app
if result.app_id and FeatureService.get_system_features().webapp_auth.enabled:

View File

@@ -1,6 +1,6 @@
from flask_restx import Resource, fields, marshal_with
from pydantic import BaseModel, Field
from sqlalchemy.orm import sessionmaker
from sqlalchemy.orm import Session
from controllers.console.app.wraps import get_app_model
from controllers.console.wraps import (
@@ -71,7 +71,7 @@ class AppImportApi(Resource):
args = AppImportPayload.model_validate(console_ns.payload)
# Create service with session
with sessionmaker(db.engine).begin() as session:
with Session(db.engine) as session:
import_service = AppDslService(session)
# Import app
account = current_user
@@ -87,6 +87,7 @@ class AppImportApi(Resource):
icon_background=args.icon_background,
app_id=args.app_id,
)
session.commit()
if result.app_id and FeatureService.get_system_features().webapp_auth.enabled:
# update web app setting as private
EnterpriseService.WebAppAuth.update_app_access_mode(result.app_id, "private")
@@ -111,11 +112,12 @@ class AppImportConfirmApi(Resource):
current_user, _ = current_account_with_tenant()
# Create service with session
with sessionmaker(db.engine).begin() as session:
with Session(db.engine) as session:
import_service = AppDslService(session)
# Confirm import
account = current_user
result = import_service.confirm_import(import_id=import_id, account=account)
session.commit()
# Return appropriate status code based on result
if result.status == ImportStatus.FAILED:
@@ -132,7 +134,7 @@ class AppImportCheckDependenciesApi(Resource):
@marshal_with(app_import_check_dependencies_model)
@edit_permission_required
def get(self, app_model: App):
with sessionmaker(db.engine).begin() as session:
with Session(db.engine) as session:
import_service = AppDslService(session)
result = import_service.check_dependencies(app_model=app_model)

View File

@@ -2,7 +2,7 @@ from flask import request
from flask_restx import Resource, fields, marshal_with
from pydantic import BaseModel, Field
from sqlalchemy import select
from sqlalchemy.orm import sessionmaker
from sqlalchemy.orm import Session
from controllers.console import console_ns
from controllers.console.app.wraps import get_app_model
@@ -69,7 +69,7 @@ class ConversationVariablesApi(Resource):
page_size = 100
stmt = stmt.limit(page_size).offset((page - 1) * page_size)
with sessionmaker(db.engine, expire_on_commit=False).begin() as session:
with Session(db.engine) as session:
rows = session.scalars(stmt).all()
return {

View File

@@ -10,7 +10,7 @@ from graphon.file import File
from graphon.graph_engine.manager import GraphEngineManager
from graphon.model_runtime.utils.encoders import jsonable_encoder
from pydantic import BaseModel, Field, field_validator
from sqlalchemy.orm import sessionmaker
from sqlalchemy.orm import Session
from werkzeug.exceptions import BadRequest, Forbidden, InternalServerError, NotFound
import services
@@ -840,7 +840,7 @@ class PublishedWorkflowApi(Resource):
args = PublishWorkflowPayload.model_validate(console_ns.payload or {})
workflow_service = WorkflowService()
with sessionmaker(db.engine).begin() as session:
with Session(db.engine) as session:
workflow = workflow_service.publish_workflow(
session=session,
app_model=app_model,
@@ -858,6 +858,8 @@ class PublishedWorkflowApi(Resource):
workflow_created_at = TimestampField().format(workflow.created_at)
session.commit()
return {
"result": "success",
"created_at": workflow_created_at,
@@ -980,7 +982,7 @@ class PublishedAllWorkflowApi(Resource):
raise Forbidden()
workflow_service = WorkflowService()
with sessionmaker(db.engine).begin() as session:
with Session(db.engine) as session:
workflows, has_more = workflow_service.get_all_published_workflow(
session=session,
app_model=app_model,
@@ -1070,7 +1072,7 @@ class WorkflowByIdApi(Resource):
workflow_service = WorkflowService()
# Create a session and manage the transaction
with sessionmaker(db.engine, expire_on_commit=False).begin() as session:
with Session(db.engine, expire_on_commit=False) as session:
workflow = workflow_service.update_workflow(
session=session,
workflow_id=workflow_id,
@@ -1082,6 +1084,9 @@ class WorkflowByIdApi(Resource):
if not workflow:
raise NotFound("Workflow not found")
# Commit the transaction in the controller
session.commit()
return workflow
@setup_required
@@ -1096,11 +1101,13 @@ class WorkflowByIdApi(Resource):
workflow_service = WorkflowService()
# Create a session and manage the transaction
with sessionmaker(db.engine).begin() as session:
with Session(db.engine) as session:
try:
workflow_service.delete_workflow(
session=session, workflow_id=workflow_id, tenant_id=app_model.tenant_id
)
# Commit the transaction in the controller
session.commit()
except WorkflowInUseError as e:
abort(400, description=str(e))
except DraftWorkflowDeletionError as e:

View File

@@ -5,7 +5,7 @@ from flask import request
from flask_restx import Resource, marshal_with
from graphon.enums import WorkflowExecutionStatus
from pydantic import BaseModel, Field, field_validator
from sqlalchemy.orm import sessionmaker
from sqlalchemy.orm import Session
from controllers.console import console_ns
from controllers.console.app.wraps import get_app_model
@@ -87,7 +87,7 @@ class WorkflowAppLogApi(Resource):
# get paginate workflow app logs
workflow_app_service = WorkflowAppService()
with sessionmaker(db.engine).begin() as session:
with Session(db.engine) as session:
workflow_app_log_pagination = workflow_app_service.get_paginate_workflow_app_logs(
session=session,
app_model=app_model,
@@ -124,7 +124,7 @@ class WorkflowArchivedLogApi(Resource):
args = WorkflowAppLogQuery.model_validate(request.args.to_dict(flat=True)) # type: ignore
workflow_app_service = WorkflowAppService()
with sessionmaker(db.engine).begin() as session:
with Session(db.engine) as session:
workflow_app_log_pagination = workflow_app_service.get_paginate_workflow_archive_logs(
session=session,
app_model=app_model,

View File

@@ -10,7 +10,7 @@ from graphon.variables.segment_group import SegmentGroup
from graphon.variables.segments import ArrayFileSegment, FileSegment, Segment
from graphon.variables.types import SegmentType
from pydantic import BaseModel, Field
from sqlalchemy.orm import sessionmaker
from sqlalchemy.orm import Session
from controllers.console import console_ns
from controllers.console.app.error import (
@@ -244,7 +244,7 @@ class WorkflowVariableCollectionApi(Resource):
raise DraftWorkflowNotExist()
# fetch draft workflow by app_model
with sessionmaker(bind=db.engine, expire_on_commit=False).begin() as session:
with Session(bind=db.engine, expire_on_commit=False) as session:
draft_var_srv = WorkflowDraftVariableService(
session=session,
)
@@ -298,7 +298,7 @@ class NodeVariableCollectionApi(Resource):
@marshal_with(workflow_draft_variable_list_model)
def get(self, app_model: App, node_id: str):
validate_node_id(node_id)
with sessionmaker(bind=db.engine, expire_on_commit=False).begin() as session:
with Session(bind=db.engine, expire_on_commit=False) as session:
draft_var_srv = WorkflowDraftVariableService(
session=session,
)
@@ -465,7 +465,7 @@ class VariableResetApi(Resource):
def _get_variable_list(app_model: App, node_id) -> WorkflowDraftVariableList:
with sessionmaker(bind=db.engine, expire_on_commit=False).begin() as session:
with Session(bind=db.engine, expire_on_commit=False) as session:
draft_var_srv = WorkflowDraftVariableService(
session=session,
)

View File

@@ -4,7 +4,7 @@ from flask import request
from flask_restx import Resource, fields, marshal_with
from pydantic import BaseModel
from sqlalchemy import select
from sqlalchemy.orm import sessionmaker
from sqlalchemy.orm import Session
from werkzeug.exceptions import NotFound
from configs import dify_config
@@ -64,7 +64,7 @@ class WebhookTriggerApi(Resource):
node_id = args.node_id
with sessionmaker(db.engine).begin() as session:
with Session(db.engine) as session:
# Get webhook trigger for this app and node
webhook_trigger = (
session.query(WorkflowWebhookTrigger)
@@ -95,7 +95,7 @@ class AppTriggersApi(Resource):
assert isinstance(current_user, Account)
assert current_user.current_tenant_id is not None
with sessionmaker(db.engine).begin() as session:
with Session(db.engine) as session:
# Get all triggers for this app using select API
triggers = (
session.execute(
@@ -137,7 +137,7 @@ class AppTriggerEnableApi(Resource):
assert current_user.current_tenant_id is not None
trigger_id = args.trigger_id
with sessionmaker(db.engine, expire_on_commit=False).begin() as session:
with Session(db.engine) as session:
# Find the trigger using select
trigger = session.execute(
select(AppTrigger).where(
@@ -153,6 +153,9 @@ class AppTriggerEnableApi(Resource):
# Update status based on enable_trigger boolean
trigger.status = AppTriggerStatus.ENABLED if args.enable_trigger else AppTriggerStatus.DISABLED
session.commit()
session.refresh(trigger)
# Add computed icon field
url_prefix = dify_config.CONSOLE_API_URL + "/console/api/workspaces/current/tool-provider/builtin/"
if trigger.trigger_type == "trigger-plugin":

View File

@@ -36,7 +36,7 @@ class Subscription(Resource):
@only_edition_cloud
def get(self):
current_user, current_tenant_id = current_account_with_tenant()
args = SubscriptionQuery.model_validate(request.args.to_dict(flat=True))
args = SubscriptionQuery.model_validate(request.args.to_dict(flat=True)) # type: ignore
BillingService.is_tenant_owner_or_admin(current_user)
return BillingService.get_subscription(args.plan, args.interval, current_user.email, current_tenant_id)

View File

@@ -31,7 +31,7 @@ class ComplianceApi(Resource):
@only_edition_cloud
def get(self):
current_user, current_tenant_id = current_account_with_tenant()
args = ComplianceDownloadQuery.model_validate(request.args.to_dict(flat=True))
args = ComplianceDownloadQuery.model_validate(request.args.to_dict(flat=True)) # type: ignore
ip_address = extract_remote_ip(request)
device_info = request.headers.get("User-Agent", "Unknown device")

View File

@@ -6,7 +6,7 @@ from flask import request
from flask_restx import Resource, fields, marshal_with
from pydantic import BaseModel, Field
from sqlalchemy import select
from sqlalchemy.orm import sessionmaker
from sqlalchemy.orm import Session
from werkzeug.exceptions import NotFound
from controllers.common.schema import get_or_create_model, register_schema_model
@@ -159,7 +159,7 @@ class DataSourceApi(Resource):
@account_initialization_required
def patch(self, binding_id, action: Literal["enable", "disable"]):
binding_id = str(binding_id)
with sessionmaker(db.engine, expire_on_commit=False).begin() as session:
with Session(db.engine) as session:
data_source_binding = session.execute(
select(DataSourceOauthBinding).filter_by(id=binding_id)
).scalar_one_or_none()
@@ -211,7 +211,7 @@ class DataSourceNotionListApi(Resource):
if not credential:
raise NotFound("Credential not found.")
exist_page_ids = []
with sessionmaker(db.engine).begin() as session:
with Session(db.engine) as session:
# import notion in the exist dataset
if query.dataset_id:
dataset = DatasetService.get_dataset(query.dataset_id)

View File

@@ -3,7 +3,7 @@ import logging
from flask import request
from flask_restx import Resource
from pydantic import BaseModel, Field
from sqlalchemy.orm import sessionmaker
from sqlalchemy.orm import Session
from controllers.common.schema import register_schema_models
from controllers.console import console_ns
@@ -85,7 +85,7 @@ class CustomizedPipelineTemplateApi(Resource):
@account_initialization_required
@enterprise_license_required
def post(self, template_id: str):
with sessionmaker(db.engine, expire_on_commit=False).begin() as session:
with Session(db.engine) as session:
template = (
session.query(PipelineCustomizedTemplate).where(PipelineCustomizedTemplate.id == template_id).first()
)

View File

@@ -1,6 +1,6 @@
from flask_restx import Resource, marshal
from pydantic import BaseModel
from sqlalchemy.orm import sessionmaker
from sqlalchemy.orm import Session
from werkzeug.exceptions import Forbidden
import services
@@ -54,7 +54,7 @@ class CreateRagPipelineDatasetApi(Resource):
yaml_content=payload.yaml_content,
)
try:
with sessionmaker(db.engine).begin() as session:
with Session(db.engine) as session:
rag_pipeline_dsl_service = RagPipelineDslService(session)
import_info = rag_pipeline_dsl_service.create_rag_pipeline_dataset(
tenant_id=current_tenant_id,

View File

@@ -5,7 +5,7 @@ from flask import Response, request
from flask_restx import Resource, marshal, marshal_with
from graphon.variables.types import SegmentType
from pydantic import BaseModel, Field
from sqlalchemy.orm import sessionmaker
from sqlalchemy.orm import Session
from werkzeug.exceptions import Forbidden
from controllers.common.schema import register_schema_models
@@ -96,7 +96,7 @@ class RagPipelineVariableCollectionApi(Resource):
raise DraftWorkflowNotExist()
# fetch draft workflow by app_model
with sessionmaker(bind=db.engine, expire_on_commit=False).begin() as session:
with Session(bind=db.engine, expire_on_commit=False) as session:
draft_var_srv = WorkflowDraftVariableService(
session=session,
)
@@ -143,7 +143,7 @@ class RagPipelineNodeVariableCollectionApi(Resource):
@marshal_with(workflow_draft_variable_list_model)
def get(self, pipeline: Pipeline, node_id: str):
validate_node_id(node_id)
with sessionmaker(bind=db.engine, expire_on_commit=False).begin() as session:
with Session(bind=db.engine, expire_on_commit=False) as session:
draft_var_srv = WorkflowDraftVariableService(
session=session,
)
@@ -289,7 +289,7 @@ class RagPipelineVariableResetApi(Resource):
def _get_variable_list(pipeline: Pipeline, node_id) -> WorkflowDraftVariableList:
with sessionmaker(bind=db.engine, expire_on_commit=False).begin() as session:
with Session(bind=db.engine, expire_on_commit=False) as session:
draft_var_srv = WorkflowDraftVariableService(
session=session,
)

View File

@@ -1,7 +1,7 @@
from flask import request
from flask_restx import Resource, fields, marshal_with # type: ignore
from pydantic import BaseModel, Field
from sqlalchemy.orm import sessionmaker
from sqlalchemy.orm import Session
from controllers.common.schema import get_or_create_model, register_schema_models
from controllers.console import console_ns
@@ -68,7 +68,7 @@ class RagPipelineImportApi(Resource):
payload = RagPipelineImportPayload.model_validate(console_ns.payload or {})
# Create service with session
with sessionmaker(db.engine).begin() as session:
with Session(db.engine) as session:
import_service = RagPipelineDslService(session)
# Import app
account = current_user
@@ -80,6 +80,7 @@ class RagPipelineImportApi(Resource):
pipeline_id=payload.pipeline_id,
dataset_name=payload.name,
)
session.commit()
# Return appropriate status code based on result
status = result.status
@@ -101,11 +102,12 @@ class RagPipelineImportConfirmApi(Resource):
current_user, _ = current_account_with_tenant()
# Create service with session
with sessionmaker(db.engine).begin() as session:
with Session(db.engine) as session:
import_service = RagPipelineDslService(session)
# Confirm import
account = current_user
result = import_service.confirm_import(import_id=import_id, account=account)
session.commit()
# Return appropriate status code based on result
if result.status == ImportStatus.FAILED:
@@ -122,7 +124,7 @@ class RagPipelineImportCheckDependenciesApi(Resource):
@edit_permission_required
@marshal_with(pipeline_import_check_dependencies_model)
def get(self, pipeline: Pipeline):
with sessionmaker(db.engine).begin() as session:
with Session(db.engine) as session:
import_service = RagPipelineDslService(session)
result = import_service.check_dependencies(pipeline=pipeline)
@@ -140,7 +142,7 @@ class RagPipelineExportApi(Resource):
# Add include_secret params
query = IncludeSecretQuery.model_validate(request.args.to_dict())
with sessionmaker(db.engine).begin() as session:
with Session(db.engine) as session:
export_service = RagPipelineDslService(session)
result = export_service.export_rag_pipeline_dsl(
pipeline=pipeline, include_secret=query.include_secret == "true"

View File

@@ -6,7 +6,7 @@ from flask import abort, request
from flask_restx import Resource, marshal_with # type: ignore
from graphon.model_runtime.utils.encoders import jsonable_encoder
from pydantic import BaseModel, Field
from sqlalchemy.orm import sessionmaker
from sqlalchemy.orm import Session
from werkzeug.exceptions import BadRequest, Forbidden, InternalServerError, NotFound
import services
@@ -608,7 +608,7 @@ class PublishedRagPipelineApi(Resource):
# The role of the current user in the ta table must be admin, owner, or editor
current_user, _ = current_account_with_tenant()
rag_pipeline_service = RagPipelineService()
with sessionmaker(db.engine).begin() as session:
with Session(db.engine) as session:
pipeline = session.merge(pipeline)
workflow = rag_pipeline_service.publish_workflow(
session=session,
@@ -620,6 +620,8 @@ class PublishedRagPipelineApi(Resource):
session.add(pipeline)
workflow_created_at = TimestampField().format(workflow.created_at)
session.commit()
return {
"result": "success",
"created_at": workflow_created_at,
@@ -693,7 +695,7 @@ class PublishedAllRagPipelineApi(Resource):
raise Forbidden()
rag_pipeline_service = RagPipelineService()
with sessionmaker(db.engine).begin() as session:
with Session(db.engine) as session:
workflows, has_more = rag_pipeline_service.get_all_published_workflow(
session=session,
pipeline=pipeline,
@@ -765,7 +767,7 @@ class RagPipelineByIdApi(Resource):
rag_pipeline_service = RagPipelineService()
# Create a session and manage the transaction
with sessionmaker(db.engine, expire_on_commit=False).begin() as session:
with Session(db.engine, expire_on_commit=False) as session:
workflow = rag_pipeline_service.update_workflow(
session=session,
workflow_id=workflow_id,
@@ -777,6 +779,9 @@ class RagPipelineByIdApi(Resource):
if not workflow:
raise NotFound("Workflow not found")
# Commit the transaction in the controller
session.commit()
return workflow
@setup_required
@@ -793,13 +798,14 @@ class RagPipelineByIdApi(Resource):
workflow_service = WorkflowService()
with sessionmaker(db.engine).begin() as session:
with Session(db.engine) as session:
try:
workflow_service.delete_workflow(
session=session,
workflow_id=workflow_id,
tenant_id=pipeline.tenant_id,
)
session.commit()
except WorkflowInUseError as e:
abort(400, description=str(e))
except DraftWorkflowDeletionError as e:

View File

@@ -2,7 +2,7 @@ from typing import Any
from flask import request
from pydantic import BaseModel, Field, TypeAdapter, model_validator
from sqlalchemy.orm import sessionmaker
from sqlalchemy.orm import Session
from werkzeug.exceptions import NotFound
from controllers.common.schema import register_schema_models
@@ -74,7 +74,7 @@ class ConversationListApi(InstalledAppResource):
try:
if not isinstance(current_user, Account):
raise ValueError("current_user must be an Account instance")
with sessionmaker(db.engine).begin() as session:
with Session(db.engine) as session:
pagination = WebConversationService.pagination_by_last_id(
session=session,
app_model=app_model,

View File

@@ -2,7 +2,7 @@ from collections.abc import Callable
from functools import wraps
from typing import ParamSpec, TypeVar
from sqlalchemy.orm import sessionmaker
from sqlalchemy.orm import Session
from werkzeug.exceptions import Forbidden
from extensions.ext_database import db
@@ -24,7 +24,7 @@ def plugin_permission_required(
user = current_user
tenant_id = current_tenant_id
with sessionmaker(db.engine).begin() as session:
with Session(db.engine) as session:
permission = (
session.query(TenantPluginPermission)
.where(

View File

@@ -8,7 +8,7 @@ from flask import request
from flask_restx import Resource, fields, marshal_with
from pydantic import BaseModel, Field, field_validator, model_validator
from sqlalchemy import select
from sqlalchemy.orm import sessionmaker
from sqlalchemy.orm import Session
from configs import dify_config
from constants.languages import supported_language
@@ -519,7 +519,7 @@ class EducationAutoCompleteApi(Resource):
@cloud_edition_billing_enabled
@marshal_with(data_fields)
def get(self):
payload = request.args.to_dict(flat=True)
payload = request.args.to_dict(flat=True) # type: ignore
args = EducationAutocompleteQuery.model_validate(payload)
return BillingService.EducationIdentity.autocomplete(args.keywords, args.page, args.limit)
@@ -562,7 +562,7 @@ class ChangeEmailSendEmailApi(Resource):
user_email = current_user.email
else:
with sessionmaker(db.engine).begin() as session:
with Session(db.engine) as session:
account = AccountService.get_account_by_email_with_case_fallback(args.email, session=session)
if account is None:
raise AccountNotFound()

View File

@@ -99,7 +99,7 @@ class ModelProviderListApi(Resource):
_, current_tenant_id = current_account_with_tenant()
tenant_id = current_tenant_id
payload = request.args.to_dict(flat=True)
payload = request.args.to_dict(flat=True) # type: ignore
args = ParserModelList.model_validate(payload)
model_provider_service = ModelProviderService()
@@ -118,7 +118,7 @@ class ModelProviderCredentialApi(Resource):
_, current_tenant_id = current_account_with_tenant()
tenant_id = current_tenant_id
# if credential_id is not provided, return current used credential
payload = request.args.to_dict(flat=True)
payload = request.args.to_dict(flat=True) # type: ignore
args = ParserCredentialId.model_validate(payload)
model_provider_service = ModelProviderService()

View File

@@ -7,7 +7,7 @@ from flask import make_response, redirect, request, send_file
from flask_restx import Resource
from graphon.model_runtime.utils.encoders import jsonable_encoder
from pydantic import BaseModel, Field, HttpUrl, field_validator, model_validator
from sqlalchemy.orm import sessionmaker
from sqlalchemy.orm import Session
from werkzeug.exceptions import Forbidden
from configs import dify_config
@@ -1019,7 +1019,7 @@ class ToolProviderMCPApi(Resource):
# Step 1: Get provider data for URL validation (short-lived session, no network I/O)
validation_data = None
with sessionmaker(db.engine).begin() as session:
with Session(db.engine) as session:
service = MCPToolManageService(session=session)
validation_data = service.get_provider_for_url_validation(
tenant_id=current_tenant_id, provider_id=payload.provider_id
@@ -1034,7 +1034,7 @@ class ToolProviderMCPApi(Resource):
)
# Step 3: Perform database update in a transaction
with sessionmaker(db.engine).begin() as session:
with Session(db.engine) as session, session.begin():
service = MCPToolManageService(session=session)
service.update_provider(
tenant_id=current_tenant_id,
@@ -1061,7 +1061,7 @@ class ToolProviderMCPApi(Resource):
payload = MCPProviderDeletePayload.model_validate(console_ns.payload or {})
_, current_tenant_id = current_account_with_tenant()
with sessionmaker(db.engine).begin() as session:
with Session(db.engine) as session, session.begin():
service = MCPToolManageService(session=session)
service.delete_provider(tenant_id=current_tenant_id, provider_id=payload.provider_id)
@@ -1079,7 +1079,7 @@ class ToolMCPAuthApi(Resource):
provider_id = payload.provider_id
_, tenant_id = current_account_with_tenant()
with sessionmaker(db.engine).begin() as session:
with Session(db.engine) as session, session.begin():
service = MCPToolManageService(session=session)
db_provider = service.get_provider(provider_id=provider_id, tenant_id=tenant_id)
if not db_provider:
@@ -1100,7 +1100,7 @@ class ToolMCPAuthApi(Resource):
sse_read_timeout=provider_entity.sse_read_timeout,
):
# Update credentials in new transaction
with sessionmaker(db.engine).begin() as session:
with Session(db.engine) as session, session.begin():
service = MCPToolManageService(session=session)
service.update_provider_credentials(
provider_id=provider_id,
@@ -1118,17 +1118,17 @@ class ToolMCPAuthApi(Resource):
resource_metadata_url=e.resource_metadata_url,
scope_hint=e.scope_hint,
)
with sessionmaker(db.engine).begin() as session:
with Session(db.engine) as session, session.begin():
service = MCPToolManageService(session=session)
response = service.execute_auth_actions(auth_result)
return response
except MCPRefreshTokenError as e:
with sessionmaker(db.engine).begin() as session:
with Session(db.engine) as session, session.begin():
service = MCPToolManageService(session=session)
service.clear_provider_credentials(provider_id=provider_id, tenant_id=tenant_id)
raise ValueError(f"Failed to refresh token, please try to authorize again: {e}") from e
except (MCPError, ValueError) as e:
with sessionmaker(db.engine).begin() as session:
with Session(db.engine) as session, session.begin():
service = MCPToolManageService(session=session)
service.clear_provider_credentials(provider_id=provider_id, tenant_id=tenant_id)
raise ValueError(f"Failed to connect to MCP server: {e}") from e
@@ -1141,7 +1141,7 @@ class ToolMCPDetailApi(Resource):
@account_initialization_required
def get(self, provider_id):
_, tenant_id = current_account_with_tenant()
with sessionmaker(db.engine).begin() as session:
with Session(db.engine) as session, session.begin():
service = MCPToolManageService(session=session)
provider = service.get_provider(provider_id=provider_id, tenant_id=tenant_id)
return jsonable_encoder(ToolTransformService.mcp_provider_to_user_provider(provider, for_list=True))
@@ -1155,7 +1155,7 @@ class ToolMCPListAllApi(Resource):
def get(self):
_, tenant_id = current_account_with_tenant()
with sessionmaker(db.engine).begin() as session:
with Session(db.engine) as session, session.begin():
service = MCPToolManageService(session=session)
# Skip sensitive data decryption for list view to improve performance
tools = service.list_providers(tenant_id=tenant_id, include_sensitive=False)
@@ -1170,7 +1170,7 @@ class ToolMCPUpdateApi(Resource):
@account_initialization_required
def get(self, provider_id):
_, tenant_id = current_account_with_tenant()
with sessionmaker(db.engine).begin() as session:
with Session(db.engine) as session, session.begin():
service = MCPToolManageService(session=session)
tools = service.list_provider_tools(
tenant_id=tenant_id,
@@ -1188,7 +1188,7 @@ class ToolMCPCallbackApi(Resource):
authorization_code = query.code
# Create service instance for handle_callback
with sessionmaker(db.engine).begin() as session:
with Session(db.engine) as session, session.begin():
mcp_service = MCPToolManageService(session=session)
# handle_callback now returns state data and tokens
state_data, tokens = handle_callback(state_key, authorization_code)

View File

@@ -5,7 +5,7 @@ from flask import make_response, redirect, request
from flask_restx import Resource
from graphon.model_runtime.utils.encoders import jsonable_encoder
from pydantic import BaseModel, model_validator
from sqlalchemy.orm import sessionmaker
from sqlalchemy.orm import Session
from werkzeug.exceptions import BadRequest, Forbidden
from configs import dify_config
@@ -375,7 +375,7 @@ class TriggerSubscriptionDeleteApi(Resource):
assert user.current_tenant_id is not None
try:
with sessionmaker(db.engine).begin() as session:
with Session(db.engine) as session:
# Delete trigger provider subscription
TriggerProviderService.delete_trigger_provider(
session=session,
@@ -388,6 +388,7 @@ class TriggerSubscriptionDeleteApi(Resource):
tenant_id=user.current_tenant_id,
subscription_id=subscription_id,
)
session.commit()
return {"result": "success"}
except ValueError as e:
raise BadRequest(str(e))

View File

@@ -155,7 +155,7 @@ class WorkspaceListApi(Resource):
@setup_required
@admin_required
def get(self):
payload = request.args.to_dict(flat=True)
payload = request.args.to_dict(flat=True) # type: ignore
args = WorkspaceListQuery.model_validate(payload)
stmt = select(Tenant).order_by(Tenant.created_at.desc())

View File

@@ -4,7 +4,7 @@ from flask import Response
from flask_restx import Resource
from graphon.variables.input_entities import VariableEntity
from pydantic import BaseModel, Field, ValidationError
from sqlalchemy.orm import Session, sessionmaker
from sqlalchemy.orm import Session
from controllers.common.schema import register_schema_model
from controllers.mcp import mcp_ns
@@ -67,7 +67,7 @@ class MCPAppApi(Resource):
request_id: Union[int, str] | None = args.id
mcp_request = self._parse_mcp_request(args.model_dump(exclude_none=True))
with sessionmaker(db.engine, expire_on_commit=False).begin() as session:
with Session(db.engine, expire_on_commit=False) as session:
# Get MCP server and app
mcp_server, app = self._get_mcp_server_and_app(server_code, session)
self._validate_server_status(mcp_server)
@@ -189,7 +189,7 @@ class MCPAppApi(Resource):
def _retrieve_end_user(self, tenant_id: str, mcp_server_id: str) -> EndUser | None:
"""Get end user - manages its own database session"""
with sessionmaker(db.engine, expire_on_commit=False).begin() as session:
with Session(db.engine, expire_on_commit=False) as session, session.begin():
return (
session.query(EndUser)
.where(EndUser.tenant_id == tenant_id)
@@ -229,7 +229,9 @@ class MCPAppApi(Resource):
if not end_user and isinstance(mcp_request.root, mcp_types.InitializeRequest):
client_info = mcp_request.root.params.clientInfo
client_name = f"{client_info.name}@{client_info.version}"
with sessionmaker(db.engine, expire_on_commit=False).begin() as create_session:
# Commit the session before creating end user to avoid transaction conflicts
session.commit()
with Session(db.engine, expire_on_commit=False) as create_session, create_session.begin():
end_user = self._create_end_user(client_name, app.tenant_id, app.id, mcp_server.id, create_session)
return handle_mcp_request(app, mcp_request, user_input_form, mcp_server, end_user, request_id)

View File

@@ -2,7 +2,7 @@ from typing import Literal
from flask import request
from pydantic import BaseModel, Field, TypeAdapter, field_validator, model_validator
from sqlalchemy.orm import sessionmaker
from sqlalchemy.orm import Session
from werkzeug.exceptions import NotFound
from controllers.common.schema import register_schema_models
@@ -99,7 +99,7 @@ class ConversationListApi(WebApiResource):
query = ConversationListQuery.model_validate(raw_args)
try:
with sessionmaker(db.engine).begin() as session:
with Session(db.engine) as session:
pagination = WebConversationService.pagination_by_last_id(
session=session,
app_model=app_model,

View File

@@ -4,7 +4,7 @@ import secrets
from flask import request
from flask_restx import Resource
from pydantic import BaseModel, Field, field_validator
from sqlalchemy.orm import sessionmaker
from sqlalchemy.orm import Session
from controllers.common.schema import register_schema_models
from controllers.console.auth.error import (
@@ -81,7 +81,7 @@ class ForgotPasswordSendEmailApi(Resource):
else:
language = "en-US"
with sessionmaker(db.engine).begin() as session:
with Session(db.engine) as session:
account = AccountService.get_account_by_email_with_case_fallback(request_email, session=session)
token = None
if account is None:
@@ -180,17 +180,18 @@ class ForgotPasswordResetApi(Resource):
email = reset_data.get("email", "")
with sessionmaker(db.engine).begin() as session:
with Session(db.engine) as session:
account = AccountService.get_account_by_email_with_case_fallback(email, session=session)
if account:
self._update_existing_account(account, password_hashed, salt)
self._update_existing_account(account, password_hashed, salt, session)
else:
raise AuthenticationFailedError()
return {"result": "success"}
def _update_existing_account(self, account: Account, password_hashed, salt):
def _update_existing_account(self, account: Account, password_hashed, salt, session):
# Update existing account credentials
account.password = base64.b64encode(password_hashed).decode()
account.password_salt = base64.b64encode(salt).decode()
session.commit()

View File

@@ -6,7 +6,7 @@ from typing import Concatenate, ParamSpec, TypeVar
from flask import request
from flask_restx import Resource
from sqlalchemy import select
from sqlalchemy.orm import sessionmaker
from sqlalchemy.orm import Session
from werkzeug.exceptions import BadRequest, NotFound, Unauthorized
from constants import HEADER_NAME_APP_CODE
@@ -49,7 +49,7 @@ def decode_jwt_token(app_code: str | None = None, user_id: str | None = None):
decoded = PassportService().verify(tk)
app_code = decoded.get("app_code")
app_id = decoded.get("app_id")
with sessionmaker(db.engine, expire_on_commit=False).begin() as session:
with Session(db.engine, expire_on_commit=False) as session:
app_model = session.scalar(select(App).where(App.id == app_id))
site = session.scalar(select(Site).where(Site.code == app_code))
if not app_model:

View File

@@ -17,7 +17,7 @@ class CSVSanitizer:
"""
# Characters that can start a formula in Excel/LibreOffice/Google Sheets
FORMULA_CHARS = frozenset(("=", "+", "-", "@", "\t", "\r"))
FORMULA_CHARS = frozenset({"=", "+", "-", "@", "\t", "\r"})
@classmethod
def sanitize_value(cls, value: Any) -> str:

View File

@@ -122,6 +122,6 @@ class JiebaKeywordTableHandler:
results.add(token)
sub_tokens = re.findall(r"\w+", token)
if len(sub_tokens) > 1:
results.update({w for w in sub_tokens if w not in STOPWORDS})
results.update({w for w in sub_tokens if w not in list(STOPWORDS)})
return results

File diff suppressed because it is too large Load Diff

View File

@@ -4,7 +4,7 @@ import uuid
from enum import StrEnum
from typing import Any
from clickhouse_connect import get_client # type: ignore[import-untyped]
from clickhouse_connect import get_client
from pydantic import BaseModel
from configs import dify_config

View File

@@ -35,7 +35,7 @@ class PdfExtractor(BaseExtractor):
"""
# Magic bytes for image format detection: (magic_bytes, extension, mime_type)
IMAGE_FORMATS: tuple[tuple[bytes, str, str], ...] = (
IMAGE_FORMATS = [
(b"\xff\xd8\xff", "jpg", "image/jpeg"),
(b"\x89PNG\r\n\x1a\n", "png", "image/png"),
(b"\x00\x00\x00\x0c\x6a\x50\x20\x20\x0d\x0a\x87\x0a", "jp2", "image/jp2"),
@@ -45,7 +45,7 @@ class PdfExtractor(BaseExtractor):
(b"MM\x00*", "tiff", "image/tiff"),
(b"II+\x00", "tiff", "image/tiff"),
(b"MM\x00+", "tiff", "image/tiff"),
)
]
MAX_MAGIC_LEN = max(len(m) for m, _, _ in IMAGE_FORMATS)
def __init__(self, file_path: str, tenant_id: str, user_id: str, file_cache_key: str | None = None):

View File

@@ -5,11 +5,11 @@ TRIGGER_SCHEDULE_NODE_TYPE: Final[str] = "trigger-schedule"
TRIGGER_PLUGIN_NODE_TYPE: Final[str] = "trigger-plugin"
TRIGGER_NODE_TYPES: Final[frozenset[str]] = frozenset(
(
{
TRIGGER_WEBHOOK_NODE_TYPE,
TRIGGER_SCHEDULE_NODE_TYPE,
TRIGGER_PLUGIN_NODE_TYPE,
)
}
)

View File

@@ -8,20 +8,24 @@ from pydantic import BaseModel, Field, field_validator
from core.trigger.constants import TRIGGER_WEBHOOK_NODE_TYPE
_WEBHOOK_HEADER_ALLOWED_TYPES: frozenset[SegmentType] = frozenset((SegmentType.STRING,))
_WEBHOOK_HEADER_ALLOWED_TYPES = frozenset(
{
SegmentType.STRING,
}
)
_WEBHOOK_QUERY_PARAMETER_ALLOWED_TYPES: frozenset[SegmentType] = frozenset(
(
_WEBHOOK_QUERY_PARAMETER_ALLOWED_TYPES = frozenset(
{
SegmentType.STRING,
SegmentType.NUMBER,
SegmentType.BOOLEAN,
)
}
)
_WEBHOOK_PARAMETER_ALLOWED_TYPES = _WEBHOOK_HEADER_ALLOWED_TYPES | _WEBHOOK_QUERY_PARAMETER_ALLOWED_TYPES
_WEBHOOK_BODY_ALLOWED_TYPES: frozenset[SegmentType] = frozenset(
(
_WEBHOOK_BODY_ALLOWED_TYPES = frozenset(
{
SegmentType.STRING,
SegmentType.NUMBER,
SegmentType.BOOLEAN,
@@ -31,7 +35,7 @@ _WEBHOOK_BODY_ALLOWED_TYPES: frozenset[SegmentType] = frozenset(
SegmentType.ARRAY_BOOLEAN,
SegmentType.ARRAY_OBJECT,
SegmentType.FILE,
)
}
)

View File

@@ -1,12 +1,9 @@
from collections.abc import Collection
def convert_to_lower_and_upper_set(inputs: Collection[str]) -> set[str]:
def convert_to_lower_and_upper_set(inputs: list[str] | set[str]) -> set[str]:
"""
Convert a collection of strings to a set containing both lower and upper case versions of each string.
Convert a list or set of strings to a set containing both lower and upper case versions of each string.
Args:
inputs (Collection[str]): A collection of strings to be converted.
inputs (list[str] | set[str]): A list or set of strings to be converted.
Returns:
set[str]: A set containing both lower and upper case versions of each string.

View File

@@ -1386,7 +1386,7 @@ class ConversationVariable(TypeBase):
# Only `sys.query` and `sys.files` could be modified.
_EDITABLE_SYSTEM_VARIABLE = frozenset(("query", "files"))
_EDITABLE_SYSTEM_VARIABLE = frozenset(["query", "files"])
class WorkflowDraftVariable(Base):

View File

@@ -800,8 +800,8 @@ class DraftVariableSaver:
# technical variables from being exposed in the draft environment, particularly those
# that aren't meant to be directly edited or viewed by users.
_EXCLUDE_VARIABLE_NAMES_MAPPING: dict[NodeType, frozenset[str]] = {
BuiltinNodeTypes.LLM: frozenset(("finish_reason",)),
BuiltinNodeTypes.LOOP: frozenset(("loop_round",)),
BuiltinNodeTypes.LLM: frozenset(["finish_reason"]),
BuiltinNodeTypes.LOOP: frozenset(["loop_round"]),
}
# Database session used for persisting draft variables.

View File

@@ -383,21 +383,14 @@ class TestWorkflowAppLogEndpoints:
monkeypatch.setattr(workflow_app_log_module, "db", SimpleNamespace(engine=MagicMock()))
class DummySessionCtx:
class DummySession:
def __enter__(self):
return "session"
def __exit__(self, exc_type, exc, tb):
return False
class DummySessionMaker:
def __init__(self, *args, **kwargs):
pass
def begin(self):
return DummySessionCtx()
monkeypatch.setattr(workflow_app_log_module, "sessionmaker", DummySessionMaker)
monkeypatch.setattr(workflow_app_log_module, "Session", lambda *args, **kwargs: DummySession())
def fake_get_paginate(self, **_kwargs):
return {"items": [], "total": 0}
@@ -430,20 +423,13 @@ class TestWorkflowDraftVariableEndpoints:
monkeypatch.setattr(workflow_draft_variable_module, "db", SimpleNamespace(engine=MagicMock()))
monkeypatch.setattr(workflow_draft_variable_module, "current_user", SimpleNamespace(id="user-1"))
class DummySessionCtx:
class DummySession:
def __enter__(self):
return "session"
def __exit__(self, exc_type, exc, tb):
return False
class DummySessionMaker:
def __init__(self, *args, **kwargs):
pass
def begin(self):
return DummySessionCtx()
class DummyDraftService:
def __init__(self, session):
self.session = session
@@ -451,7 +437,7 @@ class TestWorkflowDraftVariableEndpoints:
def list_variables_without_values(self, **_kwargs):
return {"items": [], "total": 0}
monkeypatch.setattr(workflow_draft_variable_module, "sessionmaker", DummySessionMaker)
monkeypatch.setattr(workflow_draft_variable_module, "Session", lambda *args, **kwargs: DummySession())
class DummyWorkflowService:
def is_workflow_exist(self, *args, **kwargs):
@@ -557,21 +543,14 @@ class TestWorkflowTriggerEndpoints:
session = MagicMock()
session.query.return_value.where.return_value.first.return_value = trigger
class DummySessionCtx:
class DummySession:
def __enter__(self):
return session
def __exit__(self, exc_type, exc, tb):
return False
class DummySessionMaker:
def __init__(self, *args, **kwargs):
pass
def begin(self):
return DummySessionCtx()
monkeypatch.setattr(workflow_trigger_module, "sessionmaker", DummySessionMaker)
monkeypatch.setattr(workflow_trigger_module, "Session", lambda *_args, **_kwargs: DummySession())
with app.test_request_context("/?node_id=node-1"):
result = method(app_model=SimpleNamespace(id="app-1"))

View File

@@ -102,12 +102,12 @@ class TestDataSourceApi:
with (
app.test_request_context("/"),
patch("controllers.console.datasets.data_source.sessionmaker") as mock_session_class,
patch("controllers.console.datasets.data_source.Session") as mock_session_class,
patch("controllers.console.datasets.data_source.db.session.add"),
patch("controllers.console.datasets.data_source.db.session.commit"),
):
mock_session = MagicMock()
mock_session_class.return_value.begin.return_value.__enter__.return_value = mock_session
mock_session_class.return_value.__enter__.return_value = mock_session
mock_session.execute.return_value.scalar_one_or_none.return_value = binding
response, status = method(api, "b1", "enable")
@@ -123,12 +123,12 @@ class TestDataSourceApi:
with (
app.test_request_context("/"),
patch("controllers.console.datasets.data_source.sessionmaker") as mock_session_class,
patch("controllers.console.datasets.data_source.Session") as mock_session_class,
patch("controllers.console.datasets.data_source.db.session.add"),
patch("controllers.console.datasets.data_source.db.session.commit"),
):
mock_session = MagicMock()
mock_session_class.return_value.begin.return_value.__enter__.return_value = mock_session
mock_session_class.return_value.__enter__.return_value = mock_session
mock_session.execute.return_value.scalar_one_or_none.return_value = binding
response, status = method(api, "b1", "disable")
@@ -142,10 +142,10 @@ class TestDataSourceApi:
with (
app.test_request_context("/"),
patch("controllers.console.datasets.data_source.sessionmaker") as mock_session_class,
patch("controllers.console.datasets.data_source.Session") as mock_session_class,
):
mock_session = MagicMock()
mock_session_class.return_value.begin.return_value.__enter__.return_value = mock_session
mock_session_class.return_value.__enter__.return_value = mock_session
mock_session.execute.return_value.scalar_one_or_none.return_value = None
with pytest.raises(NotFound):
@@ -159,10 +159,10 @@ class TestDataSourceApi:
with (
app.test_request_context("/"),
patch("controllers.console.datasets.data_source.sessionmaker") as mock_session_class,
patch("controllers.console.datasets.data_source.Session") as mock_session_class,
):
mock_session = MagicMock()
mock_session_class.return_value.begin.return_value.__enter__.return_value = mock_session
mock_session_class.return_value.__enter__.return_value = mock_session
mock_session.execute.return_value.scalar_one_or_none.return_value = binding
with pytest.raises(ValueError):
@@ -176,10 +176,10 @@ class TestDataSourceApi:
with (
app.test_request_context("/"),
patch("controllers.console.datasets.data_source.sessionmaker") as mock_session_class,
patch("controllers.console.datasets.data_source.Session") as mock_session_class,
):
mock_session = MagicMock()
mock_session_class.return_value.begin.return_value.__enter__.return_value = mock_session
mock_session_class.return_value.__enter__.return_value = mock_session
mock_session.execute.return_value.scalar_one_or_none.return_value = binding
with pytest.raises(ValueError):
@@ -282,7 +282,7 @@ class TestDataSourceNotionListApi:
"controllers.console.datasets.data_source.DatasetService.get_dataset",
return_value=dataset,
),
patch("controllers.console.datasets.data_source.sessionmaker") as mock_session_class,
patch("controllers.console.datasets.data_source.Session") as mock_session_class,
patch(
"core.datasource.datasource_manager.DatasourceManager.get_datasource_runtime",
return_value=MagicMock(
@@ -292,7 +292,7 @@ class TestDataSourceNotionListApi:
),
):
mock_session = MagicMock()
mock_session_class.return_value.begin.return_value.__enter__.return_value = mock_session
mock_session_class.return_value.__enter__.return_value = mock_session
mock_session.scalars.return_value.all.return_value = [document]
response, status = method(api)
@@ -315,7 +315,7 @@ class TestDataSourceNotionListApi:
"controllers.console.datasets.data_source.DatasetService.get_dataset",
return_value=dataset,
),
patch("controllers.console.datasets.data_source.sessionmaker"),
patch("controllers.console.datasets.data_source.Session"),
):
with pytest.raises(ValueError):
method(api)

View File

@@ -1,153 +0,0 @@
"""Integration tests for console API key endpoints using testcontainers."""
from __future__ import annotations
from unittest.mock import MagicMock, patch
import pytest
from flask.testing import FlaskClient
from sqlalchemy import delete
from sqlalchemy.orm import Session
from models.enums import ApiTokenType
from models.model import ApiToken, App, AppMode
from tests.test_containers_integration_tests.controllers.console.helpers import (
authenticate_console_client,
create_console_account_and_tenant,
create_console_app,
)
@pytest.fixture
def setup_app(
db_session_with_containers: Session,
test_client_with_containers: FlaskClient,
) -> tuple[FlaskClient, dict[str, str], App]:
"""Create an authenticated client with an app for API key tests."""
account, tenant = create_console_account_and_tenant(db_session_with_containers)
app = create_console_app(db_session_with_containers, tenant.id, account.id, AppMode.CHAT)
headers = authenticate_console_client(test_client_with_containers, account)
return test_client_with_containers, headers, app
@pytest.fixture(autouse=True)
def cleanup_api_tokens(db_session_with_containers: Session):
"""Remove API tokens created during each test."""
yield
db_session_with_containers.execute(delete(ApiToken))
db_session_with_containers.commit()
class TestAppApiKeyListResource:
"""Tests for GET/POST /apps/<resource_id>/api-keys."""
def test_get_empty_keys(self, setup_app: tuple[FlaskClient, dict[str, str], App]) -> None:
client, headers, app = setup_app
resp = client.get(f"/console/api/apps/{app.id}/api-keys", headers=headers)
assert resp.status_code == 200
assert resp.json is not None
assert resp.json["data"] == []
def test_create_api_key(self, setup_app: tuple[FlaskClient, dict[str, str], App]) -> None:
client, headers, app = setup_app
resp = client.post(f"/console/api/apps/{app.id}/api-keys", headers=headers)
assert resp.status_code == 201
data = resp.json
assert data is not None
assert data["token"].startswith("app-")
assert data["id"] is not None
def test_get_keys_after_create(self, setup_app: tuple[FlaskClient, dict[str, str], App]) -> None:
client, headers, app = setup_app
client.post(f"/console/api/apps/{app.id}/api-keys", headers=headers)
client.post(f"/console/api/apps/{app.id}/api-keys", headers=headers)
resp = client.get(f"/console/api/apps/{app.id}/api-keys", headers=headers)
assert resp.status_code == 200
assert resp.json is not None
assert len(resp.json["data"]) == 2
def test_create_key_max_limit(
self,
setup_app: tuple[FlaskClient, dict[str, str], App],
db_session_with_containers: Session,
) -> None:
client, headers, app = setup_app
# Create 10 keys (the max)
for _ in range(10):
client.post(f"/console/api/apps/{app.id}/api-keys", headers=headers)
# 11th should fail
resp = client.post(f"/console/api/apps/{app.id}/api-keys", headers=headers)
assert resp.status_code == 400
def test_get_keys_for_nonexistent_app(
self,
setup_app: tuple[FlaskClient, dict[str, str], App],
) -> None:
client, headers, _ = setup_app
resp = client.get(
"/console/api/apps/00000000-0000-0000-0000-000000000000/api-keys",
headers=headers,
)
assert resp.status_code == 404
class TestAppApiKeyResource:
"""Tests for DELETE /apps/<resource_id>/api-keys/<api_key_id>."""
def test_delete_key_success(self, setup_app: tuple[FlaskClient, dict[str, str], App]) -> None:
client, headers, app = setup_app
create_resp = client.post(f"/console/api/apps/{app.id}/api-keys", headers=headers)
assert create_resp.json is not None
key_id = create_resp.json["id"]
resp = client.delete(f"/console/api/apps/{app.id}/api-keys/{key_id}", headers=headers)
assert resp.status_code == 204
def test_delete_nonexistent_key(self, setup_app: tuple[FlaskClient, dict[str, str], App]) -> None:
client, headers, app = setup_app
resp = client.delete(
f"/console/api/apps/{app.id}/api-keys/00000000-0000-0000-0000-000000000000",
headers=headers,
)
assert resp.status_code == 404
def test_delete_key_nonexistent_app(
self,
setup_app: tuple[FlaskClient, dict[str, str], App],
) -> None:
client, headers, _ = setup_app
resp = client.delete(
"/console/api/apps/00000000-0000-0000-0000-000000000000/api-keys/00000000-0000-0000-0000-000000000000",
headers=headers,
)
assert resp.status_code == 404
def test_delete_forbidden_for_non_admin(
self,
flask_app_with_containers,
) -> None:
"""A non-admin member cannot delete API keys via the controller permission check."""
from werkzeug.exceptions import Forbidden
from controllers.console.apikey import BaseApiKeyResource
resource = BaseApiKeyResource()
resource.resource_type = ApiTokenType.APP
resource.resource_model = MagicMock()
resource.resource_id_field = "app_id"
non_admin = MagicMock()
non_admin.is_admin_or_owner = False
with (
flask_app_with_containers.test_request_context("/"),
patch(
"controllers.console.apikey.current_account_with_tenant",
return_value=(non_admin, "tenant-id"),
),
patch("controllers.console.apikey._get_resource"),
):
with pytest.raises(Forbidden):
BaseApiKeyResource.delete(resource, "rid", "kid")

View File

@@ -69,7 +69,7 @@ def client(flask_app_with_containers):
return_value=(MagicMock(id="u1"), "t1"),
autospec=True,
)
@patch("controllers.console.workspace.tool_providers.sessionmaker", autospec=True)
@patch("controllers.console.workspace.tool_providers.Session", autospec=True)
@patch("controllers.console.workspace.tool_providers.MCPToolManageService._reconnect_with_url", autospec=True)
@pytest.mark.usefixtures("_mock_cache", "_mock_user_tenant")
def test_create_mcp_provider_populates_tools(mock_reconnect, mock_session, mock_current_account_with_tenant, client):
@@ -88,7 +88,7 @@ def test_create_mcp_provider_populates_tools(mock_reconnect, mock_session, mock_
create_result.id = "provider-1"
svc.create_provider.return_value = create_result
svc.get_provider.return_value = MagicMock(id="provider-1", tenant_id="t1") # used by reload path
mock_session.return_value.begin.return_value.__enter__.return_value = MagicMock()
mock_session.return_value.__enter__.return_value = MagicMock()
# Patch MCPToolManageService constructed inside controller
with patch("controllers.console.workspace.tool_providers.MCPToolManageService", return_value=svc, autospec=True):
payload = {

View File

@@ -306,14 +306,14 @@ class TestTriggerSubscriptionCrud:
app.test_request_context("/"),
patch("controllers.console.workspace.trigger_providers.current_user", mock_user()),
patch("controllers.console.workspace.trigger_providers.db") as mock_db,
patch("controllers.console.workspace.trigger_providers.sessionmaker") as mock_session_cls,
patch("controllers.console.workspace.trigger_providers.Session") as mock_session_cls,
patch("controllers.console.workspace.trigger_providers.TriggerProviderService.delete_trigger_provider"),
patch(
"controllers.console.workspace.trigger_providers.TriggerSubscriptionOperatorService.delete_plugin_trigger_by_subscription"
),
):
mock_db.engine = MagicMock()
mock_session_cls.return_value.begin.return_value.__enter__.return_value = mock_session
mock_session_cls.return_value.__enter__.return_value = mock_session
result = method(api, "sub1")
@@ -327,14 +327,14 @@ class TestTriggerSubscriptionCrud:
app.test_request_context("/"),
patch("controllers.console.workspace.trigger_providers.current_user", mock_user()),
patch("controllers.console.workspace.trigger_providers.db") as mock_db,
patch("controllers.console.workspace.trigger_providers.sessionmaker") as session_cls,
patch("controllers.console.workspace.trigger_providers.Session") as session_cls,
patch(
"controllers.console.workspace.trigger_providers.TriggerProviderService.delete_trigger_provider",
side_effect=ValueError("bad"),
),
):
mock_db.engine = MagicMock()
session_cls.return_value.begin.return_value.__enter__.return_value = MagicMock()
session_cls.return_value.__enter__.return_value = MagicMock()
with pytest.raises(BadRequest):
method(api, "sub1")

View File

@@ -37,7 +37,7 @@ class TestForgotPasswordSendEmailApi:
@patch("controllers.web.forgot_password.AccountService.get_account_by_email_with_case_fallback")
@patch("controllers.web.forgot_password.AccountService.is_email_send_ip_limit", return_value=False)
@patch("controllers.web.forgot_password.extract_remote_ip", return_value="127.0.0.1")
@patch("controllers.web.forgot_password.sessionmaker")
@patch("controllers.web.forgot_password.Session")
def test_should_normalize_email_before_sending(
self,
mock_session_cls,
@@ -51,7 +51,7 @@ class TestForgotPasswordSendEmailApi:
mock_get_account.return_value = mock_account
mock_send_mail.return_value = "token-123"
mock_session = MagicMock()
mock_session_cls.return_value.begin.return_value.__enter__.return_value = mock_session
mock_session_cls.return_value.__enter__.return_value = mock_session
with patch("controllers.web.forgot_password.db", SimpleNamespace(engine="engine")):
with app.test_request_context(
@@ -153,7 +153,7 @@ class TestForgotPasswordResetApi:
@patch("controllers.web.forgot_password.ForgotPasswordResetApi._update_existing_account")
@patch("controllers.web.forgot_password.AccountService.get_account_by_email_with_case_fallback")
@patch("controllers.web.forgot_password.sessionmaker")
@patch("controllers.web.forgot_password.Session")
@patch("controllers.web.forgot_password.AccountService.revoke_reset_password_token")
@patch("controllers.web.forgot_password.AccountService.get_reset_password_data")
def test_should_fetch_account_with_fallback(
@@ -169,7 +169,7 @@ class TestForgotPasswordResetApi:
mock_account = MagicMock()
mock_get_account.return_value = mock_account
mock_session = MagicMock()
mock_session_cls.return_value.begin.return_value.__enter__.return_value = mock_session
mock_session_cls.return_value.__enter__.return_value = mock_session
with patch("controllers.web.forgot_password.db", SimpleNamespace(engine="engine")):
with app.test_request_context(
@@ -190,7 +190,7 @@ class TestForgotPasswordResetApi:
@patch("controllers.web.forgot_password.hash_password", return_value=b"hashed-value")
@patch("controllers.web.forgot_password.secrets.token_bytes", return_value=b"0123456789abcdef")
@patch("controllers.web.forgot_password.sessionmaker")
@patch("controllers.web.forgot_password.Session")
@patch("controllers.web.forgot_password.AccountService.revoke_reset_password_token")
@patch("controllers.web.forgot_password.AccountService.get_reset_password_data")
@patch("controllers.web.forgot_password.AccountService.get_account_by_email_with_case_fallback")
@@ -208,7 +208,7 @@ class TestForgotPasswordResetApi:
account = MagicMock()
mock_get_account.return_value = account
mock_session = MagicMock()
mock_session_cls.return_value.begin.return_value.__enter__.return_value = mock_session
mock_session_cls.return_value.__enter__.return_value = mock_session
with patch("controllers.web.forgot_password.db", SimpleNamespace(engine="engine")):
with app.test_request_context(
@@ -231,3 +231,4 @@ class TestForgotPasswordResetApi:
assert account.password == expected_password
expected_salt = base64.b64encode(b"0123456789abcdef").decode()
assert account.password_salt == expected_salt
mock_session.commit.assert_called_once()

View File

@@ -0,0 +1,139 @@
from unittest.mock import MagicMock, patch
import pytest
from werkzeug.exceptions import Forbidden
from controllers.console.apikey import (
BaseApiKeyListResource,
BaseApiKeyResource,
_get_resource,
)
from models.enums import ApiTokenType
@pytest.fixture
def tenant_context_admin():
with patch("controllers.console.apikey.current_account_with_tenant") as mock:
user = MagicMock()
user.is_admin_or_owner = True
mock.return_value = (user, "tenant-123")
yield mock
@pytest.fixture
def tenant_context_non_admin():
with patch("controllers.console.apikey.current_account_with_tenant") as mock:
user = MagicMock()
user.is_admin_or_owner = False
mock.return_value = (user, "tenant-123")
yield mock
@pytest.fixture
def db_mock():
with patch("controllers.console.apikey.db") as mock_db:
mock_db.session = MagicMock()
yield mock_db
@pytest.fixture(autouse=True)
def bypass_permissions():
with patch(
"controllers.console.apikey.edit_permission_required",
lambda f: f,
):
yield
class DummyApiKeyListResource(BaseApiKeyListResource):
resource_type = ApiTokenType.APP
resource_model = MagicMock()
resource_id_field = "app_id"
token_prefix = "app-"
class DummyApiKeyResource(BaseApiKeyResource):
resource_type = ApiTokenType.APP
resource_model = MagicMock()
resource_id_field = "app_id"
class TestGetResource:
def test_get_resource_success(self):
fake_resource = MagicMock()
with (
patch("controllers.console.apikey.select") as mock_select,
patch("controllers.console.apikey.Session") as mock_session,
patch("controllers.console.apikey.db") as mock_db,
):
mock_db.engine = MagicMock()
mock_select.return_value.filter_by.return_value = MagicMock()
session = mock_session.return_value.__enter__.return_value
session.execute.return_value.scalar_one_or_none.return_value = fake_resource
result = _get_resource("rid", "tid", MagicMock)
assert result == fake_resource
def test_get_resource_not_found(self):
with (
patch("controllers.console.apikey.select") as mock_select,
patch("controllers.console.apikey.Session") as mock_session,
patch("controllers.console.apikey.db") as mock_db,
patch("controllers.console.apikey.flask_restx.abort") as abort,
):
mock_db.engine = MagicMock()
mock_select.return_value.filter_by.return_value = MagicMock()
session = mock_session.return_value.__enter__.return_value
session.execute.return_value.scalar_one_or_none.return_value = None
_get_resource("rid", "tid", MagicMock)
abort.assert_called_once()
class TestBaseApiKeyListResource:
def test_get_apikeys_success(self, tenant_context_admin, db_mock):
resource = DummyApiKeyListResource()
with patch("controllers.console.apikey._get_resource"):
db_mock.session.scalars.return_value.all.return_value = [MagicMock(), MagicMock()]
result = DummyApiKeyListResource.get.__wrapped__(resource, "resource-id")
assert "items" in result
class TestBaseApiKeyResource:
def test_delete_forbidden(self, tenant_context_non_admin, db_mock):
resource = DummyApiKeyResource()
with patch("controllers.console.apikey._get_resource"):
with pytest.raises(Forbidden):
DummyApiKeyResource.delete(resource, "rid", "kid")
def test_delete_key_not_found(self, tenant_context_admin, db_mock):
resource = DummyApiKeyResource()
db_mock.session.scalar.return_value = None
with patch("controllers.console.apikey._get_resource"):
with pytest.raises(Exception) as exc_info:
DummyApiKeyResource.delete(resource, "rid", "kid")
# flask_restx.abort raises HTTPException with message in data attribute
assert exc_info.value.data["message"] == "API key not found"
def test_delete_success(self, tenant_context_admin, db_mock):
resource = DummyApiKeyResource()
db_mock.session.scalar.return_value = MagicMock()
with (
patch("controllers.console.apikey._get_resource"),
patch("controllers.console.apikey.ApiTokenCache.delete"),
):
result, status = DummyApiKeyResource.delete(resource, "rid", "kid")
assert status == 204
assert result == {"result": "success"}
db_mock.session.commit.assert_called_once()

View File

@@ -1249,9 +1249,9 @@ class TestFileConstants:
"""
def test_image_extensions_set_properties(self):
"""Test that IMAGE_EXTENSIONS frozenset has expected properties."""
# Assert - Should be immutable
assert isinstance(IMAGE_EXTENSIONS, frozenset)
"""Test that IMAGE_EXTENSIONS set has expected properties."""
# Assert - Should be a set
assert isinstance(IMAGE_EXTENSIONS, set)
# Should not be empty
assert len(IMAGE_EXTENSIONS) > 0
# Should contain common image formats
@@ -1260,9 +1260,9 @@ class TestFileConstants:
assert ext in IMAGE_EXTENSIONS or ext.upper() in IMAGE_EXTENSIONS
def test_video_extensions_set_properties(self):
"""Test that VIDEO_EXTENSIONS frozenset has expected properties."""
# Assert - Should be immutable
assert isinstance(VIDEO_EXTENSIONS, frozenset)
"""Test that VIDEO_EXTENSIONS set has expected properties."""
# Assert - Should be a set
assert isinstance(VIDEO_EXTENSIONS, set)
# Should not be empty
assert len(VIDEO_EXTENSIONS) > 0
# Should contain common video formats
@@ -1271,9 +1271,9 @@ class TestFileConstants:
assert ext in VIDEO_EXTENSIONS or ext.upper() in VIDEO_EXTENSIONS
def test_audio_extensions_set_properties(self):
"""Test that AUDIO_EXTENSIONS frozenset has expected properties."""
# Assert - Should be immutable
assert isinstance(AUDIO_EXTENSIONS, frozenset)
"""Test that AUDIO_EXTENSIONS set has expected properties."""
# Assert - Should be a set
assert isinstance(AUDIO_EXTENSIONS, set)
# Should not be empty
assert len(AUDIO_EXTENSIONS) > 0
# Should contain common audio formats
@@ -1282,9 +1282,9 @@ class TestFileConstants:
assert ext in AUDIO_EXTENSIONS or ext.upper() in AUDIO_EXTENSIONS
def test_document_extensions_set_properties(self):
"""Test that DOCUMENT_EXTENSIONS frozenset has expected properties."""
# Assert - Should be immutable
assert isinstance(DOCUMENT_EXTENSIONS, frozenset)
"""Test that DOCUMENT_EXTENSIONS set has expected properties."""
# Assert - Should be a set
assert isinstance(DOCUMENT_EXTENSIONS, set)
# Should not be empty
assert len(DOCUMENT_EXTENSIONS) > 0
# Should contain common document formats

View File

@@ -2,6 +2,5 @@ from core.rag.datasource.keyword.jieba.stopwords import STOPWORDS
def test_stopwords_loaded():
assert isinstance(STOPWORDS, frozenset)
assert "during" in STOPWORDS
assert "the" in STOPWORDS

View File

@@ -1,5 +1,4 @@
import base64
import logging
import uuid
from collections.abc import Sequence
from unittest import mock
@@ -1262,10 +1261,6 @@ def test_llm_node_image_file_to_markdown(llm_node: LLMNode):
class TestSaveMultimodalOutputAndConvertResultToMarkdown:
class _UnknownItem:
def __str__(self) -> str:
return "<unknown-item>"
def test_str_content(self, llm_node_for_multimodal):
llm_node, mock_file_saver = llm_node_for_multimodal
gen = llm_node._save_multimodal_output_and_convert_result_to_markdown(
@@ -1335,23 +1330,18 @@ class TestSaveMultimodalOutputAndConvertResultToMarkdown:
def test_unknown_content_type(self, llm_node_for_multimodal):
llm_node, mock_file_saver = llm_node_for_multimodal
gen = llm_node._save_multimodal_output_and_convert_result_to_markdown(
contents=frozenset(("hello world",)), file_saver=mock_file_saver, file_outputs=[]
contents=frozenset(["hello world"]), file_saver=mock_file_saver, file_outputs=[]
)
assert list(gen) == ["hello world"]
mock_file_saver.save_binary_string.assert_not_called()
mock_file_saver.save_remote_url.assert_not_called()
def test_unknown_item_type(self, llm_node_for_multimodal, caplog):
def test_unknown_item_type(self, llm_node_for_multimodal):
llm_node, mock_file_saver = llm_node_for_multimodal
unknown_item = self._UnknownItem()
with caplog.at_level(logging.WARNING, logger="graphon.nodes.llm.node"):
gen = llm_node._save_multimodal_output_and_convert_result_to_markdown(
contents=[unknown_item], file_saver=mock_file_saver, file_outputs=[]
)
assert list(gen) == [str(unknown_item)]
assert "unknown item type encountered" in caplog.text
gen = llm_node._save_multimodal_output_and_convert_result_to_markdown(
contents=[frozenset(["hello world"])], file_saver=mock_file_saver, file_outputs=[]
)
assert list(gen) == ["frozenset({'hello world'})"]
mock_file_saver.save_binary_string.assert_not_called()
mock_file_saver.save_remote_url.assert_not_called()

View File

@@ -837,7 +837,7 @@ class TestBuildSegmentValueErrors:
self.ValueErrorTestCase(
name="frozenset_type",
description="frozenset (unsupported type)",
test_value=frozenset((1, 2, 3)),
test_value=frozenset([1, 2, 3]),
),
self.ValueErrorTestCase(
name="memoryview_type",

View File

@@ -3,93 +3,89 @@ from pathlib import Path
import yaml # type: ignore
from dotenv import dotenv_values
BASE_API_AND_DOCKER_CONFIG_SET_DIFF: frozenset[str] = frozenset(
(
"APP_MAX_EXECUTION_TIME",
"BATCH_UPLOAD_LIMIT",
"CELERY_BEAT_SCHEDULER_TIME",
"CODE_EXECUTION_API_KEY",
"HTTP_REQUEST_MAX_CONNECT_TIMEOUT",
"HTTP_REQUEST_MAX_READ_TIMEOUT",
"HTTP_REQUEST_MAX_WRITE_TIMEOUT",
"INNER_API_KEY",
"INNER_API_KEY_FOR_PLUGIN",
"KEYWORD_DATA_SOURCE_TYPE",
"LOGIN_LOCKOUT_DURATION",
"LOG_FORMAT",
"OCI_ACCESS_KEY",
"OCI_BUCKET_NAME",
"OCI_ENDPOINT",
"OCI_REGION",
"OCI_SECRET_KEY",
"PLUGIN_DAEMON_KEY",
"PLUGIN_DAEMON_URL",
"PLUGIN_REMOTE_INSTALL_HOST",
"PLUGIN_REMOTE_INSTALL_PORT",
"REDIS_DB",
"RESEND_API_URL",
"RESPECT_XFORWARD_HEADERS_ENABLED",
"SENTRY_DSN",
"SSRF_DEFAULT_CONNECT_TIME_OUT",
"SSRF_DEFAULT_MAX_RETRIES",
"SSRF_DEFAULT_READ_TIME_OUT",
"SSRF_DEFAULT_TIME_OUT",
"SSRF_DEFAULT_WRITE_TIME_OUT",
"UPSTASH_VECTOR_TOKEN",
"UPSTASH_VECTOR_URL",
"USING_UGC_INDEX",
"WEAVIATE_BATCH_SIZE",
)
)
BASE_API_AND_DOCKER_CONFIG_SET_DIFF = {
"APP_MAX_EXECUTION_TIME",
"BATCH_UPLOAD_LIMIT",
"CELERY_BEAT_SCHEDULER_TIME",
"CODE_EXECUTION_API_KEY",
"HTTP_REQUEST_MAX_CONNECT_TIMEOUT",
"HTTP_REQUEST_MAX_READ_TIMEOUT",
"HTTP_REQUEST_MAX_WRITE_TIMEOUT",
"INNER_API_KEY",
"INNER_API_KEY_FOR_PLUGIN",
"KEYWORD_DATA_SOURCE_TYPE",
"LOGIN_LOCKOUT_DURATION",
"LOG_FORMAT",
"OCI_ACCESS_KEY",
"OCI_BUCKET_NAME",
"OCI_ENDPOINT",
"OCI_REGION",
"OCI_SECRET_KEY",
"PLUGIN_DAEMON_KEY",
"PLUGIN_DAEMON_URL",
"PLUGIN_REMOTE_INSTALL_HOST",
"PLUGIN_REMOTE_INSTALL_PORT",
"REDIS_DB",
"RESEND_API_URL",
"RESPECT_XFORWARD_HEADERS_ENABLED",
"SENTRY_DSN",
"SSRF_DEFAULT_CONNECT_TIME_OUT",
"SSRF_DEFAULT_MAX_RETRIES",
"SSRF_DEFAULT_READ_TIME_OUT",
"SSRF_DEFAULT_TIME_OUT",
"SSRF_DEFAULT_WRITE_TIME_OUT",
"UPSTASH_VECTOR_TOKEN",
"UPSTASH_VECTOR_URL",
"USING_UGC_INDEX",
"WEAVIATE_BATCH_SIZE",
}
BASE_API_AND_DOCKER_COMPOSE_CONFIG_SET_DIFF: frozenset[str] = frozenset(
(
"BATCH_UPLOAD_LIMIT",
"CELERY_BEAT_SCHEDULER_TIME",
"HTTP_REQUEST_MAX_CONNECT_TIMEOUT",
"HTTP_REQUEST_MAX_READ_TIMEOUT",
"HTTP_REQUEST_MAX_WRITE_TIMEOUT",
"INNER_API_KEY",
"INNER_API_KEY_FOR_PLUGIN",
"KEYWORD_DATA_SOURCE_TYPE",
"LOGIN_LOCKOUT_DURATION",
"LOG_FORMAT",
"OPENDAL_FS_ROOT",
"OPENDAL_S3_ACCESS_KEY_ID",
"OPENDAL_S3_BUCKET",
"OPENDAL_S3_ENDPOINT",
"OPENDAL_S3_REGION",
"OPENDAL_S3_ROOT",
"OPENDAL_S3_SECRET_ACCESS_KEY",
"OPENDAL_S3_SERVER_SIDE_ENCRYPTION",
"PGVECTOR_MAX_CONNECTION",
"PGVECTOR_MIN_CONNECTION",
"PGVECTO_RS_DATABASE",
"PGVECTO_RS_HOST",
"PGVECTO_RS_PASSWORD",
"PGVECTO_RS_PORT",
"PGVECTO_RS_USER",
"PLUGIN_DAEMON_KEY",
"PLUGIN_DAEMON_URL",
"PLUGIN_REMOTE_INSTALL_HOST",
"PLUGIN_REMOTE_INSTALL_PORT",
"RESPECT_XFORWARD_HEADERS_ENABLED",
"SCARF_NO_ANALYTICS",
"SSRF_DEFAULT_CONNECT_TIME_OUT",
"SSRF_DEFAULT_MAX_RETRIES",
"SSRF_DEFAULT_READ_TIME_OUT",
"SSRF_DEFAULT_TIME_OUT",
"SSRF_DEFAULT_WRITE_TIME_OUT",
"STORAGE_OPENDAL_SCHEME",
"SUPABASE_API_KEY",
"SUPABASE_BUCKET_NAME",
"SUPABASE_URL",
"USING_UGC_INDEX",
"VIKINGDB_CONNECTION_TIMEOUT",
"VIKINGDB_SOCKET_TIMEOUT",
"WEAVIATE_BATCH_SIZE",
)
)
BASE_API_AND_DOCKER_COMPOSE_CONFIG_SET_DIFF = {
"BATCH_UPLOAD_LIMIT",
"CELERY_BEAT_SCHEDULER_TIME",
"HTTP_REQUEST_MAX_CONNECT_TIMEOUT",
"HTTP_REQUEST_MAX_READ_TIMEOUT",
"HTTP_REQUEST_MAX_WRITE_TIMEOUT",
"INNER_API_KEY",
"INNER_API_KEY_FOR_PLUGIN",
"KEYWORD_DATA_SOURCE_TYPE",
"LOGIN_LOCKOUT_DURATION",
"LOG_FORMAT",
"OPENDAL_FS_ROOT",
"OPENDAL_S3_ACCESS_KEY_ID",
"OPENDAL_S3_BUCKET",
"OPENDAL_S3_ENDPOINT",
"OPENDAL_S3_REGION",
"OPENDAL_S3_ROOT",
"OPENDAL_S3_SECRET_ACCESS_KEY",
"OPENDAL_S3_SERVER_SIDE_ENCRYPTION",
"PGVECTOR_MAX_CONNECTION",
"PGVECTOR_MIN_CONNECTION",
"PGVECTO_RS_DATABASE",
"PGVECTO_RS_HOST",
"PGVECTO_RS_PASSWORD",
"PGVECTO_RS_PORT",
"PGVECTO_RS_USER",
"PLUGIN_DAEMON_KEY",
"PLUGIN_DAEMON_URL",
"PLUGIN_REMOTE_INSTALL_HOST",
"PLUGIN_REMOTE_INSTALL_PORT",
"RESPECT_XFORWARD_HEADERS_ENABLED",
"SCARF_NO_ANALYTICS",
"SSRF_DEFAULT_CONNECT_TIME_OUT",
"SSRF_DEFAULT_MAX_RETRIES",
"SSRF_DEFAULT_READ_TIME_OUT",
"SSRF_DEFAULT_TIME_OUT",
"SSRF_DEFAULT_WRITE_TIME_OUT",
"STORAGE_OPENDAL_SCHEME",
"SUPABASE_API_KEY",
"SUPABASE_BUCKET_NAME",
"SUPABASE_URL",
"USING_UGC_INDEX",
"VIKINGDB_CONNECTION_TIMEOUT",
"VIKINGDB_SOCKET_TIMEOUT",
"WEAVIATE_BATCH_SIZE",
}
API_CONFIG_SET = set(dotenv_values(Path("api") / Path(".env.example")).keys())
DOCKER_CONFIG_SET = set(dotenv_values(Path("docker") / Path(".env.example")).keys())

215
pnpm-lock.yaml generated
View File

@@ -235,8 +235,8 @@ catalogs:
specifier: 0.5.21
version: 0.5.21
'@vitest/coverage-v8':
specifier: 4.1.1
version: 4.1.1
specifier: 4.1.2
version: 4.1.2
abcjs:
specifier: 6.6.2
version: 6.6.2
@@ -570,6 +570,7 @@ overrides:
array.prototype.flatmap: npm:@nolyfill/array.prototype.flatmap@^1.0.44
array.prototype.tosorted: npm:@nolyfill/array.prototype.tosorted@^1.0.44
assert: npm:@nolyfill/assert@^1.0.26
axios: 1.14.0
brace-expansion@<2.0.2: 2.0.2
canvas: ^3.2.2
devalue@<5.3.2: 5.3.2
@@ -647,6 +648,10 @@ importers:
version: 0.1.14(@types/node@25.5.0)(esbuild@0.27.2)(happy-dom@20.8.9)(jiti@2.6.1)(sass@1.98.0)(terser@5.46.1)(tsx@4.21.0)(typescript@5.9.3)(vite@8.0.3(@emnapi/core@1.9.1)(@emnapi/runtime@1.9.1)(@types/node@25.5.0)(esbuild@0.27.2)(jiti@2.6.1)(sass@1.98.0)(terser@5.46.1)(tsx@4.21.0)(yaml@2.8.3))(yaml@2.8.3)
sdks/nodejs-client:
dependencies:
axios:
specifier: 1.14.0
version: 1.14.0
devDependencies:
'@eslint/js':
specifier: 'catalog:'
@@ -662,7 +667,7 @@ importers:
version: 8.57.2(eslint@10.1.0(jiti@2.6.1))(typescript@5.9.3)
'@vitest/coverage-v8':
specifier: 'catalog:'
version: 4.1.1(@voidzero-dev/vite-plus-test@0.1.14(@types/node@25.5.0)(esbuild@0.27.2)(happy-dom@20.8.9)(jiti@2.6.1)(sass@1.98.0)(terser@5.46.1)(tsx@4.21.0)(typescript@5.9.3)(vite@8.0.3(@emnapi/core@1.9.1)(@emnapi/runtime@1.9.1)(@types/node@25.5.0)(esbuild@0.27.2)(jiti@2.6.1)(sass@1.98.0)(terser@5.46.1)(tsx@4.21.0)(yaml@2.8.3))(yaml@2.8.3))
version: 4.1.2(@voidzero-dev/vite-plus-test@0.1.14(@types/node@25.5.0)(esbuild@0.27.2)(happy-dom@20.8.9)(jiti@2.6.1)(sass@1.98.0)(terser@5.46.1)(tsx@4.21.0)(typescript@5.9.3)(vite@8.0.3(@emnapi/core@1.9.1)(@emnapi/runtime@1.9.1)(@types/node@25.5.0)(esbuild@0.27.2)(jiti@2.6.1)(sass@1.98.0)(terser@5.46.1)(tsx@4.21.0)(yaml@2.8.3))(yaml@2.8.3))
eslint:
specifier: 'catalog:'
version: 10.1.0(jiti@2.6.1)
@@ -1119,7 +1124,7 @@ importers:
version: 0.5.21(@voidzero-dev/vite-plus-core@0.1.14(@types/node@25.5.0)(esbuild@0.27.2)(jiti@1.21.7)(sass@1.98.0)(terser@5.46.1)(tsx@4.21.0)(typescript@5.9.3)(yaml@2.8.3))(react-dom@19.2.4(react@19.2.4))(react-server-dom-webpack@19.2.4(react-dom@19.2.4(react@19.2.4))(react@19.2.4)(webpack@5.105.4(esbuild@0.27.2)(uglify-js@3.19.3)))(react@19.2.4)
'@vitest/coverage-v8':
specifier: 'catalog:'
version: 4.1.1(@voidzero-dev/vite-plus-test@0.1.14(@types/node@25.5.0)(@voidzero-dev/vite-plus-core@0.1.14(@types/node@25.5.0)(esbuild@0.27.2)(jiti@1.21.7)(sass@1.98.0)(terser@5.46.1)(tsx@4.21.0)(typescript@5.9.3)(yaml@2.8.3))(esbuild@0.27.2)(happy-dom@20.8.9)(jiti@1.21.7)(sass@1.98.0)(terser@5.46.1)(tsx@4.21.0)(typescript@5.9.3)(yaml@2.8.3))
version: 4.1.2(@voidzero-dev/vite-plus-test@0.1.14(@types/node@25.5.0)(@voidzero-dev/vite-plus-core@0.1.14(@types/node@25.5.0)(esbuild@0.27.2)(jiti@1.21.7)(sass@1.98.0)(terser@5.46.1)(tsx@4.21.0)(typescript@5.9.3)(yaml@2.8.3))(esbuild@0.27.2)(happy-dom@20.8.9)(jiti@1.21.7)(sass@1.98.0)(terser@5.46.1)(tsx@4.21.0)(typescript@5.9.3)(yaml@2.8.3))
agentation:
specifier: 'catalog:'
version: 3.0.2(react-dom@19.2.4(react@19.2.4))(react@19.2.4)
@@ -2400,6 +2405,10 @@ packages:
resolution: {integrity: sha512-oGB+UxlgWcgQkgwo8GcEGwemoTFt3FIO9ababBmaGwXIoBKZ+GTy0pP185beGg7Llih/NSHSV2XAs1lnznocSg==}
engines: {node: '>= 8'}
'@nolyfill/hasown@1.0.44':
resolution: {integrity: sha512-GA/21lkTr2PAQuT6jGnhLuBD5IFd/AEhBXJ/tf33+/bVxPxg+5ejKx9jGQGnyV/P0eSmdup5E+s8b2HL6lOrwQ==}
engines: {node: '>=12.4.0'}
'@nolyfill/is-core-module@1.0.39':
resolution: {integrity: sha512-nn5ozdjYQpUCZlWGuxcJY/KpxkWQs4DcbMCmKojjyrYDEAGy4Ce19NN4v5MduafTwJlbKc99UA8YhSVqq9yPZA==}
engines: {node: '>=12.4.0'}
@@ -4431,11 +4440,11 @@ packages:
react-server-dom-webpack:
optional: true
'@vitest/coverage-v8@4.1.1':
resolution: {integrity: sha512-nZ4RWwGCoGOQRMmU/Q9wlUY540RVRxJZ9lxFsFfy0QV7Zmo5VVBhB6Sl9Xa0KIp2iIs3zWfPlo9LcY1iqbpzCw==}
'@vitest/coverage-v8@4.1.2':
resolution: {integrity: sha512-sPK//PHO+kAkScb8XITeB1bf7fsk85Km7+rt4eeuRR3VS1/crD47cmV5wicisJmjNdfeokTZwjMk4Mj2d58Mgg==}
peerDependencies:
'@vitest/browser': 4.1.1
vitest: 4.1.1
'@vitest/browser': 4.1.2
vitest: 4.1.2
peerDependenciesMeta:
'@vitest/browser':
optional: true
@@ -4462,8 +4471,8 @@ packages:
'@vitest/pretty-format@3.2.4':
resolution: {integrity: sha512-IVNZik8IVRJRTr9fxlitMKeJeXFFFN0JaB9PHPGQ8NKQbGpfjlTx9zO4RefN8gp7eqjNy8nyK3NZmBzOPeIxtA==}
'@vitest/pretty-format@4.1.1':
resolution: {integrity: sha512-GM+TEQN5WhOygr1lp7skeVjdLPqqWMHsfzXrcHAqZJi/lIVh63H0kaRCY8MDhNWikx19zBUK8ceaLB7X5AH9NQ==}
'@vitest/pretty-format@4.1.2':
resolution: {integrity: sha512-dwQga8aejqeuB+TvXCMzSQemvV9hNEtDDpgUKDzOmNQayl2OG241PSWeJwKRH3CiC+sESrmoFd49rfnq7T4RnA==}
'@vitest/spy@3.2.4':
resolution: {integrity: sha512-vAfasCOe6AIK70iP5UD11Ac4siNUNJ9i/9PZ3NKx07sG6sUxeag1LWdNrMWeKKYBLlzuK+Gn65Yd5nyL6ds+nw==}
@@ -4471,8 +4480,8 @@ packages:
'@vitest/utils@3.2.4':
resolution: {integrity: sha512-fB2V0JFrQSMsCo9HiSq3Ezpdv4iYaXRG1Sx8edX3MwxfyNn83mKiGzOcH+Fkxt4MHxr3y42fQi1oeAInqgX2QA==}
'@vitest/utils@4.1.1':
resolution: {integrity: sha512-cNxAlaB3sHoCdL6pj6yyUXv9Gry1NHNg0kFTXdvSIZXLHsqKH7chiWOkwJ5s5+d/oMwcoG9T0bKU38JZWKusrQ==}
'@vitest/utils@4.1.2':
resolution: {integrity: sha512-xw2/TiX82lQHA06cgbqRKFb5lCAy3axQ4H4SoUFhUsg+wztiet+co86IAMDtF6Vm1hc7J6j09oh/rgDn+JdKIQ==}
'@voidzero-dev/vite-plus-core@0.1.14':
resolution: {integrity: sha512-CCWzdkfW0fo0cQNlIsYp5fOuH2IwKuPZEb2UY2Z8gXcp5pG74A82H2Pthj0heAuvYTAnfT7kEC6zM+RbiBgQbg==}
@@ -4832,6 +4841,9 @@ packages:
async@3.2.6:
resolution: {integrity: sha512-htCUDlxyyCLMgaM3xXg0C0LW2xqfuQ6p05pCEIsXuyQ+a1koYKTuBMzRNwmybfLgvJDMd0r1LTn4+E0Ti6C2AA==}
asynckit@0.4.0:
resolution: {integrity: sha512-Oei9OH4tRh0YqU3GxhX79dM/mwVgvbZJaSNaRk+bshkj0S5cfHcgYakreBjrHwatXKbz+IoIdYLxrKim2MjW0Q==}
autoprefixer@10.4.27:
resolution: {integrity: sha512-NP9APE+tO+LuJGn7/9+cohklunJsXWiaWEfV3si4Gi/XHDwVNgkwr1J3RQYFIvPy76GmJ9/bW8vyoU1LcxwKHA==}
engines: {node: ^10 || ^12 || >=14}
@@ -4839,6 +4851,9 @@ packages:
peerDependencies:
postcss: ^8.1.0
axios@1.14.0:
resolution: {integrity: sha512-3Y8yrqLSwjuzpXuZ0oIYZ/XGgLwUIBU3uLvbcpb0pidD9ctpShJd43KSlEEkVQg6DS0G9NKyzOvBfUtDKEyHvQ==}
bail@2.0.2:
resolution: {integrity: sha512-0xO6mYd7JB2YesxDKplafRpsiOzPt9V02ddPCLbY1xYGPOX24NTyN50qnUxgCPcSoYMhKpAuBTjQoRZCAkUDRw==}
@@ -4936,6 +4951,10 @@ packages:
resolution: {integrity: sha512-tixWYgm5ZoOD+3g6UTea91eow5z6AAHaho3g0V9CNSNb45gM8SmflpAc+GRd1InC4AqN/07Unrgp56Y94N9hJQ==}
engines: {node: '>=20.19.0'}
call-bind-apply-helpers@1.0.2:
resolution: {integrity: sha512-Sp1ablJ0ivDkSzjcaJdxEunN5/XvksFJ2sMBFfq6x0ryhQV/2b/KwFe21cMpmHtPOSij8K99/wSfoEuTObmuMQ==}
engines: {node: '>= 0.4'}
callsites@3.1.0:
resolution: {integrity: sha512-P8BjAsXvZS+VIDUI11hHCQEv74YT67YUi5JJFNWIqL235sBmjX4+qx9Muvls5ivyNENctx46xQLQ3aTuE7ssaQ==}
engines: {node: '>=6'}
@@ -5107,6 +5126,10 @@ packages:
colorette@2.0.20:
resolution: {integrity: sha512-IfEDxwoWIjkeXL1eXcDiow4UbKjhLdq6/EuSVR9GMN7KVH3r9gQ83e73hsz1Nd1T3ijd5xv1wcWRYO+D6kCI2w==}
combined-stream@1.0.8:
resolution: {integrity: sha512-FQN4MRfuJeHf7cBbBMJFXhKSDq+2kAArBlmRBvcvFE5BB1HZKXtSFASDhdlz9zOYwxh8lDdnvmMOe/+5cdoEdg==}
engines: {node: '>= 0.8'}
comma-separated-tokens@1.0.8:
resolution: {integrity: sha512-GHuDRO12Sypu2cV70d1dkA2EUmXHgntrzbpvOB+Qy+49ypNfGgFQIC2fhhXbnyrJRynDCAARsT7Ou0M6hirpfw==}
@@ -5441,6 +5464,10 @@ packages:
delaunator@5.1.0:
resolution: {integrity: sha512-AGrQ4QSgssa1NGmWmLPqN5NY2KajF5MqxetNEO+o0n3ZwZZeTmt7bBnvzHWrmkZFxGgr4HdyFgelzgi06otLuQ==}
delayed-stream@1.0.0:
resolution: {integrity: sha512-ZySD7Nf91aLB0RxL4KGrKHBXl7Eds1DAmEdcoVawXnLD7SDhpNgtuII2aAkg7a7QS41jxPSZ17p4VdGnMHk3MQ==}
engines: {node: '>=0.4.0'}
dequal@2.0.3:
resolution: {integrity: sha512-0je+qPKHEMohvfRTCEo3CrPG6cAzAYgmzKyxRiYSSDkS6eGJdyVJm7WaYA5ECaAD9wLB2T4EEeymA5aFVcYXCA==}
engines: {node: '>=6'}
@@ -5506,6 +5533,10 @@ packages:
resolution: {integrity: sha512-uBq4egWHTcTt33a72vpSG0z3HnPuIl6NqYcTrKEg2azoEyl2hpW0zqlxysq2pK9HlDIHyHyakeYaYnSAwd8bow==}
engines: {node: '>=12'}
dunder-proto@1.0.1:
resolution: {integrity: sha512-KIN/nDJBQRcXw0MLVhZE9iQHmG68qAVIBg9CqmUYjmQIhgij9U5MFvrqkUL5FbtyyzZuOeOt0zdeRe4UY7ct+A==}
engines: {node: '>= 0.4'}
echarts-for-react@3.0.6:
resolution: {integrity: sha512-4zqLgTGWS3JvkQDXjzkR1k1CHRdpd6by0988TWMJgnvDytegWLbeP/VNZmMa+0VJx2eD7Y632bi2JquXDgiGJg==}
peerDependencies:
@@ -5582,12 +5613,28 @@ packages:
error-stack-parser@2.1.4:
resolution: {integrity: sha512-Sk5V6wVazPhq5MhpO+AUxJn5x7XSXGl1R93Vn7i+zS15KDVxQijejNCrz8340/2bgLBjR9GtEG8ZVKONDjcqGQ==}
es-define-property@1.0.1:
resolution: {integrity: sha512-e3nRfgfUZ4rNGL232gUgX06QNyyez04KdjFrF+LTRoOXmrOgFKDg4BCdsjW8EnT69eqdYGmRpJwiPVYNrCaW3g==}
engines: {node: '>= 0.4'}
es-errors@1.3.0:
resolution: {integrity: sha512-Zf5H2Kxt2xjTvbJvP2ZWLEICxA6j+hAmMzIlypy4xcBg1vKVnx89Wy0GbS+kf5cwCVFFzdCFh2XSCFNULS6csw==}
engines: {node: '>= 0.4'}
es-module-lexer@1.7.0:
resolution: {integrity: sha512-jEQoCwk8hyb2AZziIOLhDqpm5+2ww5uIE6lkO/6jcOCusfk6LhMHpXXfBLXTZ7Ydyt0j4VoUQv6uGNYbdW+kBA==}
es-module-lexer@2.0.0:
resolution: {integrity: sha512-5POEcUuZybH7IdmGsD8wlf0AI55wMecM9rVBTI/qEAy2c1kTOm3DjFYjrBdI2K3BaJjJYfYFeRtM0t9ssnRuxw==}
es-object-atoms@1.1.1:
resolution: {integrity: sha512-FGgH2h8zKNim9ljj7dankFPcICIK9Cp5bm+c2gQSYePhpaG5+esrLODihIorn+Pe6FGJzWhXQotPv73jTaldXA==}
engines: {node: '>= 0.4'}
es-set-tostringtag@2.1.0:
resolution: {integrity: sha512-j6vWzfrGVfyXxge+O0x5sh6cvxAog0a/4Rdd2K36zCMV5eJ+/+tOAngRO8cODMNWbVRdVlmGZQL2YS3yR8bIUA==}
engines: {node: '>= 0.4'}
es-toolkit@1.45.1:
resolution: {integrity: sha512-/jhoOj/Fx+A+IIyDNOvO3TItGmlMKhtX8ISAHKE90c4b/k1tqaqEZ+uUqfpU8DMnW5cgNJv606zS55jGvza0Xw==}
@@ -6068,6 +6115,19 @@ packages:
flatted@3.4.2:
resolution: {integrity: sha512-PjDse7RzhcPkIJwy5t7KPWQSZ9cAbzQXcafsetQoD7sOJRQlGikNbx7yZp2OotDnJyrDcbyRq3Ttb18iYOqkxA==}
follow-redirects@1.15.11:
resolution: {integrity: sha512-deG2P0JfjrTxl50XGCDyfI97ZGVCxIpfKYmfyrQ54n5FO/0gfIES8C/Psl6kWVDolizcaaxZJnTS0QSMxvnsBQ==}
engines: {node: '>=4.0'}
peerDependencies:
debug: '*'
peerDependenciesMeta:
debug:
optional: true
form-data@4.0.5:
resolution: {integrity: sha512-8RipRLol37bNs2bhoV67fiTEvdTrbMUYcFTiy3+wuuOnUog2QBHCZWXDRijWQfAkhBj2Uf5UnVaiWwA5vdd82w==}
engines: {node: '>= 6'}
format@0.2.2:
resolution: {integrity: sha512-wzsgA6WOq+09wrU1tsJ09udeR/YZRaeArL9e1wPbFg3GG2yDnC2ldKpxs4xunpFF9DgqCqOIra3bc1HWrJ37Ww==}
engines: {node: '>=0.4.x'}
@@ -6104,6 +6164,9 @@ packages:
engines: {node: ^8.16.0 || ^10.6.0 || >=11.0.0}
os: [darwin]
function-bind@1.1.2:
resolution: {integrity: sha512-7XHNxH7qX9xG5mIwxkhumTox/MIRNcOgDrxWsMt2pAr23WHp6MrRlN7FBSFpCpr+oVO0F744iUgR82nJMfG2SA==}
functional-red-black-tree@1.0.1:
resolution: {integrity: sha512-dsKNQNdj6xA3T+QlADDA7mOSlX0qiMINjn0cgr+eGHGsbSHzTabcIogz2+p/iqP1Xs6EP/sS2SbqH+brGTbq0g==}
@@ -6118,10 +6181,18 @@ packages:
resolution: {integrity: sha512-CQ+bEO+Tva/qlmw24dCejulK5pMzVnUOFOijVogd3KQs07HnRIgp8TGipvCCRT06xeYEbpbgwaCxglFyiuIcmA==}
engines: {node: '>=18'}
get-intrinsic@1.3.0:
resolution: {integrity: sha512-9fSjSaos/fRIVIp+xSJlE6lfwhES7LNtKaCBIamHsjr2na1BiABJPo0mOjjz8GJDURarmCPGqaiVg5mfjb98CQ==}
engines: {node: '>= 0.4'}
get-nonce@1.0.1:
resolution: {integrity: sha512-FJhYRoDaiatfEkUK8HKlicmu/3SGFD51q3itKDGoSTysQJBnfOcxU5GxnhE1E6soB76MbT0MBtnKJuXyAx+96Q==}
engines: {node: '>=6'}
get-proto@1.0.1:
resolution: {integrity: sha512-sTSfBjoXBp89JvIKIefqw7U2CCebsc74kiY6awiGogKtoSGbgjYE/G/+l9sF3MWFPNc9IcoOC4ODfKHfxFmp0g==}
engines: {node: '>= 0.4'}
get-stream@5.2.0:
resolution: {integrity: sha512-nBF+F1rAZVCu/p7rjzgA+Yb4lfYXrpl7a6VmJrU8wF9I1CKvP/QwPNZHnOlwbTkY6dvtFIzFMSyQXbLoTQPRpA==}
engines: {node: '>=8'}
@@ -6178,6 +6249,10 @@ packages:
peerDependencies:
csstype: ^3.0.10
gopd@1.2.0:
resolution: {integrity: sha512-ZUKRh6/kUFoAiTAtTYPZJ3hw9wNxx+BIBOijnlG9PnrJsCcSjs1wyyD6vJpaYtgnzDrKYRSqf3OO6Rfa93xsRg==}
engines: {node: '>= 0.4'}
graceful-fs@4.2.11:
resolution: {integrity: sha512-RbJ5/jmFcNNCcDV5o9eTnBLJ/HszWV0P73bc+Ff4nS/rJj+YaS6IGyiOL0VoBYX+l1Wrl3k63h/KrH+nhJ0XvQ==}
@@ -6196,6 +6271,14 @@ packages:
resolution: {integrity: sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ==}
engines: {node: '>=8'}
has-symbols@1.1.0:
resolution: {integrity: sha512-1cDNdwJ2Jaohmb3sg4OmKaMBwuC48sYni5HUw2DvsC8LjGTLK9h+eb1X6RyuOHe4hT0ULCW68iomhjUoKUqlPQ==}
engines: {node: '>= 0.4'}
has-tostringtag@1.0.2:
resolution: {integrity: sha512-NqADB8VjPFLM2V0VvHUewwwsw0ZWBaIdgo+ieHtK3hasLz4qeCRjYcqfB6AQrBggRKppKF8L52/VqdVsO47Dlw==}
engines: {node: '>= 0.4'}
hast-util-from-dom@5.0.1:
resolution: {integrity: sha512-N+LqofjR2zuzTjCPzyDUdSshy4Ma6li7p/c3pA78uTwzFgENbgbUrm2ugwsOdcjI1muO+o6Dgzp9p8WHtn/39Q==}
@@ -6837,6 +6920,10 @@ packages:
engines: {node: '>= 20'}
hasBin: true
math-intrinsics@1.1.0:
resolution: {integrity: sha512-/IXtbwEk5HTPyEwyKX6hGkYXxM9nbj64B+ilVJnC/R6B0pH5G4V3b0pVbL7DBj4tkhBAppbQUlf6F6Xl9LHu1g==}
engines: {node: '>= 0.4'}
mdast-util-directive@3.1.0:
resolution: {integrity: sha512-I3fNFt+DHmpWCYAT7quoM6lHf9wuqtI+oCOfvILnoicNIqjh5E3dEJWiXuYME2gNe8vl1iMQwyUHa7bgFmak6Q==}
@@ -7564,6 +7651,10 @@ packages:
property-information@7.1.0:
resolution: {integrity: sha512-TwEZ+X+yCJmYfL7TPUOcvBZ4QfoT5YenQiJuX//0th53DE6w0xxLEtfK3iyryQFddXuvkIk51EEgrJQ0WJkOmQ==}
proxy-from-env@2.1.0:
resolution: {integrity: sha512-cJ+oHTW1VAEa8cJslgmUZrc+sjRKgAKl3Zyse6+PV38hZe/V6Z14TbCuXcan9F9ghlz4QrFr2c92TNF82UkYHA==}
engines: {node: '>=10'}
pump@3.0.4:
resolution: {integrity: sha512-VS7sjc6KR7e1ukRFhQSY5LM2uBWAUPiOPa/A3mkKmiMwSmRFUITt0xuj+/lesgnCv+dPIEYlkzrcyXgquIHMcA==}
@@ -10406,6 +10497,8 @@ snapshots:
'@nodelib/fs.scandir': 2.1.5
fastq: 1.20.1
'@nolyfill/hasown@1.0.44': {}
'@nolyfill/is-core-module@1.0.39': {}
'@nolyfill/safer-buffer@1.0.44': {}
@@ -12261,10 +12354,10 @@ snapshots:
optionalDependencies:
react-server-dom-webpack: 19.2.4(react-dom@19.2.4(react@19.2.4))(react@19.2.4)(webpack@5.105.4(esbuild@0.27.2)(uglify-js@3.19.3))
'@vitest/coverage-v8@4.1.1(@voidzero-dev/vite-plus-test@0.1.14(@types/node@25.5.0)(@voidzero-dev/vite-plus-core@0.1.14(@types/node@25.5.0)(esbuild@0.27.2)(jiti@1.21.7)(sass@1.98.0)(terser@5.46.1)(tsx@4.21.0)(typescript@5.9.3)(yaml@2.8.3))(esbuild@0.27.2)(happy-dom@20.8.9)(jiti@1.21.7)(sass@1.98.0)(terser@5.46.1)(tsx@4.21.0)(typescript@5.9.3)(yaml@2.8.3))':
'@vitest/coverage-v8@4.1.2(@voidzero-dev/vite-plus-test@0.1.14(@types/node@25.5.0)(@voidzero-dev/vite-plus-core@0.1.14(@types/node@25.5.0)(esbuild@0.27.2)(jiti@1.21.7)(sass@1.98.0)(terser@5.46.1)(tsx@4.21.0)(typescript@5.9.3)(yaml@2.8.3))(esbuild@0.27.2)(happy-dom@20.8.9)(jiti@1.21.7)(sass@1.98.0)(terser@5.46.1)(tsx@4.21.0)(typescript@5.9.3)(yaml@2.8.3))':
dependencies:
'@bcoe/v8-coverage': 1.0.2
'@vitest/utils': 4.1.1
'@vitest/utils': 4.1.2
ast-v8-to-istanbul: 1.0.0
istanbul-lib-coverage: 3.2.2
istanbul-lib-report: 3.0.1
@@ -12275,10 +12368,10 @@ snapshots:
tinyrainbow: 3.1.0
vitest: '@voidzero-dev/vite-plus-test@0.1.14(@types/node@25.5.0)(@voidzero-dev/vite-plus-core@0.1.14(@types/node@25.5.0)(esbuild@0.27.2)(jiti@1.21.7)(sass@1.98.0)(terser@5.46.1)(tsx@4.21.0)(typescript@5.9.3)(yaml@2.8.3))(esbuild@0.27.2)(happy-dom@20.8.9)(jiti@1.21.7)(sass@1.98.0)(terser@5.46.1)(tsx@4.21.0)(typescript@5.9.3)(yaml@2.8.3)'
'@vitest/coverage-v8@4.1.1(@voidzero-dev/vite-plus-test@0.1.14(@types/node@25.5.0)(esbuild@0.27.2)(happy-dom@20.8.9)(jiti@2.6.1)(sass@1.98.0)(terser@5.46.1)(tsx@4.21.0)(typescript@5.9.3)(vite@8.0.3(@emnapi/core@1.9.1)(@emnapi/runtime@1.9.1)(@types/node@25.5.0)(esbuild@0.27.2)(jiti@2.6.1)(sass@1.98.0)(terser@5.46.1)(tsx@4.21.0)(yaml@2.8.3))(yaml@2.8.3))':
'@vitest/coverage-v8@4.1.2(@voidzero-dev/vite-plus-test@0.1.14(@types/node@25.5.0)(esbuild@0.27.2)(happy-dom@20.8.9)(jiti@2.6.1)(sass@1.98.0)(terser@5.46.1)(tsx@4.21.0)(typescript@5.9.3)(vite@8.0.3(@emnapi/core@1.9.1)(@emnapi/runtime@1.9.1)(@types/node@25.5.0)(esbuild@0.27.2)(jiti@2.6.1)(sass@1.98.0)(terser@5.46.1)(tsx@4.21.0)(yaml@2.8.3))(yaml@2.8.3))':
dependencies:
'@bcoe/v8-coverage': 1.0.2
'@vitest/utils': 4.1.1
'@vitest/utils': 4.1.2
ast-v8-to-istanbul: 1.0.0
istanbul-lib-coverage: 3.2.2
istanbul-lib-report: 3.0.1
@@ -12313,7 +12406,7 @@ snapshots:
dependencies:
tinyrainbow: 2.0.0
'@vitest/pretty-format@4.1.1':
'@vitest/pretty-format@4.1.2':
dependencies:
tinyrainbow: 3.1.0
@@ -12327,9 +12420,9 @@ snapshots:
loupe: 3.2.1
tinyrainbow: 2.0.0
'@vitest/utils@4.1.1':
'@vitest/utils@4.1.2':
dependencies:
'@vitest/pretty-format': 4.1.1
'@vitest/pretty-format': 4.1.2
convert-source-map: 2.0.0
tinyrainbow: 3.1.0
@@ -12723,6 +12816,8 @@ snapshots:
async@3.2.6: {}
asynckit@0.4.0: {}
autoprefixer@10.4.27(postcss@8.5.8):
dependencies:
browserslist: 4.28.1
@@ -12732,6 +12827,14 @@ snapshots:
postcss: 8.5.8
postcss-value-parser: 4.2.0
axios@1.14.0:
dependencies:
follow-redirects: 1.15.11
form-data: 4.0.5
proxy-from-env: 2.1.0
transitivePeerDependencies:
- debug
bail@2.0.2: {}
balanced-match@1.0.2: {}
@@ -12811,6 +12914,11 @@ snapshots:
cac@7.0.0: {}
call-bind-apply-helpers@1.0.2:
dependencies:
es-errors: 1.3.0
function-bind: 1.1.2
callsites@3.1.0: {}
camelcase-css@2.0.1: {}
@@ -13000,6 +13108,10 @@ snapshots:
colorette@2.0.20: {}
combined-stream@1.0.8:
dependencies:
delayed-stream: 1.0.0
comma-separated-tokens@1.0.8: {}
comma-separated-tokens@2.0.3: {}
@@ -13329,6 +13441,8 @@ snapshots:
dependencies:
robust-predicates: 3.0.3
delayed-stream@1.0.0: {}
dequal@2.0.3: {}
destr@2.0.5: {}
@@ -13385,6 +13499,12 @@ snapshots:
dotenv@16.6.1: {}
dunder-proto@1.0.1:
dependencies:
call-bind-apply-helpers: 1.0.2
es-errors: 1.3.0
gopd: 1.2.0
echarts-for-react@3.0.6(echarts@6.0.0)(react@19.2.4):
dependencies:
echarts: 6.0.0
@@ -13451,10 +13571,25 @@ snapshots:
dependencies:
stackframe: 1.3.4
es-define-property@1.0.1: {}
es-errors@1.3.0: {}
es-module-lexer@1.7.0: {}
es-module-lexer@2.0.0: {}
es-object-atoms@1.1.1:
dependencies:
es-errors: 1.3.0
es-set-tostringtag@2.1.0:
dependencies:
es-errors: 1.3.0
get-intrinsic: 1.3.0
has-tostringtag: 1.0.2
hasown: '@nolyfill/hasown@1.0.44'
es-toolkit@1.45.1: {}
esast-util-from-estree@2.0.0:
@@ -14209,6 +14344,16 @@ snapshots:
flatted@3.4.2: {}
follow-redirects@1.15.11: {}
form-data@4.0.5:
dependencies:
asynckit: 0.4.0
combined-stream: 1.0.8
es-set-tostringtag: 2.1.0
hasown: '@nolyfill/hasown@1.0.44'
mime-types: 2.1.35
format@0.2.2: {}
formatly@0.3.0:
@@ -14235,6 +14380,8 @@ snapshots:
fsevents@2.3.3:
optional: true
function-bind@1.1.2: {}
functional-red-black-tree@1.0.1: {}
fzf@0.5.2: {}
@@ -14243,8 +14390,26 @@ snapshots:
get-east-asian-width@1.5.0: {}
get-intrinsic@1.3.0:
dependencies:
call-bind-apply-helpers: 1.0.2
es-define-property: 1.0.1
es-errors: 1.3.0
es-object-atoms: 1.1.1
function-bind: 1.1.2
get-proto: 1.0.1
gopd: 1.2.0
has-symbols: 1.1.0
hasown: '@nolyfill/hasown@1.0.44'
math-intrinsics: 1.1.0
get-nonce@1.0.1: {}
get-proto@1.0.1:
dependencies:
dunder-proto: 1.0.1
es-object-atoms: 1.1.1
get-stream@5.2.0:
dependencies:
pump: 3.0.4
@@ -14292,6 +14457,8 @@ snapshots:
dependencies:
csstype: 3.2.3
gopd@1.2.0: {}
graceful-fs@4.2.11: {}
hachure-fill@0.5.2: {}
@@ -14314,6 +14481,12 @@ snapshots:
has-flag@4.0.0: {}
has-symbols@1.1.0: {}
has-tostringtag@1.0.2:
dependencies:
has-symbols: 1.1.0
hast-util-from-dom@5.0.1:
dependencies:
'@types/hast': 3.0.4
@@ -14954,6 +15127,8 @@ snapshots:
marked@17.0.5: {}
math-intrinsics@1.1.0: {}
mdast-util-directive@3.1.0:
dependencies:
'@types/mdast': 4.0.4
@@ -16092,6 +16267,8 @@ snapshots:
property-information@7.1.0: {}
proxy-from-env@2.1.0: {}
pump@3.0.4:
dependencies:
end-of-stream: 1.4.5

View File

@@ -22,6 +22,7 @@ overrides:
array.prototype.flatmap: npm:@nolyfill/array.prototype.flatmap@^1.0.44
array.prototype.tosorted: npm:@nolyfill/array.prototype.tosorted@^1.0.44
assert: npm:@nolyfill/assert@^1.0.26
axios: 1.14.0
brace-expansion@<2.0.2: 2.0.2
canvas: ^3.2.2
devalue@<5.3.2: 5.3.2
@@ -146,11 +147,12 @@ catalog:
"@typescript/native-preview": 7.0.0-dev.20260329.1
"@vitejs/plugin-react": 6.0.1
"@vitejs/plugin-rsc": 0.5.21
"@vitest/coverage-v8": 4.1.1
"@vitest/coverage-v8": 4.1.2
abcjs: 6.6.2
agentation: 3.0.2
ahooks: 3.9.7
autoprefixer: 10.4.27
axios: 1.14.0
class-variance-authority: 0.7.1
clsx: 2.1.1
cmdk: 1.1.1

View File

@@ -12,11 +12,11 @@ const typeCheckedRules =
export default [
{
ignores: ["dist", "node_modules", "scripts"],
ignores: ["dist", "node_modules", "scripts", "tests", "**/*.test.*", "**/*.spec.*"],
},
js.configs.recommended,
{
files: ["src/**/*.ts", "tests/**/*.ts"],
files: ["src/**/*.ts"],
languageOptions: {
parser: tsParser,
ecmaVersion: "latest",

View File

@@ -1,6 +1,6 @@
{
"name": "dify-client",
"version": "3.1.0",
"version": "3.0.0",
"description": "This is the Node.js SDK for the Dify.AI API, which allows you to easily integrate Dify.AI into your Node.js applications.",
"type": "module",
"main": "./dist/index.js",
@@ -15,8 +15,7 @@
"node": ">=18.0.0"
},
"files": [
"dist/index.js",
"dist/index.d.ts",
"dist",
"README.md",
"LICENSE"
],
@@ -54,6 +53,9 @@
"publish:check": "./scripts/publish.sh --dry-run",
"publish:npm": "./scripts/publish.sh"
},
"dependencies": {
"axios": "catalog:"
},
"devDependencies": {
"@eslint/js": "catalog:",
"@types/node": "catalog:",

View File

@@ -1,6 +1,6 @@
import { beforeEach, describe, expect, it, vi } from "vitest";
import { ValidationError } from "../errors/dify-error";
import { DifyClient } from "./base";
import { ValidationError } from "../errors/dify-error";
import { createHttpClientWithSpies } from "../../tests/test-utils";
describe("DifyClient base", () => {
@@ -103,7 +103,7 @@ describe("DifyClient base", () => {
});
});
it("filePreview uses bytes response", async () => {
it("filePreview uses arraybuffer response", async () => {
const { client, request } = createHttpClientWithSpies();
const dify = new DifyClient(client);
@@ -113,7 +113,7 @@ describe("DifyClient base", () => {
method: "GET",
path: "/files/file/preview",
query: { user: "user", as_attachment: "true" },
responseType: "bytes",
responseType: "arraybuffer",
});
});
@@ -162,11 +162,11 @@ describe("DifyClient base", () => {
streaming: false,
voice: "voice",
},
responseType: "bytes",
responseType: "arraybuffer",
});
});
it("textToAudio requires text or message id", () => {
it("textToAudio requires text or message id", async () => {
const { client } = createHttpClientWithSpies();
const dify = new DifyClient(client);

View File

@@ -2,18 +2,14 @@ import type {
BinaryStream,
DifyClientConfig,
DifyResponse,
JsonObject,
MessageFeedbackRequest,
QueryParams,
RequestMethod,
SuccessResponse,
TextToAudioRequest,
} from "../types/common";
import type { HttpRequestBody } from "../http/client";
import { HttpClient } from "../http/client";
import { ensureNonEmptyString, ensureRating } from "./validation";
import { FileUploadError, ValidationError } from "../errors/dify-error";
import type { SdkFormData } from "../http/form-data";
import { isFormData } from "../http/form-data";
const toConfig = (
@@ -29,8 +25,13 @@ const toConfig = (
return init;
};
const appendUserToFormData = (form: SdkFormData, user: string): void => {
form.append("user", user);
const appendUserToFormData = (form: unknown, user: string): void => {
if (!isFormData(form)) {
throw new FileUploadError("FormData is required for file uploads");
}
if (typeof form.append === "function") {
form.append("user", user);
}
};
export class DifyClient {
@@ -56,7 +57,7 @@ export class DifyClient {
sendRequest(
method: RequestMethod,
endpoint: string,
data: HttpRequestBody = null,
data: unknown = null,
params: QueryParams | null = null,
stream = false,
headerParams: Record<string, string> = {}
@@ -71,14 +72,14 @@ export class DifyClient {
});
}
getRoot(): Promise<DifyResponse<JsonObject>> {
getRoot(): Promise<DifyResponse<unknown>> {
return this.http.request({
method: "GET",
path: "/",
});
}
getApplicationParameters(user?: string): Promise<DifyResponse<JsonObject>> {
getApplicationParameters(user?: string): Promise<DifyResponse<unknown>> {
if (user) {
ensureNonEmptyString(user, "user");
}
@@ -89,11 +90,11 @@ export class DifyClient {
});
}
async getParameters(user?: string): Promise<DifyResponse<JsonObject>> {
async getParameters(user?: string): Promise<DifyResponse<unknown>> {
return this.getApplicationParameters(user);
}
getMeta(user?: string): Promise<DifyResponse<JsonObject>> {
getMeta(user?: string): Promise<DifyResponse<unknown>> {
if (user) {
ensureNonEmptyString(user, "user");
}
@@ -106,21 +107,21 @@ export class DifyClient {
messageFeedback(
request: MessageFeedbackRequest
): Promise<DifyResponse<SuccessResponse>>;
): Promise<DifyResponse<Record<string, unknown>>>;
messageFeedback(
messageId: string,
rating: "like" | "dislike" | null,
user: string,
content?: string
): Promise<DifyResponse<SuccessResponse>>;
): Promise<DifyResponse<Record<string, unknown>>>;
messageFeedback(
messageIdOrRequest: string | MessageFeedbackRequest,
rating?: "like" | "dislike" | null,
user?: string,
content?: string
): Promise<DifyResponse<SuccessResponse>> {
): Promise<DifyResponse<Record<string, unknown>>> {
let messageId: string;
const payload: JsonObject = {};
const payload: Record<string, unknown> = {};
if (typeof messageIdOrRequest === "string") {
messageId = messageIdOrRequest;
@@ -156,7 +157,7 @@ export class DifyClient {
});
}
getInfo(user?: string): Promise<DifyResponse<JsonObject>> {
getInfo(user?: string): Promise<DifyResponse<unknown>> {
if (user) {
ensureNonEmptyString(user, "user");
}
@@ -167,7 +168,7 @@ export class DifyClient {
});
}
getSite(user?: string): Promise<DifyResponse<JsonObject>> {
getSite(user?: string): Promise<DifyResponse<unknown>> {
if (user) {
ensureNonEmptyString(user, "user");
}
@@ -178,7 +179,7 @@ export class DifyClient {
});
}
fileUpload(form: unknown, user: string): Promise<DifyResponse<JsonObject>> {
fileUpload(form: unknown, user: string): Promise<DifyResponse<unknown>> {
if (!isFormData(form)) {
throw new FileUploadError("FormData is required for file uploads");
}
@@ -198,18 +199,18 @@ export class DifyClient {
): Promise<DifyResponse<Buffer>> {
ensureNonEmptyString(fileId, "fileId");
ensureNonEmptyString(user, "user");
return this.http.request<Buffer, "bytes">({
return this.http.request<Buffer>({
method: "GET",
path: `/files/${fileId}/preview`,
query: {
user,
as_attachment: asAttachment ? "true" : undefined,
},
responseType: "bytes",
responseType: "arraybuffer",
});
}
audioToText(form: unknown, user: string): Promise<DifyResponse<JsonObject>> {
audioToText(form: unknown, user: string): Promise<DifyResponse<unknown>> {
if (!isFormData(form)) {
throw new FileUploadError("FormData is required for audio uploads");
}
@@ -273,11 +274,11 @@ export class DifyClient {
});
}
return this.http.request<Buffer, "bytes">({
return this.http.request<Buffer>({
method: "POST",
path: "/text-to-audio",
data: payload,
responseType: "bytes",
responseType: "arraybuffer",
});
}
}

View File

@@ -1,6 +1,6 @@
import { beforeEach, describe, expect, it, vi } from "vitest";
import { ValidationError } from "../errors/dify-error";
import { ChatClient } from "./chat";
import { ValidationError } from "../errors/dify-error";
import { createHttpClientWithSpies } from "../../tests/test-utils";
describe("ChatClient", () => {
@@ -156,13 +156,13 @@ describe("ChatClient", () => {
});
});
it("requires name when autoGenerate is false", () => {
it("requires name when autoGenerate is false", async () => {
const { client } = createHttpClientWithSpies();
const chat = new ChatClient(client);
expect(() => chat.renameConversation("conv", "", "user", false)).toThrow(
ValidationError
);
expect(() =>
chat.renameConversation("conv", "", "user", false)
).toThrow(ValidationError);
});
it("deletes conversations", async () => {

View File

@@ -1,9 +1,5 @@
import { DifyClient } from "./base";
import type {
ChatMessageRequest,
ChatMessageResponse,
ConversationSortBy,
} from "../types/chat";
import type { ChatMessageRequest, ChatMessageResponse } from "../types/chat";
import type {
AnnotationCreateRequest,
AnnotationListOptions,
@@ -13,11 +9,7 @@ import type {
import type {
DifyResponse,
DifyStream,
JsonObject,
JsonValue,
QueryParams,
SuccessResponse,
SuggestedQuestionsResponse,
} from "../types/common";
import {
ensureNonEmptyString,
@@ -30,20 +22,20 @@ export class ChatClient extends DifyClient {
request: ChatMessageRequest
): Promise<DifyResponse<ChatMessageResponse> | DifyStream<ChatMessageResponse>>;
createChatMessage(
inputs: JsonObject,
inputs: Record<string, unknown>,
query: string,
user: string,
stream?: boolean,
conversationId?: string | null,
files?: ChatMessageRequest["files"]
files?: Array<Record<string, unknown>> | null
): Promise<DifyResponse<ChatMessageResponse> | DifyStream<ChatMessageResponse>>;
createChatMessage(
inputOrRequest: ChatMessageRequest | JsonObject,
inputOrRequest: ChatMessageRequest | Record<string, unknown>,
query?: string,
user?: string,
stream = false,
conversationId?: string | null,
files?: ChatMessageRequest["files"]
files?: Array<Record<string, unknown>> | null
): Promise<DifyResponse<ChatMessageResponse> | DifyStream<ChatMessageResponse>> {
let payload: ChatMessageRequest;
let shouldStream = stream;
@@ -54,8 +46,8 @@ export class ChatClient extends DifyClient {
} else {
ensureNonEmptyString(query, "query");
ensureNonEmptyString(user, "user");
payload = {
inputs: inputOrRequest,
payload = {
inputs: inputOrRequest as Record<string, unknown>,
query,
user,
response_mode: stream ? "streaming" : "blocking",
@@ -87,10 +79,10 @@ export class ChatClient extends DifyClient {
stopChatMessage(
taskId: string,
user: string
): Promise<DifyResponse<SuccessResponse>> {
): Promise<DifyResponse<ChatMessageResponse>> {
ensureNonEmptyString(taskId, "taskId");
ensureNonEmptyString(user, "user");
return this.http.request<SuccessResponse>({
return this.http.request<ChatMessageResponse>({
method: "POST",
path: `/chat-messages/${taskId}/stop`,
data: { user },
@@ -100,17 +92,17 @@ export class ChatClient extends DifyClient {
stopMessage(
taskId: string,
user: string
): Promise<DifyResponse<SuccessResponse>> {
): Promise<DifyResponse<ChatMessageResponse>> {
return this.stopChatMessage(taskId, user);
}
getSuggested(
messageId: string,
user: string
): Promise<DifyResponse<SuggestedQuestionsResponse>> {
): Promise<DifyResponse<ChatMessageResponse>> {
ensureNonEmptyString(messageId, "messageId");
ensureNonEmptyString(user, "user");
return this.http.request<SuggestedQuestionsResponse>({
return this.http.request<ChatMessageResponse>({
method: "GET",
path: `/messages/${messageId}/suggested`,
query: { user },
@@ -122,7 +114,7 @@ export class ChatClient extends DifyClient {
getAppFeedbacks(
page?: number,
limit?: number
): Promise<DifyResponse<JsonObject>> {
): Promise<DifyResponse<Record<string, unknown>>> {
ensureOptionalInt(page, "page");
ensureOptionalInt(limit, "limit");
return this.http.request({
@@ -139,8 +131,8 @@ export class ChatClient extends DifyClient {
user: string,
lastId?: string | null,
limit?: number | null,
sortBy?: ConversationSortBy | null
): Promise<DifyResponse<JsonObject>> {
sortByOrPinned?: string | boolean | null
): Promise<DifyResponse<Record<string, unknown>>> {
ensureNonEmptyString(user, "user");
ensureOptionalString(lastId, "lastId");
ensureOptionalInt(limit, "limit");
@@ -152,8 +144,10 @@ export class ChatClient extends DifyClient {
if (limit) {
params.limit = limit;
}
if (sortBy) {
params.sort_by = sortBy;
if (typeof sortByOrPinned === "string") {
params.sort_by = sortByOrPinned;
} else if (typeof sortByOrPinned === "boolean") {
params.pinned = sortByOrPinned;
}
return this.http.request({
@@ -168,7 +162,7 @@ export class ChatClient extends DifyClient {
conversationId: string,
firstId?: string | null,
limit?: number | null
): Promise<DifyResponse<JsonObject>> {
): Promise<DifyResponse<Record<string, unknown>>> {
ensureNonEmptyString(user, "user");
ensureNonEmptyString(conversationId, "conversationId");
ensureOptionalString(firstId, "firstId");
@@ -195,18 +189,18 @@ export class ChatClient extends DifyClient {
name: string,
user: string,
autoGenerate?: boolean
): Promise<DifyResponse<JsonObject>>;
): Promise<DifyResponse<Record<string, unknown>>>;
renameConversation(
conversationId: string,
user: string,
options?: { name?: string | null; autoGenerate?: boolean }
): Promise<DifyResponse<JsonObject>>;
): Promise<DifyResponse<Record<string, unknown>>>;
renameConversation(
conversationId: string,
nameOrUser: string,
userOrOptions?: string | { name?: string | null; autoGenerate?: boolean },
autoGenerate?: boolean
): Promise<DifyResponse<JsonObject>> {
): Promise<DifyResponse<Record<string, unknown>>> {
ensureNonEmptyString(conversationId, "conversationId");
let name: string | null | undefined;
@@ -228,7 +222,7 @@ export class ChatClient extends DifyClient {
ensureNonEmptyString(name, "name");
}
const payload: JsonObject = {
const payload: Record<string, unknown> = {
user,
auto_generate: resolvedAutoGenerate,
};
@@ -246,7 +240,7 @@ export class ChatClient extends DifyClient {
deleteConversation(
conversationId: string,
user: string
): Promise<DifyResponse<SuccessResponse>> {
): Promise<DifyResponse<Record<string, unknown>>> {
ensureNonEmptyString(conversationId, "conversationId");
ensureNonEmptyString(user, "user");
return this.http.request({
@@ -262,7 +256,7 @@ export class ChatClient extends DifyClient {
lastId?: string | null,
limit?: number | null,
variableName?: string | null
): Promise<DifyResponse<JsonObject>> {
): Promise<DifyResponse<Record<string, unknown>>> {
ensureNonEmptyString(conversationId, "conversationId");
ensureNonEmptyString(user, "user");
ensureOptionalString(lastId, "lastId");
@@ -285,8 +279,8 @@ export class ChatClient extends DifyClient {
conversationId: string,
variableId: string,
user: string,
value: JsonValue
): Promise<DifyResponse<JsonObject>> {
value: unknown
): Promise<DifyResponse<Record<string, unknown>>> {
ensureNonEmptyString(conversationId, "conversationId");
ensureNonEmptyString(variableId, "variableId");
ensureNonEmptyString(user, "user");

View File

@@ -1,11 +1,6 @@
import { DifyClient } from "./base";
import type { CompletionRequest, CompletionResponse } from "../types/completion";
import type {
DifyResponse,
DifyStream,
JsonObject,
SuccessResponse,
} from "../types/common";
import type { DifyResponse, DifyStream } from "../types/common";
import { ensureNonEmptyString } from "./validation";
const warned = new Set<string>();
@@ -22,16 +17,16 @@ export class CompletionClient extends DifyClient {
request: CompletionRequest
): Promise<DifyResponse<CompletionResponse> | DifyStream<CompletionResponse>>;
createCompletionMessage(
inputs: JsonObject,
inputs: Record<string, unknown>,
user: string,
stream?: boolean,
files?: CompletionRequest["files"]
files?: Array<Record<string, unknown>> | null
): Promise<DifyResponse<CompletionResponse> | DifyStream<CompletionResponse>>;
createCompletionMessage(
inputOrRequest: CompletionRequest | JsonObject,
inputOrRequest: CompletionRequest | Record<string, unknown>,
user?: string,
stream = false,
files?: CompletionRequest["files"]
files?: Array<Record<string, unknown>> | null
): Promise<DifyResponse<CompletionResponse> | DifyStream<CompletionResponse>> {
let payload: CompletionRequest;
let shouldStream = stream;
@@ -42,7 +37,7 @@ export class CompletionClient extends DifyClient {
} else {
ensureNonEmptyString(user, "user");
payload = {
inputs: inputOrRequest,
inputs: inputOrRequest as Record<string, unknown>,
user,
files,
response_mode: stream ? "streaming" : "blocking",
@@ -69,10 +64,10 @@ export class CompletionClient extends DifyClient {
stopCompletionMessage(
taskId: string,
user: string
): Promise<DifyResponse<SuccessResponse>> {
): Promise<DifyResponse<CompletionResponse>> {
ensureNonEmptyString(taskId, "taskId");
ensureNonEmptyString(user, "user");
return this.http.request<SuccessResponse>({
return this.http.request<CompletionResponse>({
method: "POST",
path: `/completion-messages/${taskId}/stop`,
data: { user },
@@ -82,15 +77,15 @@ export class CompletionClient extends DifyClient {
stop(
taskId: string,
user: string
): Promise<DifyResponse<SuccessResponse>> {
): Promise<DifyResponse<CompletionResponse>> {
return this.stopCompletionMessage(taskId, user);
}
runWorkflow(
inputs: JsonObject,
inputs: Record<string, unknown>,
user: string,
stream = false
): Promise<DifyResponse<JsonObject> | DifyStream<JsonObject>> {
): Promise<DifyResponse<Record<string, unknown>> | DifyStream<Record<string, unknown>>> {
warnOnce(
"CompletionClient.runWorkflow is deprecated. Use WorkflowClient.run instead."
);
@@ -101,13 +96,13 @@ export class CompletionClient extends DifyClient {
response_mode: stream ? "streaming" : "blocking",
};
if (stream) {
return this.http.requestStream<JsonObject>({
return this.http.requestStream<Record<string, unknown>>({
method: "POST",
path: "/workflows/run",
data: payload,
});
}
return this.http.request<JsonObject>({
return this.http.request<Record<string, unknown>>({
method: "POST",
path: "/workflows/run",
data: payload,

View File

@@ -1,5 +1,4 @@
import { beforeEach, describe, expect, it, vi } from "vitest";
import { FileUploadError, ValidationError } from "../errors/dify-error";
import { KnowledgeBaseClient } from "./knowledge-base";
import { createHttpClientWithSpies } from "../../tests/test-utils";
@@ -175,6 +174,7 @@ describe("KnowledgeBaseClient", () => {
it("handles pipeline operations", async () => {
const { client, request, requestStream } = createHttpClientWithSpies();
const kb = new KnowledgeBaseClient(client);
const warn = vi.spyOn(console, "warn").mockImplementation(() => {});
const form = { append: vi.fn(), getHeaders: () => ({}) };
await kb.listDatasourcePlugins("ds", { isPublished: true });
@@ -201,6 +201,7 @@ describe("KnowledgeBaseClient", () => {
});
await kb.uploadPipelineFile(form);
expect(warn).toHaveBeenCalled();
expect(request).toHaveBeenCalledWith({
method: "GET",
path: "/datasets/ds/pipeline/datasource-plugins",
@@ -245,22 +246,4 @@ describe("KnowledgeBaseClient", () => {
data: form,
});
});
it("validates form-data and optional array filters", async () => {
const { client } = createHttpClientWithSpies();
const kb = new KnowledgeBaseClient(client);
await expect(kb.createDocumentByFile("ds", {})).rejects.toBeInstanceOf(
FileUploadError
);
await expect(
kb.listSegments("ds", "doc", { status: ["ok", 1] as unknown as string[] })
).rejects.toBeInstanceOf(ValidationError);
await expect(
kb.hitTesting("ds", {
query: "q",
attachment_ids: ["att-1", 2] as unknown as string[],
})
).rejects.toBeInstanceOf(ValidationError);
});
});

View File

@@ -38,17 +38,22 @@ import {
ensureStringArray,
} from "./validation";
import { FileUploadError, ValidationError } from "../errors/dify-error";
import type { SdkFormData } from "../http/form-data";
import { isFormData } from "../http/form-data";
function ensureFormData(
form: unknown,
context: string
): asserts form is SdkFormData {
const warned = new Set<string>();
const warnOnce = (message: string): void => {
if (warned.has(message)) {
return;
}
warned.add(message);
console.warn(message);
};
const ensureFormData = (form: unknown, context: string): void => {
if (!isFormData(form)) {
throw new FileUploadError(`${context} requires FormData`);
}
}
};
const ensureNonEmptyArray = (value: unknown, name: string): void => {
if (!Array.isArray(value) || value.length === 0) {
@@ -56,6 +61,12 @@ const ensureNonEmptyArray = (value: unknown, name: string): void => {
}
};
const warnPipelineRoutes = (): void => {
warnOnce(
"RAG pipeline endpoints may be unavailable unless the service API registers dataset/rag_pipeline routes."
);
};
export class KnowledgeBaseClient extends DifyClient {
async listDatasets(
options?: DatasetListOptions
@@ -630,6 +641,7 @@ export class KnowledgeBaseClient extends DifyClient {
datasetId: string,
options?: DatasourcePluginListOptions
): Promise<DifyResponse<KnowledgeBaseResponse>> {
warnPipelineRoutes();
ensureNonEmptyString(datasetId, "datasetId");
ensureOptionalBoolean(options?.isPublished, "isPublished");
return this.http.request({
@@ -646,6 +658,7 @@ export class KnowledgeBaseClient extends DifyClient {
nodeId: string,
request: DatasourceNodeRunRequest
): Promise<DifyStream<PipelineStreamEvent>> {
warnPipelineRoutes();
ensureNonEmptyString(datasetId, "datasetId");
ensureNonEmptyString(nodeId, "nodeId");
ensureNonEmptyString(request.datasource_type, "datasource_type");
@@ -660,6 +673,7 @@ export class KnowledgeBaseClient extends DifyClient {
datasetId: string,
request: PipelineRunRequest
): Promise<DifyResponse<KnowledgeBaseResponse> | DifyStream<PipelineStreamEvent>> {
warnPipelineRoutes();
ensureNonEmptyString(datasetId, "datasetId");
ensureNonEmptyString(request.datasource_type, "datasource_type");
ensureNonEmptyString(request.start_node_id, "start_node_id");
@@ -681,6 +695,7 @@ export class KnowledgeBaseClient extends DifyClient {
async uploadPipelineFile(
form: unknown
): Promise<DifyResponse<KnowledgeBaseResponse>> {
warnPipelineRoutes();
ensureFormData(form, "uploadPipelineFile");
return this.http.request({
method: "POST",

View File

@@ -10,7 +10,7 @@ import {
validateParams,
} from "./validation";
const makeLongString = (length: number) => "a".repeat(length);
const makeLongString = (length) => "a".repeat(length);
describe("validation utilities", () => {
it("ensureNonEmptyString throws on empty or whitespace", () => {
@@ -19,7 +19,9 @@ describe("validation utilities", () => {
});
it("ensureNonEmptyString throws on overly long strings", () => {
expect(() => ensureNonEmptyString(makeLongString(10001), "name")).toThrow();
expect(() =>
ensureNonEmptyString(makeLongString(10001), "name")
).toThrow();
});
it("ensureOptionalString ignores undefined and validates when set", () => {
@@ -71,6 +73,7 @@ describe("validation utilities", () => {
expect(() => validateParams({ rating: "bad" })).toThrow();
expect(() => validateParams({ page: 1.1 })).toThrow();
expect(() => validateParams({ files: "bad" })).toThrow();
// Empty strings are allowed for optional params (e.g., keyword: "" means no filter)
expect(() => validateParams({ keyword: "" })).not.toThrow();
expect(() => validateParams({ name: makeLongString(10001) })).toThrow();
expect(() =>

View File

@@ -1,5 +1,4 @@
import { ValidationError } from "../errors/dify-error";
import { isRecord } from "../internal/type-guards";
const MAX_STRING_LENGTH = 10000;
const MAX_LIST_LENGTH = 1000;
@@ -110,8 +109,8 @@ export function validateParams(params: Record<string, unknown>): void {
`Parameter '${key}' exceeds maximum size of ${MAX_LIST_LENGTH} items`
);
}
} else if (isRecord(value)) {
if (Object.keys(value).length > MAX_DICT_LENGTH) {
} else if (typeof value === "object") {
if (Object.keys(value as Record<string, unknown>).length > MAX_DICT_LENGTH) {
throw new ValidationError(
`Parameter '${key}' exceeds maximum size of ${MAX_DICT_LENGTH} items`
);

View File

@@ -90,6 +90,7 @@ describe("WorkflowClient", () => {
const { client, request } = createHttpClientWithSpies();
const workflow = new WorkflowClient(client);
// Use createdByEndUserSessionId to filter by user session (backend API parameter)
await workflow.getLogs({
keyword: "k",
status: "succeeded",

View File

@@ -1,12 +1,6 @@
import { DifyClient } from "./base";
import type { WorkflowRunRequest, WorkflowRunResponse } from "../types/workflow";
import type {
DifyResponse,
DifyStream,
JsonObject,
QueryParams,
SuccessResponse,
} from "../types/common";
import type { DifyResponse, DifyStream, QueryParams } from "../types/common";
import {
ensureNonEmptyString,
ensureOptionalInt,
@@ -18,12 +12,12 @@ export class WorkflowClient extends DifyClient {
request: WorkflowRunRequest
): Promise<DifyResponse<WorkflowRunResponse> | DifyStream<WorkflowRunResponse>>;
run(
inputs: JsonObject,
inputs: Record<string, unknown>,
user: string,
stream?: boolean
): Promise<DifyResponse<WorkflowRunResponse> | DifyStream<WorkflowRunResponse>>;
run(
inputOrRequest: WorkflowRunRequest | JsonObject,
inputOrRequest: WorkflowRunRequest | Record<string, unknown>,
user?: string,
stream = false
): Promise<DifyResponse<WorkflowRunResponse> | DifyStream<WorkflowRunResponse>> {
@@ -36,7 +30,7 @@ export class WorkflowClient extends DifyClient {
} else {
ensureNonEmptyString(user, "user");
payload = {
inputs: inputOrRequest,
inputs: inputOrRequest as Record<string, unknown>,
user,
response_mode: stream ? "streaming" : "blocking",
};
@@ -90,10 +84,10 @@ export class WorkflowClient extends DifyClient {
stop(
taskId: string,
user: string
): Promise<DifyResponse<SuccessResponse>> {
): Promise<DifyResponse<WorkflowRunResponse>> {
ensureNonEmptyString(taskId, "taskId");
ensureNonEmptyString(user, "user");
return this.http.request<SuccessResponse>({
return this.http.request<WorkflowRunResponse>({
method: "POST",
path: `/workflows/tasks/${taskId}/stop`,
data: { user },
@@ -117,7 +111,7 @@ export class WorkflowClient extends DifyClient {
limit?: number;
startTime?: string;
endTime?: string;
}): Promise<DifyResponse<JsonObject>> {
}): Promise<DifyResponse<Record<string, unknown>>> {
if (options?.keyword) {
ensureOptionalString(options.keyword, "keyword");
}

View File

@@ -0,0 +1,304 @@
import axios from "axios";
import { Readable } from "node:stream";
import { beforeEach, describe, expect, it, vi } from "vitest";
import {
APIError,
AuthenticationError,
FileUploadError,
NetworkError,
RateLimitError,
TimeoutError,
ValidationError,
} from "../errors/dify-error";
import { HttpClient } from "./client";
// Unit tests for HttpClient over the axios transport. Each test replaces the
// axios instance (via a spy on axios.create) with a mocked `request` function,
// so no real network I/O happens; assertions then inspect the AxiosRequestConfig
// captured by the mock or the mapped error/response surfaced by the client.
// NOTE(review): mockResolvedValueOnce/mockRejectedValueOnce ordering is
// load-bearing in several tests — each queued value feeds exactly one request.
describe("HttpClient", () => {
  beforeEach(() => {
    // Reset all spies so each test installs its own axios.create stub.
    vi.restoreAllMocks();
  });

  it("builds requests with auth headers and JSON content type", async () => {
    const mockRequest = vi.fn().mockResolvedValue({
      status: 200,
      data: { ok: true },
      headers: { "x-request-id": "req" },
    });
    // Swap the transport the client creates for our spy.
    vi.spyOn(axios, "create").mockReturnValue({ request: mockRequest });
    const client = new HttpClient({ apiKey: "test" });
    const response = await client.request({
      method: "POST",
      path: "/chat-messages",
      data: { user: "u" },
    });
    // requestId is lifted from the x-request-id response header.
    expect(response.requestId).toBe("req");
    const config = mockRequest.mock.calls[0][0];
    expect(config.headers.Authorization).toBe("Bearer test");
    expect(config.headers["Content-Type"]).toBe("application/json");
    expect(config.responseType).toBe("json");
  });

  it("serializes array query params", async () => {
    const mockRequest = vi.fn().mockResolvedValue({
      status: 200,
      data: "ok",
      headers: {},
    });
    vi.spyOn(axios, "create").mockReturnValue({ request: mockRequest });
    const client = new HttpClient({ apiKey: "test" });
    await client.requestRaw({
      method: "GET",
      path: "/datasets",
      query: { tag_ids: ["a", "b"], limit: 2 },
    });
    // Arrays must serialize as repeated keys (tag_ids=a&tag_ids=b), not tag_ids[]=.
    const config = mockRequest.mock.calls[0][0];
    const queryString = config.paramsSerializer.serialize({
      tag_ids: ["a", "b"],
      limit: 2,
    });
    expect(queryString).toBe("tag_ids=a&tag_ids=b&limit=2");
  });

  it("returns SSE stream helpers", async () => {
    const mockRequest = vi.fn().mockResolvedValue({
      status: 200,
      // A minimal SSE payload: one `data:` event containing JSON.
      data: Readable.from(["data: {\"text\":\"hi\"}\n\n"]),
      headers: { "x-request-id": "req" },
    });
    vi.spyOn(axios, "create").mockReturnValue({ request: mockRequest });
    const client = new HttpClient({ apiKey: "test" });
    const stream = await client.requestStream({
      method: "POST",
      path: "/chat-messages",
      data: { user: "u" },
    });
    expect(stream.status).toBe(200);
    expect(stream.requestId).toBe("req");
    // toText() concatenates the decoded text fields of the SSE events.
    await expect(stream.toText()).resolves.toBe("hi");
  });

  it("returns binary stream helpers", async () => {
    const mockRequest = vi.fn().mockResolvedValue({
      status: 200,
      data: Readable.from(["chunk"]),
      headers: { "x-request-id": "req" },
    });
    vi.spyOn(axios, "create").mockReturnValue({ request: mockRequest });
    const client = new HttpClient({ apiKey: "test" });
    const stream = await client.requestBinaryStream({
      method: "POST",
      path: "/text-to-audio",
      data: { user: "u", text: "hi" },
    });
    expect(stream.status).toBe(200);
    expect(stream.requestId).toBe("req");
  });

  it("respects form-data headers", async () => {
    const mockRequest = vi.fn().mockResolvedValue({
      status: 200,
      data: "ok",
      headers: {},
    });
    vi.spyOn(axios, "create").mockReturnValue({ request: mockRequest });
    const client = new HttpClient({ apiKey: "test" });
    // Duck-typed legacy form-data object (append + getHeaders).
    const form = {
      append: () => {},
      getHeaders: () => ({ "content-type": "multipart/form-data; boundary=abc" }),
    };
    await client.requestRaw({
      method: "POST",
      path: "/files/upload",
      data: form,
    });
    const config = mockRequest.mock.calls[0][0];
    // The multipart header from the form must win; the default JSON
    // Content-Type must NOT be set alongside it.
    expect(config.headers["content-type"]).toBe(
      "multipart/form-data; boundary=abc"
    );
    expect(config.headers["Content-Type"]).toBeUndefined();
  });

  it("maps 401 and 429 errors", async () => {
    const mockRequest = vi.fn();
    vi.spyOn(axios, "create").mockReturnValue({ request: mockRequest });
    // maxRetries: 0 so each queued rejection maps to exactly one thrown error.
    const client = new HttpClient({ apiKey: "test", maxRetries: 0 });
    mockRequest.mockRejectedValueOnce({
      isAxiosError: true,
      response: {
        status: 401,
        data: { message: "unauthorized" },
        headers: {},
      },
    });
    await expect(
      client.requestRaw({ method: "GET", path: "/meta" })
    ).rejects.toBeInstanceOf(AuthenticationError);
    mockRequest.mockRejectedValueOnce({
      isAxiosError: true,
      response: {
        status: 429,
        data: { message: "rate" },
        headers: { "retry-after": "2" },
      },
    });
    const error = await client
      .requestRaw({ method: "GET", path: "/meta" })
      .catch((err) => err);
    expect(error).toBeInstanceOf(RateLimitError);
    // retry-after header is parsed into a numeric retryAfter field.
    expect(error.retryAfter).toBe(2);
  });

  it("maps validation and upload errors", async () => {
    const mockRequest = vi.fn();
    vi.spyOn(axios, "create").mockReturnValue({ request: mockRequest });
    const client = new HttpClient({ apiKey: "test", maxRetries: 0 });
    mockRequest.mockRejectedValueOnce({
      isAxiosError: true,
      response: {
        status: 422,
        data: { message: "invalid" },
        headers: {},
      },
    });
    await expect(
      client.requestRaw({ method: "POST", path: "/chat-messages", data: { user: "u" } })
    ).rejects.toBeInstanceOf(ValidationError);
    // A 400 on an upload path is specialized to FileUploadError.
    mockRequest.mockRejectedValueOnce({
      isAxiosError: true,
      config: { url: "/files/upload" },
      response: {
        status: 400,
        data: { message: "bad upload" },
        headers: {},
      },
    });
    await expect(
      client.requestRaw({ method: "POST", path: "/files/upload", data: { user: "u" } })
    ).rejects.toBeInstanceOf(FileUploadError);
  });

  it("maps timeout and network errors", async () => {
    const mockRequest = vi.fn();
    vi.spyOn(axios, "create").mockReturnValue({ request: mockRequest });
    const client = new HttpClient({ apiKey: "test", maxRetries: 0 });
    // axios signals timeouts via code ECONNABORTED.
    mockRequest.mockRejectedValueOnce({
      isAxiosError: true,
      code: "ECONNABORTED",
      message: "timeout",
    });
    await expect(
      client.requestRaw({ method: "GET", path: "/meta" })
    ).rejects.toBeInstanceOf(TimeoutError);
    // An axios error with no response object is a transport-level failure.
    mockRequest.mockRejectedValueOnce({
      isAxiosError: true,
      message: "network",
    });
    await expect(
      client.requestRaw({ method: "GET", path: "/meta" })
    ).rejects.toBeInstanceOf(NetworkError);
  });

  it("retries on timeout errors", async () => {
    const mockRequest = vi.fn();
    vi.spyOn(axios, "create").mockReturnValue({ request: mockRequest });
    const client = new HttpClient({ apiKey: "test", maxRetries: 1, retryDelay: 0 });
    // First attempt times out, second succeeds → two transport calls total.
    mockRequest
      .mockRejectedValueOnce({
        isAxiosError: true,
        code: "ECONNABORTED",
        message: "timeout",
      })
      .mockResolvedValueOnce({ status: 200, data: "ok", headers: {} });
    await client.requestRaw({ method: "GET", path: "/meta" });
    expect(mockRequest).toHaveBeenCalledTimes(2);
  });

  it("validates query parameters before request", async () => {
    const mockRequest = vi.fn();
    vi.spyOn(axios, "create").mockReturnValue({ request: mockRequest });
    const client = new HttpClient({ apiKey: "test" });
    // Invalid params must fail client-side, before any transport call.
    await expect(
      client.requestRaw({ method: "GET", path: "/meta", query: { user: 1 } })
    ).rejects.toBeInstanceOf(ValidationError);
    expect(mockRequest).not.toHaveBeenCalled();
  });

  it("returns APIError for other http failures", async () => {
    const mockRequest = vi.fn();
    vi.spyOn(axios, "create").mockReturnValue({ request: mockRequest });
    const client = new HttpClient({ apiKey: "test", maxRetries: 0 });
    // 5xx with no more specific mapping falls back to the generic APIError.
    mockRequest.mockRejectedValueOnce({
      isAxiosError: true,
      response: { status: 500, data: { message: "server" }, headers: {} },
    });
    await expect(
      client.requestRaw({ method: "GET", path: "/meta" })
    ).rejects.toBeInstanceOf(APIError);
  });

  it("logs requests and responses when enableLogging is true", async () => {
    const mockRequest = vi.fn().mockResolvedValue({
      status: 200,
      data: { ok: true },
      headers: {},
    });
    vi.spyOn(axios, "create").mockReturnValue({ request: mockRequest });
    const consoleInfo = vi.spyOn(console, "info").mockImplementation(() => {});
    const client = new HttpClient({ apiKey: "test", enableLogging: true });
    await client.requestRaw({ method: "GET", path: "/meta" });
    expect(consoleInfo).toHaveBeenCalledWith(
      expect.stringContaining("dify-client-node response 200 GET")
    );
    consoleInfo.mockRestore();
  });

  it("logs retry attempts when enableLogging is true", async () => {
    const mockRequest = vi.fn();
    vi.spyOn(axios, "create").mockReturnValue({ request: mockRequest });
    const consoleInfo = vi.spyOn(console, "info").mockImplementation(() => {});
    const client = new HttpClient({
      apiKey: "test",
      maxRetries: 1,
      retryDelay: 0,
      enableLogging: true,
    });
    mockRequest
      .mockRejectedValueOnce({
        isAxiosError: true,
        code: "ECONNABORTED",
        message: "timeout",
      })
      .mockResolvedValueOnce({ status: 200, data: "ok", headers: {} });
    await client.requestRaw({ method: "GET", path: "/meta" });
    expect(consoleInfo).toHaveBeenCalledWith(
      expect.stringContaining("dify-client-node retry")
    );
    consoleInfo.mockRestore();
  });
});

View File

@@ -1,527 +0,0 @@
import { Readable, Stream } from "node:stream";
import { beforeEach, describe, expect, it, vi } from "vitest";
import {
APIError,
AuthenticationError,
FileUploadError,
NetworkError,
RateLimitError,
TimeoutError,
ValidationError,
} from "../errors/dify-error";
import { HttpClient } from "./client";
// Install a fresh vitest mock as the global fetch and return it so callers
// can queue responses and inspect recorded invocations.
const stubFetch = (): ReturnType<typeof vi.fn> => {
  const mock = vi.fn();
  vi.stubGlobal("fetch", mock);
  return mock;
};
// Look up the [url, init] argument tuple of the index-th recorded fetch
// invocation, failing loudly if the test made fewer calls than expected.
const getFetchCall = (
  fetchMock: ReturnType<typeof vi.fn>,
  index = 0
): [string, RequestInit | undefined] => {
  const recorded = fetchMock.mock.calls[index];
  if (recorded === undefined) {
    throw new Error(`Missing fetch call at index ${index}`);
  }
  return recorded as [string, RequestInit | undefined];
};
// Normalize any HeadersInit shape (record, pairs, Headers) into a plain
// object with lower-cased keys, which is easy to assert against.
const toHeaderRecord = (headers: HeadersInit | undefined): Record<string, string> => {
  const record: Record<string, string> = {};
  for (const [key, value] of new Headers(headers)) {
    record[key] = value;
  }
  return record;
};
// Build a Response whose body is the JSON encoding of `body`, defaulting
// content-type to application/json while letting caller-supplied headers
// override it on conflict.
const jsonResponse = (
  body: unknown,
  init: ResponseInit = {}
): Response => {
  const headers = {
    "content-type": "application/json",
    ...(init.headers ?? {}),
  };
  return new Response(JSON.stringify(body), { ...init, headers });
};
// Build a plain-text Response; headers from `init` are copied through as-is
// (no default content-type is added, unlike jsonResponse).
const textResponse = (body: string, init: ResponseInit = {}): Response => {
  const headers = { ...(init.headers ?? {}) };
  return new Response(body, { ...init, headers });
};
// Unit tests for HttpClient over the global fetch transport. Every test
// replaces globalThis.fetch with a vitest mock (stubFetch), so no network
// I/O happens; assertions inspect the URL and RequestInit passed to fetch,
// or the mapped error/response the client surfaces.
// NOTE(review): mockResolvedValueOnce/mockRejectedValueOnce ordering is
// load-bearing — each queued value feeds exactly one fetch invocation.
describe("HttpClient", () => {
  beforeEach(() => {
    vi.restoreAllMocks();
    // Also drop the fetch stub installed by stubFetch in the previous test.
    vi.unstubAllGlobals();
  });

  it("builds requests with auth headers and JSON content type", async () => {
    const fetchMock = stubFetch();
    fetchMock.mockResolvedValueOnce(
      jsonResponse({ ok: true }, { status: 200, headers: { "x-request-id": "req" } })
    );
    const client = new HttpClient({ apiKey: "test" });
    const response = await client.request({
      method: "POST",
      path: "/chat-messages",
      data: { user: "u" },
    });
    // requestId is lifted from the x-request-id response header.
    expect(response.requestId).toBe("req");
    expect(fetchMock).toHaveBeenCalledTimes(1);
    const [url, init] = getFetchCall(fetchMock);
    expect(url).toBe("https://api.dify.ai/v1/chat-messages");
    expect(toHeaderRecord(init?.headers)).toMatchObject({
      authorization: "Bearer test",
      "content-type": "application/json",
      "user-agent": "dify-client-node",
    });
    expect(init?.body).toBe(JSON.stringify({ user: "u" }));
  });

  it("serializes array query params", async () => {
    const fetchMock = stubFetch();
    fetchMock.mockResolvedValueOnce(jsonResponse("ok", { status: 200 }));
    const client = new HttpClient({ apiKey: "test" });
    await client.requestRaw({
      method: "GET",
      path: "/datasets",
      query: { tag_ids: ["a", "b"], limit: 2 },
    });
    // Arrays must serialize as repeated keys (tag_ids=a&tag_ids=b).
    const [url] = getFetchCall(fetchMock);
    expect(new URL(url).searchParams.toString()).toBe(
      "tag_ids=a&tag_ids=b&limit=2"
    );
  });

  it("returns SSE stream helpers", async () => {
    const fetchMock = stubFetch();
    fetchMock.mockResolvedValueOnce(
      // A minimal SSE payload: one `data:` event containing JSON.
      new Response('data: {"text":"hi"}\n\n', {
        status: 200,
        headers: { "x-request-id": "req" },
      })
    );
    const client = new HttpClient({ apiKey: "test" });
    const stream = await client.requestStream({
      method: "POST",
      path: "/chat-messages",
      data: { user: "u" },
    });
    expect(stream.status).toBe(200);
    expect(stream.requestId).toBe("req");
    // toText() concatenates the decoded text fields of the SSE events.
    await expect(stream.toText()).resolves.toBe("hi");
  });

  it("returns binary stream helpers", async () => {
    const fetchMock = stubFetch();
    fetchMock.mockResolvedValueOnce(
      new Response("chunk", {
        status: 200,
        headers: { "x-request-id": "req" },
      })
    );
    const client = new HttpClient({ apiKey: "test" });
    const stream = await client.requestBinaryStream({
      method: "POST",
      path: "/text-to-audio",
      data: { user: "u", text: "hi" },
    });
    expect(stream.status).toBe(200);
    expect(stream.requestId).toBe("req");
    // Binary streams are exposed as Node Readable, not web streams.
    expect(stream.data).toBeInstanceOf(Readable);
  });

  it("respects form-data headers", async () => {
    const fetchMock = stubFetch();
    fetchMock.mockResolvedValueOnce(jsonResponse("ok", { status: 200 }));
    const client = new HttpClient({ apiKey: "test" });
    const form = new FormData();
    form.append("file", new Blob(["abc"]), "file.txt");
    await client.requestRaw({
      method: "POST",
      path: "/files/upload",
      data: form,
    });
    const [, init] = getFetchCall(fetchMock);
    expect(toHeaderRecord(init?.headers)).toMatchObject({
      authorization: "Bearer test",
    });
    // No explicit content-type: fetch must set its own multipart boundary.
    expect(toHeaderRecord(init?.headers)["content-type"]).toBeUndefined();
  });

  it("sends legacy form-data as a readable request body", async () => {
    const fetchMock = stubFetch();
    fetchMock.mockResolvedValueOnce(jsonResponse("ok", { status: 200 }));
    const client = new HttpClient({ apiKey: "test" });
    // Legacy form-data package shape: a Readable with append/getHeaders.
    const legacyForm = Object.assign(Readable.from(["chunk"]), {
      append: vi.fn(),
      getHeaders: () => ({
        "content-type": "multipart/form-data; boundary=test",
      }),
    });
    await client.requestRaw({
      method: "POST",
      path: "/files/upload",
      data: legacyForm,
    });
    const [, init] = getFetchCall(fetchMock);
    expect(toHeaderRecord(init?.headers)).toMatchObject({
      authorization: "Bearer test",
      "content-type": "multipart/form-data; boundary=test",
    });
    // Streamed request bodies require duplex: "half" under fetch.
    expect((init as RequestInit & { duplex?: string } | undefined)?.duplex).toBe(
      "half"
    );
    // The body must be a converted stream, not the legacy object itself.
    expect(init?.body).not.toBe(legacyForm);
  });

  it("rejects legacy form-data objects that are not readable streams", async () => {
    const fetchMock = stubFetch();
    const client = new HttpClient({ apiKey: "test" });
    // Has the form-data duck-type but no stream interface → not sendable.
    const legacyForm = {
      append: vi.fn(),
      getHeaders: () => ({
        "content-type": "multipart/form-data; boundary=test",
      }),
    };
    await expect(
      client.requestRaw({
        method: "POST",
        path: "/files/upload",
        data: legacyForm,
      })
    ).rejects.toBeInstanceOf(FileUploadError);
    expect(fetchMock).not.toHaveBeenCalled();
  });

  it("accepts legacy pipeable streams that are not Readable instances", async () => {
    const fetchMock = stubFetch();
    fetchMock.mockResolvedValueOnce(jsonResponse("ok", { status: 200 }));
    const client = new HttpClient({ apiKey: "test" });
    // Old-style (pre-Readable) Stream: readable flag plus pause/resume/emit.
    const legacyStream = new Stream() as Stream &
      NodeJS.ReadableStream & {
        append: ReturnType<typeof vi.fn>;
        getHeaders: () => Record<string, string>;
      };
    legacyStream.readable = true;
    legacyStream.pause = () => legacyStream;
    legacyStream.resume = () => legacyStream;
    legacyStream.append = vi.fn();
    legacyStream.getHeaders = () => ({
      "content-type": "multipart/form-data; boundary=test",
    });
    // Emit data asynchronously so the client has attached its listeners.
    queueMicrotask(() => {
      legacyStream.emit("data", Buffer.from("chunk"));
      legacyStream.emit("end");
    });
    await client.requestRaw({
      method: "POST",
      path: "/files/upload",
      data: legacyStream as unknown as FormData,
    });
    const [, init] = getFetchCall(fetchMock);
    expect((init as RequestInit & { duplex?: string } | undefined)?.duplex).toBe(
      "half"
    );
  });

  it("returns buffers for byte responses", async () => {
    const fetchMock = stubFetch();
    fetchMock.mockResolvedValueOnce(
      new Response(Uint8Array.from([1, 2, 3]), {
        status: 200,
        headers: { "content-type": "application/octet-stream" },
      })
    );
    const client = new HttpClient({ apiKey: "test" });
    const response = await client.request<Buffer, "bytes">({
      method: "GET",
      path: "/files/file-1/preview",
      responseType: "bytes",
    });
    expect(Buffer.isBuffer(response.data)).toBe(true);
    expect(Array.from(response.data.values())).toEqual([1, 2, 3]);
  });

  it("keeps arraybuffer as a backward-compatible binary alias", async () => {
    const fetchMock = stubFetch();
    fetchMock.mockResolvedValueOnce(
      new Response(Uint8Array.from([4, 5, 6]), {
        status: 200,
        headers: { "content-type": "application/octet-stream" },
      })
    );
    const client = new HttpClient({ apiKey: "test" });
    // "arraybuffer" (the axios-era name) must behave exactly like "bytes".
    const response = await client.request<Buffer, "arraybuffer">({
      method: "GET",
      path: "/files/file-1/preview",
      responseType: "arraybuffer",
    });
    expect(Buffer.isBuffer(response.data)).toBe(true);
    expect(Array.from(response.data.values())).toEqual([4, 5, 6]);
  });

  it("returns null for empty no-content responses", async () => {
    const fetchMock = stubFetch();
    fetchMock.mockResolvedValueOnce(new Response(null, { status: 204 }));
    const client = new HttpClient({ apiKey: "test" });
    const response = await client.requestRaw({
      method: "GET",
      path: "/meta",
    });
    expect(response.data).toBeNull();
  });

  it("maps 401 and 429 errors", async () => {
    const fetchMock = stubFetch();
    fetchMock
      .mockResolvedValueOnce(
        jsonResponse({ message: "unauthorized" }, { status: 401 })
      )
      .mockResolvedValueOnce(
        jsonResponse({ message: "rate" }, { status: 429, headers: { "retry-after": "2" } })
      );
    // maxRetries: 0 so each queued response maps to exactly one thrown error.
    const client = new HttpClient({ apiKey: "test", maxRetries: 0 });
    await expect(
      client.requestRaw({ method: "GET", path: "/meta" })
    ).rejects.toBeInstanceOf(AuthenticationError);
    const error = await client
      .requestRaw({ method: "GET", path: "/meta" })
      .catch((err: unknown) => err);
    expect(error).toBeInstanceOf(RateLimitError);
    // retry-after header is parsed into a numeric retryAfter field.
    expect((error as RateLimitError).retryAfter).toBe(2);
  });

  it("maps validation and upload errors", async () => {
    const fetchMock = stubFetch();
    fetchMock
      .mockResolvedValueOnce(jsonResponse({ message: "invalid" }, { status: 422 }))
      .mockResolvedValueOnce(jsonResponse({ message: "bad upload" }, { status: 400 }));
    const client = new HttpClient({ apiKey: "test", maxRetries: 0 });
    await expect(
      client.requestRaw({ method: "POST", path: "/chat-messages", data: { user: "u" } })
    ).rejects.toBeInstanceOf(ValidationError);
    // A 400 on an upload path is specialized to FileUploadError.
    await expect(
      client.requestRaw({ method: "POST", path: "/files/upload", data: { user: "u" } })
    ).rejects.toBeInstanceOf(FileUploadError);
  });

  it("maps timeout and network errors", async () => {
    const fetchMock = stubFetch();
    fetchMock
      // fetch signals aborts (incl. timeouts) via an AbortError-named Error.
      .mockRejectedValueOnce(Object.assign(new Error("timeout"), { name: "AbortError" }))
      .mockRejectedValueOnce(new Error("network"));
    const client = new HttpClient({ apiKey: "test", maxRetries: 0 });
    await expect(
      client.requestRaw({ method: "GET", path: "/meta" })
    ).rejects.toBeInstanceOf(TimeoutError);
    await expect(
      client.requestRaw({ method: "GET", path: "/meta" })
    ).rejects.toBeInstanceOf(NetworkError);
  });

  it("maps unknown transport failures to NetworkError", async () => {
    const fetchMock = stubFetch();
    // Non-Error rejection (a bare string) must still map cleanly.
    fetchMock.mockRejectedValueOnce("boom");
    const client = new HttpClient({ apiKey: "test", maxRetries: 0 });
    await expect(
      client.requestRaw({ method: "GET", path: "/meta" })
    ).rejects.toMatchObject({
      name: "NetworkError",
      message: "Unexpected network error",
    });
  });

  it("retries on timeout errors", async () => {
    const fetchMock = stubFetch();
    // First attempt times out, second succeeds → two fetch calls total.
    fetchMock
      .mockRejectedValueOnce(Object.assign(new Error("timeout"), { name: "AbortError" }))
      .mockResolvedValueOnce(jsonResponse("ok", { status: 200 }));
    const client = new HttpClient({ apiKey: "test", maxRetries: 1, retryDelay: 0 });
    await client.requestRaw({ method: "GET", path: "/meta" });
    expect(fetchMock).toHaveBeenCalledTimes(2);
  });

  it("does not retry non-replayable readable request bodies", async () => {
    const fetchMock = stubFetch();
    fetchMock.mockRejectedValueOnce(new Error("network"));
    const client = new HttpClient({ apiKey: "test", maxRetries: 2, retryDelay: 0 });
    // A one-shot Readable body is already consumed, so retrying would
    // silently resend an empty body; the client must fail after one attempt.
    await expect(
      client.requestRaw({
        method: "POST",
        path: "/chat-messages",
        data: Readable.from(["chunk"]),
      })
    ).rejects.toBeInstanceOf(NetworkError);
    expect(fetchMock).toHaveBeenCalledTimes(1);
    const [, init] = getFetchCall(fetchMock);
    expect((init as RequestInit & { duplex?: string } | undefined)?.duplex).toBe(
      "half"
    );
  });

  it("validates query parameters before request", async () => {
    const fetchMock = stubFetch();
    const client = new HttpClient({ apiKey: "test" });
    // Invalid params must fail client-side, before any transport call.
    await expect(
      client.requestRaw({ method: "GET", path: "/meta", query: { user: 1 } })
    ).rejects.toBeInstanceOf(ValidationError);
    expect(fetchMock).not.toHaveBeenCalled();
  });

  it("returns APIError for other http failures", async () => {
    const fetchMock = stubFetch();
    fetchMock.mockResolvedValueOnce(jsonResponse({ message: "server" }, { status: 500 }));
    const client = new HttpClient({ apiKey: "test", maxRetries: 0 });
    await expect(
      client.requestRaw({ method: "GET", path: "/meta" })
    ).rejects.toBeInstanceOf(APIError);
  });

  it("uses plain text bodies when json parsing is not possible", async () => {
    const fetchMock = stubFetch();
    fetchMock.mockResolvedValueOnce(
      textResponse("plain text", {
        status: 200,
        headers: { "content-type": "text/plain" },
      })
    );
    const client = new HttpClient({ apiKey: "test" });
    const response = await client.requestRaw({
      method: "GET",
      path: "/info",
    });
    expect(response.data).toBe("plain text");
  });

  it("keeps invalid json error bodies as API errors", async () => {
    const fetchMock = stubFetch();
    // Declared application/json but malformed: the raw text must be
    // preserved on the error rather than crashing the JSON parser path.
    fetchMock.mockResolvedValueOnce(
      textResponse("{invalid", {
        status: 500,
        headers: { "content-type": "application/json", "x-request-id": "req-500" },
      })
    );
    const client = new HttpClient({ apiKey: "test", maxRetries: 0 });
    await expect(
      client.requestRaw({ method: "GET", path: "/meta" })
    ).rejects.toMatchObject({
      name: "APIError",
      statusCode: 500,
      requestId: "req-500",
      responseBody: "{invalid",
    });
  });

  it("sends raw string bodies without additional json encoding", async () => {
    const fetchMock = stubFetch();
    fetchMock.mockResolvedValueOnce(jsonResponse("ok", { status: 200 }));
    const client = new HttpClient({ apiKey: "test" });
    await client.requestRaw({
      method: "POST",
      path: "/meta",
      data: '{"pre":"serialized"}',
      headers: { "Content-Type": "application/custom+json" },
    });
    // Pre-serialized strings pass through untouched, with the caller's
    // content-type winning over the JSON default.
    const [, init] = getFetchCall(fetchMock);
    expect(init?.body).toBe('{"pre":"serialized"}');
    expect(toHeaderRecord(init?.headers)).toMatchObject({
      "content-type": "application/custom+json",
    });
  });

  it("preserves explicit user-agent headers", async () => {
    const fetchMock = stubFetch();
    fetchMock.mockResolvedValueOnce(jsonResponse({ ok: true }, { status: 200 }));
    const client = new HttpClient({ apiKey: "test" });
    await client.requestRaw({
      method: "GET",
      path: "/meta",
      headers: { "User-Agent": "custom-agent" },
    });
    const [, init] = getFetchCall(fetchMock);
    expect(toHeaderRecord(init?.headers)).toMatchObject({
      "user-agent": "custom-agent",
    });
  });

  it("logs requests and responses when enableLogging is true", async () => {
    const fetchMock = stubFetch();
    fetchMock.mockResolvedValueOnce(jsonResponse({ ok: true }, { status: 200 }));
    const consoleInfo = vi.spyOn(console, "info").mockImplementation(() => {});
    const client = new HttpClient({ apiKey: "test", enableLogging: true });
    await client.requestRaw({ method: "GET", path: "/meta" });
    expect(consoleInfo).toHaveBeenCalledWith(
      expect.stringContaining("dify-client-node response 200 GET")
    );
  });

  it("logs retry attempts when enableLogging is true", async () => {
    const fetchMock = stubFetch();
    fetchMock
      .mockRejectedValueOnce(Object.assign(new Error("timeout"), { name: "AbortError" }))
      .mockResolvedValueOnce(jsonResponse("ok", { status: 200 }));
    const consoleInfo = vi.spyOn(console, "info").mockImplementation(() => {});
    const client = new HttpClient({
      apiKey: "test",
      maxRetries: 1,
      retryDelay: 0,
      enableLogging: true,
    });
    await client.requestRaw({ method: "GET", path: "/meta" });
    expect(consoleInfo).toHaveBeenCalledWith(
      expect.stringContaining("dify-client-node retry")
    );
  });
});

View File

@@ -1,4 +1,11 @@
import { Readable } from "node:stream";
import axios from "axios";
import type {
AxiosError,
AxiosInstance,
AxiosRequestConfig,
AxiosResponse,
} from "axios";
import type { Readable } from "node:stream";
import {
DEFAULT_BASE_URL,
DEFAULT_MAX_RETRIES,
@@ -6,69 +13,36 @@ import {
DEFAULT_TIMEOUT_SECONDS,
} from "../types/common";
import type {
BinaryStream,
DifyClientConfig,
DifyResponse,
DifyStream,
Headers,
JsonValue,
QueryParams,
RequestMethod,
} from "../types/common";
import type { DifyError } from "../errors/dify-error";
import {
APIError,
AuthenticationError,
DifyError,
FileUploadError,
NetworkError,
RateLimitError,
TimeoutError,
ValidationError,
} from "../errors/dify-error";
import type { SdkFormData } from "./form-data";
import { getFormDataHeaders, isFormData } from "./form-data";
import { createBinaryStream, createSseStream } from "./sse";
import { getRetryDelayMs, shouldRetry, sleep } from "./retry";
import { validateParams } from "../client/validation";
import { hasStringProperty, isRecord } from "../internal/type-guards";
const DEFAULT_USER_AGENT = "dify-client-node";
export type HttpResponseType = "json" | "bytes" | "stream" | "arraybuffer";
export type HttpRequestBody =
| JsonValue
| Readable
| SdkFormData
| URLSearchParams
| ArrayBuffer
| ArrayBufferView
| Blob
| string
| null;
export type ResponseDataFor<TResponseType extends HttpResponseType> =
TResponseType extends "stream"
? Readable
: TResponseType extends "bytes" | "arraybuffer"
? Buffer
: JsonValue | string | null;
export type RawHttpResponse<TData = unknown> = {
data: TData;
status: number;
headers: Headers;
requestId?: string;
url: string;
};
export type RequestOptions<TResponseType extends HttpResponseType = "json"> = {
export type RequestOptions = {
method: RequestMethod;
path: string;
query?: QueryParams;
data?: HttpRequestBody;
data?: unknown;
headers?: Headers;
responseType?: TResponseType;
responseType?: AxiosRequestConfig["responseType"];
};
export type HttpClientSettings = Required<
@@ -77,23 +51,6 @@ export type HttpClientSettings = Required<
apiKey: string;
};
type FetchRequestInit = RequestInit & {
duplex?: "half";
};
type PreparedRequestBody = {
body?: BodyInit | null;
headers: Headers;
duplex?: "half";
replayable: boolean;
};
type TimeoutContext = {
cleanup: () => void;
reason: Error;
signal: AbortSignal;
};
const normalizeSettings = (config: DifyClientConfig): HttpClientSettings => ({
apiKey: config.apiKey,
baseUrl: config.baseUrl ?? DEFAULT_BASE_URL,
@@ -103,10 +60,19 @@ const normalizeSettings = (config: DifyClientConfig): HttpClientSettings => ({
enableLogging: config.enableLogging ?? false,
});
const normalizeHeaders = (headers: globalThis.Headers): Headers => {
const normalizeHeaders = (headers: AxiosResponse["headers"]): Headers => {
const result: Headers = {};
headers.forEach((value, key) => {
result[key.toLowerCase()] = value;
if (!headers) {
return result;
}
Object.entries(headers).forEach(([key, value]) => {
if (Array.isArray(value)) {
result[key.toLowerCase()] = value.join(", ");
} else if (typeof value === "string") {
result[key.toLowerCase()] = value;
} else if (typeof value === "number") {
result[key.toLowerCase()] = value.toString();
}
});
return result;
};
@@ -114,18 +80,9 @@ const normalizeHeaders = (headers: globalThis.Headers): Headers => {
const resolveRequestId = (headers: Headers): string | undefined =>
headers["x-request-id"] ?? headers["x-requestid"];
const buildRequestUrl = (
baseUrl: string,
path: string,
query?: QueryParams
): string => {
const buildRequestUrl = (baseUrl: string, path: string): string => {
const trimmed = baseUrl.replace(/\/+$/, "");
const url = new URL(`${trimmed}${path}`);
const queryString = buildQueryString(query);
if (queryString) {
url.search = queryString;
}
return url.toString();
return `${trimmed}${path}`;
};
const buildQueryString = (params?: QueryParams): string => {
@@ -164,53 +121,24 @@ const parseRetryAfterSeconds = (headerValue?: string): number | undefined => {
return undefined;
};
const isPipeableStream = (value: unknown): value is { pipe: (destination: unknown) => unknown } => {
const isReadableStream = (value: unknown): value is Readable => {
if (!value || typeof value !== "object") {
return false;
}
return typeof (value as { pipe?: unknown }).pipe === "function";
};
const toNodeReadable = (value: unknown): Readable | null => {
if (value instanceof Readable) {
return value;
const isUploadLikeRequest = (config?: AxiosRequestConfig): boolean => {
const url = (config?.url ?? "").toLowerCase();
if (!url) {
return false;
}
if (!isPipeableStream(value)) {
return null;
}
const readable = new Readable({
read() {},
});
return readable.wrap(value as NodeJS.ReadableStream);
};
const isBinaryBody = (
value: unknown
): value is ArrayBuffer | ArrayBufferView | Blob => {
if (value instanceof Blob) {
return true;
}
if (value instanceof ArrayBuffer) {
return true;
}
return ArrayBuffer.isView(value);
};
const isJsonBody = (value: unknown): value is Exclude<JsonValue, string> =>
value === null ||
typeof value === "boolean" ||
typeof value === "number" ||
Array.isArray(value) ||
isRecord(value);
const isUploadLikeRequest = (path: string): boolean => {
const normalizedPath = path.toLowerCase();
return (
normalizedPath.includes("upload") ||
normalizedPath.includes("/files/") ||
normalizedPath.includes("audio-to-text") ||
normalizedPath.includes("create_by_file") ||
normalizedPath.includes("update_by_file")
url.includes("upload") ||
url.includes("/files/") ||
url.includes("audio-to-text") ||
url.includes("create_by_file") ||
url.includes("update_by_file")
);
};
@@ -218,242 +146,88 @@ const resolveErrorMessage = (status: number, responseBody: unknown): string => {
if (typeof responseBody === "string" && responseBody.trim().length > 0) {
return responseBody;
}
if (hasStringProperty(responseBody, "message")) {
const message = responseBody.message.trim();
if (message.length > 0) {
if (
responseBody &&
typeof responseBody === "object" &&
"message" in responseBody
) {
const message = (responseBody as Record<string, unknown>).message;
if (typeof message === "string" && message.trim().length > 0) {
return message;
}
}
return `Request failed with status code ${status}`;
};
const parseJsonLikeText = (
value: string,
contentType?: string | null
): JsonValue | string | null => {
if (value.length === 0) {
return null;
}
const shouldParseJson =
contentType?.includes("application/json") === true ||
contentType?.includes("+json") === true;
if (!shouldParseJson) {
try {
return JSON.parse(value) as JsonValue;
} catch {
return value;
}
}
return JSON.parse(value) as JsonValue;
};
const mapAxiosError = (error: unknown): DifyError => {
if (axios.isAxiosError(error)) {
const axiosError = error as AxiosError;
if (axiosError.response) {
const status = axiosError.response.status;
const headers = normalizeHeaders(axiosError.response.headers);
const requestId = resolveRequestId(headers);
const responseBody = axiosError.response.data;
const message = resolveErrorMessage(status, responseBody);
const prepareRequestBody = (
method: RequestMethod,
data: HttpRequestBody | undefined
): PreparedRequestBody => {
if (method === "GET" || data === undefined) {
return {
body: undefined,
headers: {},
replayable: true,
};
}
if (isFormData(data)) {
if ("getHeaders" in data && typeof data.getHeaders === "function") {
const readable = toNodeReadable(data);
if (!readable) {
throw new FileUploadError(
"Legacy FormData must be a readable stream when used with fetch"
);
if (status === 401) {
return new AuthenticationError(message, {
statusCode: status,
responseBody,
requestId,
});
}
return {
body: Readable.toWeb(readable) as BodyInit,
headers: getFormDataHeaders(data),
duplex: "half",
replayable: false,
};
if (status === 429) {
const retryAfter = parseRetryAfterSeconds(headers["retry-after"]);
return new RateLimitError(message, {
statusCode: status,
responseBody,
requestId,
retryAfter,
});
}
if (status === 422) {
return new ValidationError(message, {
statusCode: status,
responseBody,
requestId,
});
}
if (status === 400) {
if (isUploadLikeRequest(axiosError.config)) {
return new FileUploadError(message, {
statusCode: status,
responseBody,
requestId,
});
}
}
return new APIError(message, {
statusCode: status,
responseBody,
requestId,
});
}
return {
body: data as BodyInit,
headers: getFormDataHeaders(data),
replayable: true,
};
}
if (typeof data === "string") {
return {
body: data,
headers: {},
replayable: true,
};
}
const readable = toNodeReadable(data);
if (readable) {
return {
body: Readable.toWeb(readable) as BodyInit,
headers: {},
duplex: "half",
replayable: false,
};
}
if (data instanceof URLSearchParams || isBinaryBody(data)) {
const body =
ArrayBuffer.isView(data) && !(data instanceof Uint8Array)
? new Uint8Array(data.buffer, data.byteOffset, data.byteLength)
: data;
return {
body: body as BodyInit,
headers: {},
replayable: true,
};
}
if (isJsonBody(data)) {
return {
body: JSON.stringify(data),
headers: {
"Content-Type": "application/json",
},
replayable: true,
};
}
throw new ValidationError("Unsupported request body type");
};
const createTimeoutContext = (timeoutMs: number): TimeoutContext => {
const controller = new AbortController();
const reason = new Error("Request timed out");
const timer = setTimeout(() => {
controller.abort(reason);
}, timeoutMs);
return {
signal: controller.signal,
reason,
cleanup: () => {
clearTimeout(timer);
},
};
};
const parseResponseBody = async <TResponseType extends HttpResponseType>(
response: Response,
responseType: TResponseType
): Promise<ResponseDataFor<TResponseType>> => {
if (responseType === "stream") {
if (!response.body) {
throw new NetworkError("Response body is empty");
if (axiosError.code === "ECONNABORTED") {
return new TimeoutError("Request timed out", { cause: axiosError });
}
return Readable.fromWeb(
response.body as unknown as Parameters<typeof Readable.fromWeb>[0]
) as ResponseDataFor<TResponseType>;
return new NetworkError(axiosError.message, { cause: axiosError });
}
if (responseType === "bytes" || responseType === "arraybuffer") {
const bytes = Buffer.from(await response.arrayBuffer());
return bytes as ResponseDataFor<TResponseType>;
}
if (response.status === 204 || response.status === 205 || response.status === 304) {
return null as ResponseDataFor<TResponseType>;
}
const text = await response.text();
try {
return parseJsonLikeText(
text,
response.headers.get("content-type")
) as ResponseDataFor<TResponseType>;
} catch (error) {
if (!response.ok && error instanceof SyntaxError) {
return text as ResponseDataFor<TResponseType>;
}
throw error;
}
};
const mapHttpError = (
response: RawHttpResponse,
path: string
): DifyError => {
const status = response.status;
const responseBody = response.data;
const message = resolveErrorMessage(status, responseBody);
if (status === 401) {
return new AuthenticationError(message, {
statusCode: status,
responseBody,
requestId: response.requestId,
});
}
if (status === 429) {
const retryAfter = parseRetryAfterSeconds(response.headers["retry-after"]);
return new RateLimitError(message, {
statusCode: status,
responseBody,
requestId: response.requestId,
retryAfter,
});
}
if (status === 422) {
return new ValidationError(message, {
statusCode: status,
responseBody,
requestId: response.requestId,
});
}
if (status === 400 && isUploadLikeRequest(path)) {
return new FileUploadError(message, {
statusCode: status,
responseBody,
requestId: response.requestId,
});
}
return new APIError(message, {
statusCode: status,
responseBody,
requestId: response.requestId,
});
};
const mapTransportError = (
error: unknown,
timeoutContext: TimeoutContext
): DifyError => {
if (error instanceof DifyError) {
return error;
}
if (
timeoutContext.signal.aborted &&
timeoutContext.signal.reason === timeoutContext.reason
) {
return new TimeoutError("Request timed out", { cause: error });
}
if (error instanceof Error) {
if (error.name === "AbortError" || error.name === "TimeoutError") {
return new TimeoutError("Request timed out", { cause: error });
}
return new NetworkError(error.message, { cause: error });
}
return new NetworkError("Unexpected network error", { cause: error });
};
export class HttpClient {
private axios: AxiosInstance;
private settings: HttpClientSettings;
constructor(config: DifyClientConfig) {
this.settings = normalizeSettings(config);
this.axios = axios.create({
baseURL: this.settings.baseUrl,
timeout: this.settings.timeout * 1000,
});
}
updateApiKey(apiKey: string): void {
@@ -464,123 +238,118 @@ export class HttpClient {
return { ...this.settings };
}
async request<
T,
TResponseType extends HttpResponseType = "json",
>(options: RequestOptions<TResponseType>): Promise<DifyResponse<T>> {
async request<T>(options: RequestOptions): Promise<DifyResponse<T>> {
const response = await this.requestRaw(options);
const headers = normalizeHeaders(response.headers);
return {
data: response.data as T,
status: response.status,
headers: response.headers,
requestId: response.requestId,
headers,
requestId: resolveRequestId(headers),
};
}
async requestStream<T>(options: RequestOptions): Promise<DifyStream<T>> {
async requestStream<T>(options: RequestOptions) {
const response = await this.requestRaw({
...options,
responseType: "stream",
});
return createSseStream<T>(response.data, {
const headers = normalizeHeaders(response.headers);
return createSseStream<T>(response.data as Readable, {
status: response.status,
headers: response.headers,
requestId: response.requestId,
headers,
requestId: resolveRequestId(headers),
});
}
async requestBinaryStream(options: RequestOptions): Promise<BinaryStream> {
async requestBinaryStream(options: RequestOptions) {
const response = await this.requestRaw({
...options,
responseType: "stream",
});
return createBinaryStream(response.data, {
const headers = normalizeHeaders(response.headers);
return createBinaryStream(response.data as Readable, {
status: response.status,
headers: response.headers,
requestId: response.requestId,
headers,
requestId: resolveRequestId(headers),
});
}
async requestRaw<TResponseType extends HttpResponseType = "json">(
options: RequestOptions<TResponseType>
): Promise<RawHttpResponse<ResponseDataFor<TResponseType>>> {
const responseType = options.responseType ?? "json";
const { method, path, query, data, headers } = options;
const { apiKey, enableLogging, maxRetries, retryDelay, timeout } = this.settings;
async requestRaw(options: RequestOptions): Promise<AxiosResponse> {
const { method, path, query, data, headers, responseType } = options;
const { apiKey, enableLogging, maxRetries, retryDelay, timeout } =
this.settings;
if (query) {
validateParams(query as Record<string, unknown>);
}
if (isRecord(data) && !Array.isArray(data) && !isFormData(data) && !isPipeableStream(data)) {
validateParams(data);
if (
data &&
typeof data === "object" &&
!Array.isArray(data) &&
!isFormData(data) &&
!isReadableStream(data)
) {
validateParams(data as Record<string, unknown>);
}
const url = buildRequestUrl(this.settings.baseUrl, path, query);
const requestHeaders: Headers = {
Authorization: `Bearer ${apiKey}`,
...headers,
};
if (
typeof process !== "undefined" &&
!!process.versions?.node &&
!requestHeaders["User-Agent"] &&
!requestHeaders["user-agent"]
) {
requestHeaders["User-Agent"] = DEFAULT_USER_AGENT;
}
if (isFormData(data)) {
Object.assign(requestHeaders, getFormDataHeaders(data));
} else if (data && method !== "GET") {
requestHeaders["Content-Type"] = "application/json";
}
const url = buildRequestUrl(this.settings.baseUrl, path);
if (enableLogging) {
console.info(`dify-client-node request ${method} ${url}`);
}
const axiosConfig: AxiosRequestConfig = {
method,
url: path,
params: query,
paramsSerializer: {
serialize: (params) => buildQueryString(params as QueryParams),
},
headers: requestHeaders,
responseType: responseType ?? "json",
timeout: timeout * 1000,
};
if (method !== "GET" && data !== undefined) {
axiosConfig.data = data;
}
let attempt = 0;
// `attempt` is a zero-based retry counter
// Total attempts = 1 (initial) + maxRetries
// e.g., maxRetries=3 means: attempt 0 (initial), then retries at 1, 2, 3
while (true) {
const preparedBody = prepareRequestBody(method, data);
const requestHeaders: Headers = {
Authorization: `Bearer ${apiKey}`,
...preparedBody.headers,
...headers,
};
if (
typeof process !== "undefined" &&
!!process.versions?.node &&
!requestHeaders["User-Agent"] &&
!requestHeaders["user-agent"]
) {
requestHeaders["User-Agent"] = DEFAULT_USER_AGENT;
}
const timeoutContext = createTimeoutContext(timeout * 1000);
const requestInit: FetchRequestInit = {
method,
headers: requestHeaders,
body: preparedBody.body,
signal: timeoutContext.signal,
};
if (preparedBody.duplex) {
requestInit.duplex = preparedBody.duplex;
}
try {
const fetchResponse = await fetch(url, requestInit);
const responseHeaders = normalizeHeaders(fetchResponse.headers);
const parsedBody =
(await parseResponseBody(fetchResponse, responseType)) as ResponseDataFor<TResponseType>;
const response: RawHttpResponse<ResponseDataFor<TResponseType>> = {
data: parsedBody,
status: fetchResponse.status,
headers: responseHeaders,
requestId: resolveRequestId(responseHeaders),
url,
};
if (!fetchResponse.ok) {
throw mapHttpError(response, path);
}
const response = await this.axios.request(axiosConfig);
if (enableLogging) {
console.info(
`dify-client-node response ${response.status} ${method} ${url}`
);
}
return response;
} catch (error) {
const mapped = mapTransportError(error, timeoutContext);
const shouldRetryRequest =
preparedBody.replayable && shouldRetry(mapped, attempt, maxRetries);
if (!shouldRetryRequest) {
const mapped = mapAxiosError(error);
if (!shouldRetry(mapped, attempt, maxRetries)) {
throw mapped;
}
const retryAfterSeconds =
@@ -593,8 +362,6 @@ export class HttpClient {
}
attempt += 1;
await sleep(delay);
} finally {
timeoutContext.cleanup();
}
}
}

View File

@@ -1,4 +1,4 @@
import { describe, expect, it, vi } from "vitest";
import { describe, expect, it } from "vitest";
import { getFormDataHeaders, isFormData } from "./form-data";
describe("form-data helpers", () => {
@@ -11,15 +11,9 @@ describe("form-data helpers", () => {
expect(isFormData({})).toBe(false);
});
it("detects native FormData", () => {
const form = new FormData();
form.append("field", "value");
expect(isFormData(form)).toBe(true);
});
it("returns headers from form-data", () => {
const formLike = {
append: vi.fn(),
append: () => {},
getHeaders: () => ({ "content-type": "multipart/form-data" }),
};
expect(getFormDataHeaders(formLike)).toEqual({

View File

@@ -1,25 +1,19 @@
import type { Headers } from "../types/common";
type FormDataAppendValue = Blob | string;
export type WebFormData = FormData;
export type LegacyNodeFormData = {
append: (name: string, value: FormDataAppendValue, fileName?: string) => void;
getHeaders: () => Headers;
export type FormDataLike = {
append: (...args: unknown[]) => void;
getHeaders?: () => Headers;
constructor?: { name?: string };
};
export type SdkFormData = WebFormData | LegacyNodeFormData;
export const isFormData = (value: unknown): value is SdkFormData => {
export const isFormData = (value: unknown): value is FormDataLike => {
if (!value || typeof value !== "object") {
return false;
}
if (typeof FormData !== "undefined" && value instanceof FormData) {
return true;
}
const candidate = value as Partial<LegacyNodeFormData>;
const candidate = value as FormDataLike;
if (typeof candidate.append !== "function") {
return false;
}
@@ -29,8 +23,8 @@ export const isFormData = (value: unknown): value is SdkFormData => {
return candidate.constructor?.name === "FormData";
};
export const getFormDataHeaders = (form: SdkFormData): Headers => {
if ("getHeaders" in form && typeof form.getHeaders === "function") {
export const getFormDataHeaders = (form: FormDataLike): Headers => {
if (typeof form.getHeaders === "function") {
return form.getHeaders();
}
return {};

View File

@@ -2,7 +2,7 @@ import { describe, expect, it } from "vitest";
import { getRetryDelayMs, shouldRetry } from "./retry";
import { NetworkError, RateLimitError, TimeoutError } from "../errors/dify-error";
const withMockedRandom = (value: number, fn: () => void): void => {
const withMockedRandom = (value, fn) => {
const original = Math.random;
Math.random = () => value;
try {

View File

@@ -6,10 +6,10 @@ describe("sse parsing", () => {
it("parses event and data lines", async () => {
const stream = Readable.from([
"event: message\n",
'data: {"answer":"hi"}\n',
"data: {\"answer\":\"hi\"}\n",
"\n",
]);
const events: Array<{ event?: string; data: unknown; raw: string }> = [];
const events = [];
for await (const event of parseSseStream(stream)) {
events.push(event);
}
@@ -20,7 +20,7 @@ describe("sse parsing", () => {
it("handles multi-line data payloads", async () => {
const stream = Readable.from(["data: line1\n", "data: line2\n", "\n"]);
const events: Array<{ event?: string; data: unknown; raw: string }> = [];
const events = [];
for await (const event of parseSseStream(stream)) {
events.push(event);
}
@@ -28,28 +28,10 @@ describe("sse parsing", () => {
expect(events[0].data).toBe("line1\nline2");
});
it("ignores comments and flushes the last event without a trailing separator", async () => {
const stream = Readable.from([
Buffer.from(": keep-alive\n"),
Uint8Array.from(Buffer.from('event: message\ndata: {"delta":"hi"}\n')),
]);
const events: Array<{ event?: string; data: unknown; raw: string }> = [];
for await (const event of parseSseStream(stream)) {
events.push(event);
}
expect(events).toEqual([
{
event: "message",
data: { delta: "hi" },
raw: '{"delta":"hi"}',
},
]);
});
it("createSseStream exposes toText", async () => {
const stream = Readable.from([
'data: {"answer":"hello"}\n\n',
'data: {"delta":" world"}\n\n',
"data: {\"answer\":\"hello\"}\n\n",
"data: {\"delta\":\" world\"}\n\n",
]);
const sseStream = createSseStream(stream, {
status: 200,
@@ -90,6 +72,5 @@ describe("sse parsing", () => {
});
expect(binary.status).toBe(200);
expect(binary.headers["content-type"]).toBe("audio/mpeg");
expect(binary.toReadable()).toBe(stream);
});
});

View File

@@ -1,29 +1,12 @@
import type { Readable } from "node:stream";
import { StringDecoder } from "node:string_decoder";
import type {
BinaryStream,
DifyStream,
Headers,
JsonValue,
StreamEvent,
} from "../types/common";
import { isRecord } from "../internal/type-guards";
const toBufferChunk = (chunk: unknown): Buffer => {
if (Buffer.isBuffer(chunk)) {
return chunk;
}
if (chunk instanceof Uint8Array) {
return Buffer.from(chunk);
}
return Buffer.from(String(chunk));
};
import type { BinaryStream, DifyStream, Headers, StreamEvent } from "../types/common";
const readLines = async function* (stream: Readable): AsyncIterable<string> {
const decoder = new StringDecoder("utf8");
let buffered = "";
for await (const chunk of stream) {
buffered += decoder.write(toBufferChunk(chunk));
buffered += decoder.write(chunk as Buffer);
let index = buffered.indexOf("\n");
while (index >= 0) {
let line = buffered.slice(0, index);
@@ -41,12 +24,12 @@ const readLines = async function* (stream: Readable): AsyncIterable<string> {
}
};
const parseMaybeJson = (value: string): JsonValue | string | null => {
const parseMaybeJson = (value: string): unknown => {
if (!value) {
return null;
}
try {
return JSON.parse(value) as JsonValue;
return JSON.parse(value);
} catch {
return value;
}
@@ -98,17 +81,18 @@ const extractTextFromEvent = (data: unknown): string => {
if (typeof data === "string") {
return data;
}
if (!isRecord(data)) {
if (!data || typeof data !== "object") {
return "";
}
if (typeof data.answer === "string") {
return data.answer;
const record = data as Record<string, unknown>;
if (typeof record.answer === "string") {
return record.answer;
}
if (typeof data.text === "string") {
return data.text;
if (typeof record.text === "string") {
return record.text;
}
if (typeof data.delta === "string") {
return data.delta;
if (typeof record.delta === "string") {
return record.delta;
}
return "";
};

View File

@@ -0,0 +1,227 @@
import { afterAll, beforeAll, beforeEach, describe, expect, it, vi } from "vitest";
import { ChatClient, DifyClient, WorkflowClient, BASE_URL, routes } from "./index";
import axios from "axios";
// Shared mock standing in for the axios instance's `request` method; every
// client constructed in these tests routes through it.
const mockRequest = vi.fn();
// Stubs axios.create so new clients receive the mocked transport instead of
// a real axios instance.
const setupAxiosMock = () => {
  vi.spyOn(axios, "create").mockReturnValue({ request: mockRequest });
};
// Reset all spies and recorded calls before each test, then re-install the
// axios stub (restoreAllMocks removes the spyOn, so it must be re-applied).
beforeEach(() => {
  vi.restoreAllMocks();
  mockRequest.mockReset();
  setupAxiosMock();
});
// Construction-level behavior of DifyClient.
describe("Client", () => {
  it("should create a client", () => {
    new DifyClient("test");
    // The axios instance must be created with the default base URL and a
    // 60-second timeout (axios takes milliseconds).
    expect(axios.create).toHaveBeenCalledWith({
      baseURL: BASE_URL,
      timeout: 60000,
    });
  });
  it("should update the api key", () => {
    const difyClient = new DifyClient("test");
    difyClient.updateApiKey("test2");
    // The new key is observable through the HTTP client's settings snapshot.
    expect(difyClient.getHttpClient().getSettings().apiKey).toBe("test2");
  });
});
// Request shaping: verifies the axios config produced by high-level calls.
describe("Send Requests", () => {
  it("should make a successful request to the application parameter", async () => {
    const difyClient = new DifyClient("test");
    const method = "GET";
    const endpoint = routes.application.url();
    mockRequest.mockResolvedValue({
      status: 200,
      data: "response",
      headers: {},
    });
    await difyClient.sendRequest(method, endpoint);
    // Inspect the raw axios config captured by the mock.
    const requestConfig = mockRequest.mock.calls[0][0];
    expect(requestConfig).toMatchObject({
      method,
      url: endpoint,
      params: undefined,
      responseType: "json",
      timeout: 60000,
    });
    // The bearer token must come from the client's API key.
    expect(requestConfig.headers.Authorization).toBe("Bearer test");
  });
  it("uses the getMeta route configuration", async () => {
    const difyClient = new DifyClient("test");
    mockRequest.mockResolvedValue({ status: 200, data: "ok", headers: {} });
    await difyClient.getMeta("end-user");
    // getMeta carries the end user as a query parameter, per the route config.
    expect(mockRequest).toHaveBeenCalledWith(expect.objectContaining({
      method: routes.getMeta.method,
      url: routes.getMeta.url(),
      params: { user: "end-user" },
      headers: expect.objectContaining({
        Authorization: "Bearer test",
      }),
      responseType: "json",
      timeout: 60000,
    }));
  });
});
// Multipart handling: the client must forward FormData-provided headers
// (including the boundary) rather than forcing its own Content-Type.
describe("File uploads", () => {
  // Swap the global FormData for a minimal mock that exposes getHeaders(),
  // mimicking the legacy `form-data` package; restored after the suite.
  const OriginalFormData = globalThis.FormData;
  beforeAll(() => {
    globalThis.FormData = class FormDataMock {
      append() {}
      getHeaders() {
        return {
          "content-type": "multipart/form-data; boundary=test",
        };
      }
    };
  });
  afterAll(() => {
    globalThis.FormData = OriginalFormData;
  });
  it("does not override multipart boundary headers for FormData", async () => {
    const difyClient = new DifyClient("test");
    const form = new globalThis.FormData();
    mockRequest.mockResolvedValue({ status: 200, data: "ok", headers: {} });
    await difyClient.fileUpload(form, "end-user");
    expect(mockRequest).toHaveBeenCalledWith(expect.objectContaining({
      method: routes.fileUpload.method,
      url: routes.fileUpload.url(),
      params: undefined,
      // The boundary-bearing content-type from the form survives untouched.
      headers: expect.objectContaining({
        Authorization: "Bearer test",
        "content-type": "multipart/form-data; boundary=test",
      }),
      responseType: "json",
      timeout: 60000,
      // The FormData instance itself is passed through as the request body.
      data: form,
    }));
  });
});
// Workflow-specific endpoints: stop path and log filter mapping.
describe("Workflow client", () => {
  it("uses tasks stop path for workflow stop", async () => {
    const workflowClient = new WorkflowClient("test");
    mockRequest.mockResolvedValue({ status: 200, data: "stopped", headers: {} });
    await workflowClient.stop("task-1", "end-user");
    expect(mockRequest).toHaveBeenCalledWith(expect.objectContaining({
      method: routes.stopWorkflow.method,
      url: routes.stopWorkflow.url("task-1"),
      params: undefined,
      headers: expect.objectContaining({
        Authorization: "Bearer test",
        "Content-Type": "application/json",
      }),
      responseType: "json",
      timeout: 60000,
      // The end user travels in the JSON body, not the query string.
      data: { user: "end-user" },
    }));
  });
  it("maps workflow log filters to service api params", async () => {
    const workflowClient = new WorkflowClient("test");
    mockRequest.mockResolvedValue({ status: 200, data: "ok", headers: {} });
    // camelCase SDK filter names...
    await workflowClient.getLogs({
      createdAtAfter: "2024-01-01T00:00:00Z",
      createdAtBefore: "2024-01-02T00:00:00Z",
      createdByEndUserSessionId: "sess-1",
      createdByAccount: "acc-1",
      page: 2,
      limit: 10,
    });
    // ...must be translated to the service API's snake_case/dunder params.
    expect(mockRequest).toHaveBeenCalledWith(expect.objectContaining({
      method: "GET",
      url: "/workflows/logs",
      params: {
        created_at__after: "2024-01-01T00:00:00Z",
        created_at__before: "2024-01-02T00:00:00Z",
        created_by_end_user_session_id: "sess-1",
        created_by_account: "acc-1",
        page: 2,
        limit: 10,
      },
      headers: expect.objectContaining({
        Authorization: "Bearer test",
      }),
      responseType: "json",
      timeout: 60000,
    }));
  });
});
// Chat-specific endpoints: query parameter placement for messages,
// conversations, and app-level feedback listing.
describe("Chat client", () => {
  it("places user in query for suggested messages", async () => {
    const chatClient = new ChatClient("test");
    mockRequest.mockResolvedValue({ status: 200, data: "ok", headers: {} });
    await chatClient.getSuggested("msg-1", "end-user");
    expect(mockRequest).toHaveBeenCalledWith(expect.objectContaining({
      method: routes.getSuggested.method,
      url: routes.getSuggested.url("msg-1"),
      // For this GET route the user belongs in the query string.
      params: { user: "end-user" },
      headers: expect.objectContaining({
        Authorization: "Bearer test",
      }),
      responseType: "json",
      timeout: 60000,
    }));
  });
  it("uses last_id when listing conversations", async () => {
    const chatClient = new ChatClient("test");
    mockRequest.mockResolvedValue({ status: 200, data: "ok", headers: {} });
    await chatClient.getConversations("end-user", "last-1", 10);
    // Cursor pagination: the SDK's cursor argument maps to `last_id`.
    expect(mockRequest).toHaveBeenCalledWith(expect.objectContaining({
      method: routes.getConversations.method,
      url: routes.getConversations.url(),
      params: { user: "end-user", last_id: "last-1", limit: 10 },
      headers: expect.objectContaining({
        Authorization: "Bearer test",
      }),
      responseType: "json",
      timeout: 60000,
    }));
  });
  it("lists app feedbacks without user params", async () => {
    const chatClient = new ChatClient("test");
    mockRequest.mockResolvedValue({ status: 200, data: "ok", headers: {} });
    await chatClient.getAppFeedbacks(1, 20);
    // App-wide feedback listing is page-based and carries no user filter.
    expect(mockRequest).toHaveBeenCalledWith(expect.objectContaining({
      method: "GET",
      url: "/app/feedbacks",
      params: { page: 1, limit: 20 },
      headers: expect.objectContaining({
        Authorization: "Bearer test",
      }),
      responseType: "json",
      timeout: 60000,
    }));
  });
});

View File

@@ -1,240 +0,0 @@
import { Readable } from "node:stream";
import { afterAll, beforeAll, beforeEach, describe, expect, it, vi } from "vitest";
import { BASE_URL, ChatClient, DifyClient, WorkflowClient, routes } from "./index";
// Installs a vitest mock in place of the global fetch and returns it so the
// caller can queue responses; vi.unstubAllGlobals() undoes the stub.
const stubFetch = (): ReturnType<typeof vi.fn> => {
  const fetchStub = vi.fn();
  vi.stubGlobal("fetch", fetchStub);
  return fetchStub;
};
// Builds a JSON Response for fetch mocks. Defaults to status 200 and a JSON
// content type; anything in `init` overrides the defaults, and caller headers
// are merged on top of the content-type default.
const jsonResponse = (body: unknown, init: ResponseInit = {}): Response => {
  const mergedHeaders = {
    "content-type": "application/json",
    ...(init.headers ?? {}),
  };
  return new Response(JSON.stringify(body), {
    status: 200,
    ...init,
    headers: mergedHeaders,
  });
};
// Construction-level behavior of DifyClient (fetch-based transport).
describe("Client", () => {
  // Undo spies and global stubs (including any fetch stub) between tests.
  beforeEach(() => {
    vi.restoreAllMocks();
    vi.unstubAllGlobals();
  });
  it("creates a client with default settings", () => {
    const difyClient = new DifyClient("test");
    // Defaults: base URL constant and a 60-second timeout (seconds here,
    // unlike axios-style millisecond configs).
    expect(difyClient.getHttpClient().getSettings()).toMatchObject({
      apiKey: "test",
      baseUrl: BASE_URL,
      timeout: 60,
    });
  });
  it("updates the api key", () => {
    const difyClient = new DifyClient("test");
    difyClient.updateApiKey("test2");
    expect(difyClient.getHttpClient().getSettings().apiKey).toBe("test2");
  });
});
// Request shaping against the stubbed global fetch: URL construction,
// method, and default headers.
describe("Send Requests", () => {
  beforeEach(() => {
    vi.restoreAllMocks();
    vi.unstubAllGlobals();
  });
  it("makes a successful request to the application parameter route", async () => {
    const fetchMock = stubFetch();
    const difyClient = new DifyClient("test");
    const method = "GET";
    const endpoint = routes.application.url();
    fetchMock.mockResolvedValueOnce(jsonResponse("response"));
    const response = await difyClient.sendRequest(method, endpoint);
    // The client surfaces status, parsed body, and normalized headers.
    expect(response).toMatchObject({
      status: 200,
      data: "response",
      headers: {
        "content-type": "application/json",
      },
    });
    // Inspect the raw fetch invocation: full URL plus RequestInit.
    const [url, init] = fetchMock.mock.calls[0] as [string, RequestInit];
    expect(url).toBe(`${BASE_URL}${endpoint}`);
    expect(init.method).toBe(method);
    expect(init.headers).toMatchObject({
      Authorization: "Bearer test",
      "User-Agent": "dify-client-node",
    });
  });
  it("uses the getMeta route configuration", async () => {
    const fetchMock = stubFetch();
    const difyClient = new DifyClient("test");
    fetchMock.mockResolvedValueOnce(jsonResponse({ ok: true }));
    await difyClient.getMeta("end-user");
    const [url, init] = fetchMock.mock.calls[0] as [string, RequestInit];
    // The end user is encoded into the query string of the final URL.
    expect(url).toBe(`${BASE_URL}${routes.getMeta.url()}?user=end-user`);
    expect(init.method).toBe(routes.getMeta.method);
    expect(init.headers).toMatchObject({
      Authorization: "Bearer test",
    });
  });
});
// Multipart handling over fetch: legacy (stream-like) FormData must keep its
// boundary header and be converted to a streamable body.
describe("File uploads", () => {
  // Replace global FormData with a Readable-backed mock mimicking the legacy
  // `form-data` package (stream body + getHeaders); restored after the suite.
  const OriginalFormData = globalThis.FormData;
  beforeAll(() => {
    globalThis.FormData = class FormDataMock extends Readable {
      constructor() {
        super();
      }
      _read() {}
      append() {}
      getHeaders() {
        return {
          "content-type": "multipart/form-data; boundary=test",
        };
      }
    } as unknown as typeof FormData;
  });
  afterAll(() => {
    globalThis.FormData = OriginalFormData;
  });
  beforeEach(() => {
    vi.restoreAllMocks();
    vi.unstubAllGlobals();
  });
  it("does not override multipart boundary headers for legacy FormData", async () => {
    const fetchMock = stubFetch();
    const difyClient = new DifyClient("test");
    const form = new globalThis.FormData();
    fetchMock.mockResolvedValueOnce(jsonResponse({ ok: true }));
    await difyClient.fileUpload(form, "end-user");
    const [url, init] = fetchMock.mock.calls[0] as [string, RequestInit];
    expect(url).toBe(`${BASE_URL}${routes.fileUpload.url()}`);
    expect(init.method).toBe(routes.fileUpload.method);
    // The boundary-bearing content-type from the form survives untouched.
    expect(init.headers).toMatchObject({
      Authorization: "Bearer test",
      "content-type": "multipart/form-data; boundary=test",
    });
    // The body is a converted web stream, not the raw form object, and
    // streamed request bodies require fetch's half-duplex mode.
    expect(init.body).not.toBe(form);
    expect((init as RequestInit & { duplex?: string }).duplex).toBe("half");
  });
});
describe("Workflow client", () => {
beforeEach(() => {
vi.restoreAllMocks();
vi.unstubAllGlobals();
});
it("uses tasks stop path for workflow stop", async () => {
const fetchMock = stubFetch();
const workflowClient = new WorkflowClient("test");
fetchMock.mockResolvedValueOnce(jsonResponse({ result: "success" }));
await workflowClient.stop("task-1", "end-user");
const [url, init] = fetchMock.mock.calls[0] as [string, RequestInit];
expect(url).toBe(`${BASE_URL}${routes.stopWorkflow.url("task-1")}`);
expect(init.method).toBe(routes.stopWorkflow.method);
expect(init.headers).toMatchObject({
Authorization: "Bearer test",
"Content-Type": "application/json",
});
expect(init.body).toBe(JSON.stringify({ user: "end-user" }));
});
it("maps workflow log filters to service api params", async () => {
const fetchMock = stubFetch();
const workflowClient = new WorkflowClient("test");
fetchMock.mockResolvedValueOnce(jsonResponse({ ok: true }));
await workflowClient.getLogs({
createdAtAfter: "2024-01-01T00:00:00Z",
createdAtBefore: "2024-01-02T00:00:00Z",
createdByEndUserSessionId: "sess-1",
createdByAccount: "acc-1",
page: 2,
limit: 10,
});
const [url] = fetchMock.mock.calls[0] as [string, RequestInit];
const parsedUrl = new URL(url);
expect(parsedUrl.origin + parsedUrl.pathname).toBe(`${BASE_URL}/workflows/logs`);
expect(parsedUrl.searchParams.get("created_at__before")).toBe(
"2024-01-02T00:00:00Z"
);
expect(parsedUrl.searchParams.get("created_at__after")).toBe(
"2024-01-01T00:00:00Z"
);
expect(parsedUrl.searchParams.get("created_by_end_user_session_id")).toBe(
"sess-1"
);
expect(parsedUrl.searchParams.get("created_by_account")).toBe("acc-1");
expect(parsedUrl.searchParams.get("page")).toBe("2");
expect(parsedUrl.searchParams.get("limit")).toBe("10");
});
});
describe("Chat client", () => {
beforeEach(() => {
vi.restoreAllMocks();
vi.unstubAllGlobals();
});
it("places user in query for suggested messages", async () => {
const fetchMock = stubFetch();
const chatClient = new ChatClient("test");
fetchMock.mockResolvedValueOnce(jsonResponse({ result: "success", data: [] }));
await chatClient.getSuggested("msg-1", "end-user");
const [url, init] = fetchMock.mock.calls[0] as [string, RequestInit];
expect(url).toBe(`${BASE_URL}${routes.getSuggested.url("msg-1")}?user=end-user`);
expect(init.method).toBe(routes.getSuggested.method);
expect(init.headers).toMatchObject({
Authorization: "Bearer test",
});
});
it("uses last_id when listing conversations", async () => {
const fetchMock = stubFetch();
const chatClient = new ChatClient("test");
fetchMock.mockResolvedValueOnce(jsonResponse({ ok: true }));
await chatClient.getConversations("end-user", "last-1", 10);
const [url] = fetchMock.mock.calls[0] as [string, RequestInit];
expect(url).toBe(`${BASE_URL}${routes.getConversations.url()}?user=end-user&last_id=last-1&limit=10`);
});
it("lists app feedbacks without user params", async () => {
const fetchMock = stubFetch();
const chatClient = new ChatClient("test");
fetchMock.mockResolvedValueOnce(jsonResponse({ data: [] }));
await chatClient.getAppFeedbacks(1, 20);
const [url] = fetchMock.mock.calls[0] as [string, RequestInit];
expect(url).toBe(`${BASE_URL}/app/feedbacks?page=1&limit=20`);
});
});

View File

@@ -1,9 +0,0 @@
/** Narrow an unknown value to a non-null object keyed by strings. */
export const isRecord = (value: unknown): value is Record<string, unknown> =>
  value !== null && typeof value === "object";

/**
 * Narrow an unknown value to an object that carries a string-valued
 * property named `key`.
 */
export const hasStringProperty = <TKey extends string>(
  value: unknown,
  key: TKey,
): value is Record<TKey, string> => {
  if (!isRecord(value)) {
    return false;
  }
  return typeof value[key] === "string";
};

View File

@@ -15,5 +15,4 @@ export type AnnotationListOptions = {
keyword?: string;
};
export type AnnotationResponse = JsonObject;
import type { JsonObject } from "./common";
export type AnnotationResponse = Record<string, unknown>;

View File

@@ -1,28 +1,17 @@
import type {
DifyRequestFile,
JsonObject,
ResponseMode,
StreamEvent,
} from "./common";
import type { StreamEvent } from "./common";
export type ChatMessageRequest = {
inputs?: JsonObject;
inputs?: Record<string, unknown>;
query: string;
user: string;
response_mode?: ResponseMode;
files?: DifyRequestFile[] | null;
response_mode?: "blocking" | "streaming";
files?: Array<Record<string, unknown>> | null;
conversation_id?: string;
auto_generate_name?: boolean;
workflow_id?: string;
retriever_from?: "app" | "dataset";
};
export type ChatMessageResponse = JsonObject;
export type ChatMessageResponse = Record<string, unknown>;
export type ChatStreamEvent = StreamEvent<JsonObject>;
export type ConversationSortBy =
| "created_at"
| "-created_at"
| "updated_at"
| "-updated_at";
export type ChatStreamEvent = StreamEvent<Record<string, unknown>>;

View File

@@ -1,18 +1,9 @@
import type { Readable } from "node:stream";
export const DEFAULT_BASE_URL = "https://api.dify.ai/v1";
export const DEFAULT_TIMEOUT_SECONDS = 60;
export const DEFAULT_MAX_RETRIES = 3;
export const DEFAULT_RETRY_DELAY_SECONDS = 1;
export type RequestMethod = "GET" | "POST" | "PATCH" | "PUT" | "DELETE";
export type ResponseMode = "blocking" | "streaming";
export type JsonPrimitive = string | number | boolean | null;
export type JsonValue = JsonPrimitive | JsonObject | JsonArray;
export type JsonObject = {
[key: string]: JsonValue;
};
export type JsonArray = JsonValue[];
export type QueryParamValue =
| string
@@ -24,13 +15,6 @@ export type QueryParamValue =
export type QueryParams = Record<string, QueryParamValue>;
export type Headers = Record<string, string>;
export type DifyRequestFile = JsonObject;
export type SuccessResponse = {
result: "success";
};
export type SuggestedQuestionsResponse = SuccessResponse & {
data: string[];
};
export type DifyClientConfig = {
apiKey: string;
@@ -70,18 +54,18 @@ export type StreamEvent<T = unknown> = {
};
export type DifyStream<T = unknown> = AsyncIterable<StreamEvent<T>> & {
data: Readable;
data: NodeJS.ReadableStream;
status: number;
headers: Headers;
requestId?: string;
toText(): Promise<string>;
toReadable(): Readable;
toReadable(): NodeJS.ReadableStream;
};
export type BinaryStream = {
data: Readable;
data: NodeJS.ReadableStream;
status: number;
headers: Headers;
requestId?: string;
toReadable(): Readable;
toReadable(): NodeJS.ReadableStream;
};

View File

@@ -1,18 +1,13 @@
import type {
DifyRequestFile,
JsonObject,
ResponseMode,
StreamEvent,
} from "./common";
import type { StreamEvent } from "./common";
export type CompletionRequest = {
inputs?: JsonObject;
response_mode?: ResponseMode;
inputs?: Record<string, unknown>;
response_mode?: "blocking" | "streaming";
user: string;
files?: DifyRequestFile[] | null;
files?: Array<Record<string, unknown>> | null;
retriever_from?: "app" | "dataset";
};
export type CompletionResponse = JsonObject;
export type CompletionResponse = Record<string, unknown>;
export type CompletionStreamEvent = StreamEvent<JsonObject>;
export type CompletionStreamEvent = StreamEvent<Record<string, unknown>>;

View File

@@ -14,7 +14,7 @@ export type DatasetCreateRequest = {
external_knowledge_api_id?: string | null;
provider?: string;
external_knowledge_id?: string | null;
retrieval_model?: JsonObject | null;
retrieval_model?: Record<string, unknown> | null;
embedding_model?: string | null;
embedding_model_provider?: string | null;
};
@@ -26,9 +26,9 @@ export type DatasetUpdateRequest = {
permission?: string | null;
embedding_model?: string | null;
embedding_model_provider?: string | null;
retrieval_model?: JsonObject | null;
retrieval_model?: Record<string, unknown> | null;
partial_member_list?: Array<Record<string, string>> | null;
external_retrieval_model?: JsonObject | null;
external_retrieval_model?: Record<string, unknown> | null;
external_knowledge_id?: string | null;
external_knowledge_api_id?: string | null;
};
@@ -61,12 +61,12 @@ export type DatasetTagUnbindingRequest = {
export type DocumentTextCreateRequest = {
name: string;
text: string;
process_rule?: JsonObject | null;
process_rule?: Record<string, unknown> | null;
original_document_id?: string | null;
doc_form?: string;
doc_language?: string;
indexing_technique?: string | null;
retrieval_model?: JsonObject | null;
retrieval_model?: Record<string, unknown> | null;
embedding_model?: string | null;
embedding_model_provider?: string | null;
};
@@ -74,10 +74,10 @@ export type DocumentTextCreateRequest = {
export type DocumentTextUpdateRequest = {
name?: string | null;
text?: string | null;
process_rule?: JsonObject | null;
process_rule?: Record<string, unknown> | null;
doc_form?: string;
doc_language?: string;
retrieval_model?: JsonObject | null;
retrieval_model?: Record<string, unknown> | null;
};
export type DocumentListOptions = {
@@ -92,7 +92,7 @@ export type DocumentGetOptions = {
};
export type SegmentCreateRequest = {
segments: JsonObject[];
segments: Array<Record<string, unknown>>;
};
export type SegmentUpdateRequest = {
@@ -155,8 +155,8 @@ export type MetadataOperationRequest = {
export type HitTestingRequest = {
query?: string | null;
retrieval_model?: JsonObject | null;
external_retrieval_model?: JsonObject | null;
retrieval_model?: Record<string, unknown> | null;
external_retrieval_model?: Record<string, unknown> | null;
attachment_ids?: string[] | null;
};
@@ -165,21 +165,20 @@ export type DatasourcePluginListOptions = {
};
export type DatasourceNodeRunRequest = {
inputs: JsonObject;
inputs: Record<string, unknown>;
datasource_type: string;
credential_id?: string | null;
is_published: boolean;
};
export type PipelineRunRequest = {
inputs: JsonObject;
inputs: Record<string, unknown>;
datasource_type: string;
datasource_info_list: JsonObject[];
datasource_info_list: Array<Record<string, unknown>>;
start_node_id: string;
is_published: boolean;
response_mode: ResponseMode;
response_mode: "streaming" | "blocking";
};
export type KnowledgeBaseResponse = JsonObject;
export type PipelineStreamEvent = JsonObject;
import type { JsonObject, ResponseMode } from "./common";
export type KnowledgeBaseResponse = Record<string, unknown>;
export type PipelineStreamEvent = Record<string, unknown>;

View File

@@ -1,17 +1,12 @@
import type {
DifyRequestFile,
JsonObject,
ResponseMode,
StreamEvent,
} from "./common";
import type { StreamEvent } from "./common";
export type WorkflowRunRequest = {
inputs?: JsonObject;
inputs?: Record<string, unknown>;
user: string;
response_mode?: ResponseMode;
files?: DifyRequestFile[] | null;
response_mode?: "blocking" | "streaming";
files?: Array<Record<string, unknown>> | null;
};
export type WorkflowRunResponse = JsonObject;
export type WorkflowRunResponse = Record<string, unknown>;
export type WorkflowStreamEvent = StreamEvent<JsonObject>;
export type WorkflowStreamEvent = StreamEvent<Record<string, unknown>>;

View File

@@ -1,4 +1,2 @@
import type { JsonObject } from "./common";
export type WorkspaceModelType = string;
export type WorkspaceModelsResponse = JsonObject;
export type WorkspaceModelsResponse = Record<string, unknown>;

View File

@@ -1,137 +0,0 @@
import { createServer } from "node:http";
import { Readable } from "node:stream";
import type { AddressInfo } from "node:net";
import { afterAll, beforeAll, describe, expect, it } from "vitest";
import { HttpClient } from "../src/http/client";
// Drain a readable stream completely and return its payload as one Buffer.
const readBody = async (stream: NodeJS.ReadableStream): Promise<Buffer> => {
  const pieces: Buffer[] = [];
  for await (const piece of stream) {
    // String chunks (e.g. from streams in object/utf8 mode) are converted;
    // Buffer chunks are kept as-is.
    pieces.push(Buffer.isBuffer(piece) ? piece : Buffer.from(piece));
  }
  return Buffer.concat(pieces);
};
describe("HttpClient integration", () => {
const requests: Array<{
url: string;
method: string;
headers: Record<string, string | string[] | undefined>;
body: Buffer;
}> = [];
const server = createServer((req, res) => {
void (async () => {
const body = await readBody(req);
requests.push({
url: req.url ?? "",
method: req.method ?? "",
headers: req.headers,
body,
});
if (req.url?.startsWith("/json")) {
res.writeHead(200, { "content-type": "application/json", "x-request-id": "req-json" });
res.end(JSON.stringify({ ok: true }));
return;
}
if (req.url === "/stream") {
res.writeHead(200, { "content-type": "text/event-stream" });
res.end('data: {"answer":"hello"}\n\ndata: {"delta":" world"}\n\n');
return;
}
if (req.url === "/bytes") {
res.writeHead(200, { "content-type": "application/octet-stream" });
res.end(Buffer.from([1, 2, 3, 4]));
return;
}
if (req.url === "/upload-stream") {
res.writeHead(200, { "content-type": "application/json" });
res.end(JSON.stringify({ received: body.toString("utf8") }));
return;
}
res.writeHead(404, { "content-type": "application/json" });
res.end(JSON.stringify({ message: "not found" }));
})();
});
let client: HttpClient;
beforeAll(async () => {
await new Promise<void>((resolve) => {
server.listen(0, "127.0.0.1", () => resolve());
});
const address = server.address() as AddressInfo;
client = new HttpClient({
apiKey: "test-key",
baseUrl: `http://127.0.0.1:${address.port}`,
maxRetries: 0,
retryDelay: 0,
});
});
afterAll(async () => {
await new Promise<void>((resolve, reject) => {
server.close((error) => {
if (error) {
reject(error);
return;
}
resolve();
});
});
});
it("uses real fetch for query serialization and json bodies", async () => {
const response = await client.request({
method: "POST",
path: "/json",
query: { tag_ids: ["a", "b"], limit: 2 },
data: { user: "u" },
});
expect(response.requestId).toBe("req-json");
expect(response.data).toEqual({ ok: true });
expect(requests.at(-1)).toMatchObject({
url: "/json?tag_ids=a&tag_ids=b&limit=2",
method: "POST",
});
expect(requests.at(-1)?.headers.authorization).toBe("Bearer test-key");
expect(requests.at(-1)?.headers["content-type"]).toBe("application/json");
expect(requests.at(-1)?.body.toString("utf8")).toBe(JSON.stringify({ user: "u" }));
});
it("supports streaming request bodies with duplex fetch", async () => {
const response = await client.request<{ received: string }>({
method: "POST",
path: "/upload-stream",
data: Readable.from(["hello ", "world"]),
});
expect(response.data).toEqual({ received: "hello world" });
expect(requests.at(-1)?.body.toString("utf8")).toBe("hello world");
});
it("parses real sse responses into text", async () => {
const stream = await client.requestStream({
method: "GET",
path: "/stream",
});
await expect(stream.toText()).resolves.toBe("hello world");
});
it("parses real byte responses into buffers", async () => {
const response = await client.request<Buffer, "bytes">({
method: "GET",
path: "/bytes",
responseType: "bytes",
});
expect(Array.from(response.data.values())).toEqual([1, 2, 3, 4]);
});
});

View File

@@ -0,0 +1,30 @@
import axios from "axios";
import { vi } from "vitest";
import { HttpClient } from "../src/http/client";
export const createHttpClient = (configOverrides = {}) => {
const mockRequest = vi.fn();
vi.spyOn(axios, "create").mockReturnValue({ request: mockRequest });
const client = new HttpClient({ apiKey: "test", ...configOverrides });
return { client, mockRequest };
};
export const createHttpClientWithSpies = (configOverrides = {}) => {
const { client, mockRequest } = createHttpClient(configOverrides);
const request = vi
.spyOn(client, "request")
.mockResolvedValue({ data: "ok", status: 200, headers: {} });
const requestStream = vi
.spyOn(client, "requestStream")
.mockResolvedValue({ data: null });
const requestBinaryStream = vi
.spyOn(client, "requestBinaryStream")
.mockResolvedValue({ data: null });
return {
client,
mockRequest,
request,
requestStream,
requestBinaryStream,
};
};

View File

@@ -1,48 +0,0 @@
import { vi } from "vitest";
import { HttpClient } from "../src/http/client";
import type { DifyClientConfig, DifyResponse } from "../src/types/common";
type FetchMock = ReturnType<typeof vi.fn>;
type RequestSpy = ReturnType<typeof vi.fn>;
type HttpClientWithFetchMock = {
client: HttpClient;
fetchMock: FetchMock;
};
type HttpClientWithSpies = HttpClientWithFetchMock & {
request: RequestSpy;
requestStream: RequestSpy;
requestBinaryStream: RequestSpy;
};
export const createHttpClient = (
configOverrides: Partial<DifyClientConfig> = {}
): HttpClientWithFetchMock => {
const fetchMock = vi.fn();
vi.stubGlobal("fetch", fetchMock);
const client = new HttpClient({ apiKey: "test", ...configOverrides });
return { client, fetchMock };
};
export const createHttpClientWithSpies = (
configOverrides: Partial<DifyClientConfig> = {}
): HttpClientWithSpies => {
const { client, fetchMock } = createHttpClient(configOverrides);
const request = vi
.spyOn(client, "request")
.mockResolvedValue({ data: "ok", status: 200, headers: {} } as DifyResponse<string>);
const requestStream = vi
.spyOn(client, "requestStream")
.mockResolvedValue({ data: null, status: 200, headers: {} } as never);
const requestBinaryStream = vi
.spyOn(client, "requestBinaryStream")
.mockResolvedValue({ data: null, status: 200, headers: {} } as never);
return {
client,
fetchMock,
request,
requestStream,
requestBinaryStream,
};
};

View File

@@ -3,7 +3,7 @@
"target": "ES2022",
"module": "ESNext",
"moduleResolution": "Bundler",
"rootDir": ".",
"rootDir": "src",
"outDir": "dist",
"declaration": true,
"declarationMap": true,
@@ -13,5 +13,5 @@
"forceConsistentCasingInFileNames": true,
"skipLibCheck": true
},
"include": ["src/**/*.ts", "tests/**/*.ts"]
"include": ["src/**/*.ts"]
}

View File

@@ -3,7 +3,7 @@ import { defineConfig } from "vitest/config";
export default defineConfig({
test: {
environment: "node",
include: ["**/*.test.ts"],
include: ["**/*.test.js"],
coverage: {
provider: "v8",
reporter: ["text", "text-summary"],

View File

@@ -1,11 +0,0 @@
import Evaluation from '@/app/components/evaluation'

// Route entry point: unwraps the async route params and renders the
// shared evaluation view scoped to this workflow app.
const Page = async ({ params }: {
  params: Promise<{ appId: string }>
}) => {
  const { appId } = await params
  return <Evaluation resourceType="workflow" resourceId={appId} />
}

export default Page

View File

@@ -7,8 +7,6 @@ import {
RiDashboard2Line,
RiFileList3Fill,
RiFileList3Line,
RiFlaskFill,
RiFlaskLine,
RiTerminalBoxFill,
RiTerminalBoxLine,
RiTerminalWindowFill,
@@ -69,47 +67,40 @@ const AppDetailLayout: FC<IAppDetailLayoutProps> = (props) => {
}>>([])
const getNavigationConfig = useCallback((appId: string, isCurrentWorkspaceEditor: boolean, mode: AppModeEnum) => {
const navConfig = []
if (isCurrentWorkspaceEditor) {
navConfig.push({
name: t('appMenus.promptEng', { ns: 'common' }),
href: `/app/${appId}/${(mode === AppModeEnum.WORKFLOW || mode === AppModeEnum.ADVANCED_CHAT) ? 'workflow' : 'configuration'}`,
icon: RiTerminalWindowLine,
selectedIcon: RiTerminalWindowFill,
})
navConfig.push({
name: t('appMenus.evaluation', { ns: 'common' }),
href: `/app/${appId}/evaluation`,
icon: RiFlaskLine,
selectedIcon: RiFlaskFill,
})
}
navConfig.push({
name: t('appMenus.apiAccess', { ns: 'common' }),
href: `/app/${appId}/develop`,
icon: RiTerminalBoxLine,
selectedIcon: RiTerminalBoxFill,
})
if (isCurrentWorkspaceEditor) {
navConfig.push({
name: mode !== AppModeEnum.WORKFLOW
? t('appMenus.logAndAnn', { ns: 'common' })
: t('appMenus.logs', { ns: 'common' }),
href: `/app/${appId}/logs`,
icon: RiFileList3Line,
selectedIcon: RiFileList3Fill,
})
}
navConfig.push({
name: t('appMenus.overview', { ns: 'common' }),
href: `/app/${appId}/overview`,
icon: RiDashboard2Line,
selectedIcon: RiDashboard2Fill,
})
const navConfig = [
...(isCurrentWorkspaceEditor
? [{
name: t('appMenus.promptEng', { ns: 'common' }),
href: `/app/${appId}/${(mode === AppModeEnum.WORKFLOW || mode === AppModeEnum.ADVANCED_CHAT) ? 'workflow' : 'configuration'}`,
icon: RiTerminalWindowLine,
selectedIcon: RiTerminalWindowFill,
}]
: []
),
{
name: t('appMenus.apiAccess', { ns: 'common' }),
href: `/app/${appId}/develop`,
icon: RiTerminalBoxLine,
selectedIcon: RiTerminalBoxFill,
},
...(isCurrentWorkspaceEditor
? [{
name: mode !== AppModeEnum.WORKFLOW
? t('appMenus.logAndAnn', { ns: 'common' })
: t('appMenus.logs', { ns: 'common' }),
href: `/app/${appId}/logs`,
icon: RiFileList3Line,
selectedIcon: RiFileList3Fill,
}]
: []
),
{
name: t('appMenus.overview', { ns: 'common' }),
href: `/app/${appId}/overview`,
icon: RiDashboard2Line,
selectedIcon: RiDashboard2Fill,
},
]
return navConfig
}, [t])

View File

@@ -1,11 +0,0 @@
import Evaluation from '@/app/components/evaluation'

// Route entry point: unwraps the async route params and renders the
// shared evaluation view scoped to this knowledge pipeline (dataset).
const Page = async ({ params }: {
  params: Promise<{ datasetId: string }>
}) => {
  const { datasetId } = await params
  return <Evaluation resourceType="pipeline" resourceId={datasetId} />
}

export default Page

View File

@@ -6,8 +6,6 @@ import {
RiEqualizer2Line,
RiFileTextFill,
RiFileTextLine,
RiFlaskFill,
RiFlaskLine,
RiFocus2Fill,
RiFocus2Line,
} from '@remixicon/react'
@@ -88,30 +86,20 @@ const DatasetDetailLayout: FC<IAppDetailLayoutProps> = (props) => {
]
if (datasetRes?.provider !== 'external') {
return [
{
name: t('datasetMenus.documents', { ns: 'common' }),
href: `/datasets/${datasetId}/documents`,
icon: RiFileTextLine,
selectedIcon: RiFileTextFill,
disabled: isButtonDisabledWithPipeline,
},
{
name: t('datasetMenus.pipeline', { ns: 'common' }),
href: `/datasets/${datasetId}/pipeline`,
icon: PipelineLine as RemixiconComponentType,
selectedIcon: PipelineFill as RemixiconComponentType,
disabled: false,
},
{
name: t('datasetMenus.evaluation', { ns: 'common' }),
href: `/datasets/${datasetId}/evaluation`,
icon: RiFlaskLine,
selectedIcon: RiFlaskFill,
disabled: false,
},
...baseNavigation,
]
baseNavigation.unshift({
name: t('datasetMenus.pipeline', { ns: 'common' }),
href: `/datasets/${datasetId}/pipeline`,
icon: PipelineLine as RemixiconComponentType,
selectedIcon: PipelineFill as RemixiconComponentType,
disabled: false,
})
baseNavigation.unshift({
name: t('datasetMenus.documents', { ns: 'common' }),
href: `/datasets/${datasetId}/documents`,
icon: RiFileTextLine,
selectedIcon: RiFileTextFill,
disabled: isButtonDisabledWithPipeline,
})
}
return baseNavigation

Some files were not shown because too many files have changed in this diff Show More