Compare commits

6 Commits

Author SHA1 Message Date
yessenia e93372de48 feat: show tool readme info 2025-09-08 20:20:55 +08:00
Stream e981bf21a5 feat: add API endpoint to extract plugin assets 2025-08-27 20:12:18 +08:00
Stream a015f05aea feat: add API endpoint to extract plugin assets 2025-08-27 20:03:59 +08:00
Stream 11f4743624 feat: adapt to plugin_daemon endpoint 2025-08-27 16:12:40 +08:00
Stream 7db77cf9f8 Merge branch 'main' into feat/plugin-readme 2025-08-27 11:35:04 +08:00
Harry 19c10f9075 feat: add PluginReadmeApi to fetch plugin readme information 2025-08-22 16:38:48 +08:00
2063 changed files with 20003 additions and 68526 deletions

View File

@@ -1,19 +0,0 @@
{
"permissions": {
"allow": [],
"deny": []
},
"env": {
"__comment": "Environment variables for MCP servers. Override in .claude/settings.local.json with actual values.",
"GITHUB_PERSONAL_ACCESS_TOKEN": "ghp_xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx"
},
"enabledMcpjsonServers": [
"context7",
"sequential-thinking",
"github",
"fetch",
"playwright",
"ide"
],
"enableAllProjectMcpServers": true
}

View File

@@ -1,6 +1,6 @@
#!/bin/bash
corepack enable
npm add -g pnpm@10.15.0
cd web && pnpm install
pipx install uv

.github/actions/setup-uv/action.yml vendored Normal file (+34 lines)
View File

@@ -0,0 +1,34 @@
name: Setup UV and Python
inputs:
python-version:
description: Python version to use and the UV installed with
required: true
default: '3.12'
uv-version:
description: UV version to set up
required: true
default: '0.8.9'
uv-lockfile:
description: Path to the UV lockfile to restore cache from
required: true
default: ''
enable-cache:
required: true
default: true
runs:
using: composite
steps:
- name: Set up Python ${{ inputs.python-version }}
uses: actions/setup-python@v5
with:
python-version: ${{ inputs.python-version }}
- name: Install uv
uses: astral-sh/setup-uv@v6
with:
version: ${{ inputs.uv-version }}
python-version: ${{ inputs.python-version }}
enable-cache: ${{ inputs.enable-cache }}
cache-dependency-glob: ${{ inputs.uv-lockfile }}

View File

@@ -1,12 +0,0 @@
version: 2
updates:
- package-ecosystem: "npm"
directory: "/web"
schedule:
interval: "weekly"
open-pull-requests-limit: 2
- package-ecosystem: "uv"
directory: "/api"
schedule:
interval: "weekly"
open-pull-requests-limit: 2

View File

@@ -1,7 +1,13 @@
name: Run Pytest
on:
workflow_call:
pull_request:
branches:
- main
paths:
- api/**
- docker/**
- .github/workflows/api-tests.yml
concurrency:
group: api-tests-${{ github.head_ref || github.run_id }}
@@ -27,11 +33,10 @@ jobs:
persist-credentials: false
- name: Setup UV and Python
uses: astral-sh/setup-uv@v6
uses: ./.github/actions/setup-uv
with:
enable-cache: true
python-version: ${{ matrix.python-version }}
cache-dependency-glob: api/uv.lock
uv-lockfile: api/uv.lock
- name: Check UV lockfile
run: uv lock --project api --check
@@ -42,7 +47,11 @@ jobs:
- name: Run Unit tests
run: |
uv run --project api bash dev/pytest/pytest_unit_tests.sh
- name: Run ty check
run: |
cd api
uv add --dev ty
uv run ty check || true
- name: Run pyrefly check
run: |
cd api
@@ -62,6 +71,15 @@ jobs:
- name: Run dify config tests
run: uv run --project api dev/pytest/pytest_config_tests.py
- name: MyPy Cache
uses: actions/cache@v4
with:
path: api/.mypy_cache
key: mypy-${{ matrix.python-version }}-${{ runner.os }}-${{ hashFiles('api/uv.lock') }}
- name: Run MyPy Checks
run: dev/mypy-check
- name: Set up dotenvs
run: |
cp docker/.env.example docker/.env

View File

@@ -1,7 +1,9 @@
name: autofix.ci
on:
workflow_call:
pull_request:
branches: ["main"]
push:
branches: [ "main" ]
permissions:
contents: read
@@ -13,67 +15,18 @@ jobs:
- uses: actions/checkout@v4
# Use uv to ensure we have the same ruff version in CI and locally.
- uses: astral-sh/setup-uv@v6
with:
python-version: "3.12"
- uses: astral-sh/setup-uv@7edac99f961f18b581bbd960d59d049f04c0002f
- run: |
cd api
uv sync --dev
# Fix lint errors
uv run ruff check --fix .
uv run ruff check --fix-only .
# Format code
uv run ruff format ..
uv run ruff format .
- name: ast-grep
run: |
uvx --from ast-grep-cli sg --pattern 'db.session.query($WHATEVER).filter($HERE)' --rewrite 'db.session.query($WHATEVER).where($HERE)' -l py --update-all
uvx --from ast-grep-cli sg --pattern 'session.query($WHATEVER).filter($HERE)' --rewrite 'session.query($WHATEVER).where($HERE)' -l py --update-all
# Convert Optional[T] to T | None (ignoring quoted types)
cat > /tmp/optional-rule.yml << 'EOF'
id: convert-optional-to-union
language: python
rule:
kind: generic_type
all:
- has:
kind: identifier
pattern: Optional
- has:
kind: type_parameter
has:
kind: type
pattern: $T
fix: $T | None
EOF
uvx --from ast-grep-cli sg scan --inline-rules "$(cat /tmp/optional-rule.yml)" --update-all
# Fix forward references that were incorrectly converted (Python doesn't support "Type" | None syntax)
find . -name "*.py" -type f -exec sed -i.bak -E 's/"([^"]+)" \| None/Optional["\1"]/g; s/'"'"'([^'"'"']+)'"'"' \| None/Optional['"'"'\1'"'"']/g' {} \;
find . -name "*.py.bak" -type f -delete
- name: mdformat
run: |
uvx mdformat .
- name: Install pnpm
uses: pnpm/action-setup@v4
with:
package_json_file: web/package.json
run_install: false
- name: Setup NodeJS
uses: actions/setup-node@v4
with:
node-version: 22
cache: pnpm
cache-dependency-path: ./web/package.json
- name: Web dependencies
working-directory: ./web
run: pnpm install --frozen-lockfile
- name: oxlint
working-directory: ./web
run: |
pnpx oxlint --fix
- uses: autofix-ci/action@635ffb0c9798bd160680f18fd73371e355b85f27
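The find/sed fallback in this workflow exists because the ast-grep rule can also rewrite quoted forward references, and Python evaluates PEP 604 unions eagerly, so `"Type" | None` fails at import time. A minimal sketch of the failure mode and the working forms (class name is hypothetical):

```python
from typing import Optional


class Node:
    # Without `from __future__ import annotations`, PEP 604 unions are
    # evaluated when the class body runs, so a quoted forward reference
    # cannot appear on either side of `|`:
    #     next: "Node" | None   # TypeError: unsupported operand type(s)
    #                           # for |: 'str' and 'NoneType'
    # Optional[...] keeps the string inside a subscript, so typing defers
    # its evaluation as a ForwardRef.
    next: Optional["Node"] = None
```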

View File

@@ -1,7 +1,13 @@
name: DB Migration Test
on:
workflow_call:
pull_request:
branches:
- main
- plugins/beta
paths:
- api/migrations/**
- .github/workflows/db-migration-test.yml
concurrency:
group: db-migration-test-${{ github.ref }}
@@ -19,20 +25,12 @@ jobs:
persist-credentials: false
- name: Setup UV and Python
uses: astral-sh/setup-uv@v6
uses: ./.github/actions/setup-uv
with:
enable-cache: true
python-version: "3.12"
cache-dependency-glob: api/uv.lock
uv-lockfile: api/uv.lock
- name: Install dependencies
run: uv sync --project api
- name: Ensure Offline migration are supported
run: |
# upgrade
uv run --directory api flask db upgrade 'base:head' --sql
# downgrade
uv run --directory api flask db downgrade 'head:base' --sql
- name: Prepare middleware env
run: |

View File

@@ -19,23 +19,11 @@ jobs:
github.event.workflow_run.head_branch == 'deploy/enterprise'
steps:
- name: trigger deployments
env:
DEV_ENV_ADDRS: ${{ vars.DEV_ENV_ADDRS }}
DEPLOY_SECRET: ${{ secrets.DEPLOY_SECRET }}
run: |
IFS=',' read -ra ENDPOINTS <<< "${DEV_ENV_ADDRS:-}"
BODY='{"project":"dify-api","tag":"deploy-enterprise"}'
for ENDPOINT in "${ENDPOINTS[@]}"; do
ENDPOINT="$(echo "$ENDPOINT" | xargs)"
[ -z "$ENDPOINT" ] && continue
API_SIGNATURE=$(printf '%s' "$BODY" | openssl dgst -sha256 -hmac "$DEPLOY_SECRET" | awk '{print "sha256="$2}')
curl -sSf -X POST \
-H "Content-Type: application/json" \
-H "X-Hub-Signature-256: $API_SIGNATURE" \
-d "$BODY" \
"$ENDPOINT"
done
- name: Deploy to server
uses: appleboy/ssh-action@v0.1.8
with:
host: ${{ secrets.ENTERPRISE_SSH_HOST }}
username: ${{ secrets.ENTERPRISE_SSH_USER }}
password: ${{ secrets.ENTERPRISE_SSH_PASSWORD }}
script: |
${{ vars.ENTERPRISE_SSH_SCRIPT || secrets.ENTERPRISE_SSH_SCRIPT }}
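The removed trigger step signs the webhook body with an HMAC before POSTing, the same `X-Hub-Signature-256` scheme GitHub webhooks use. A minimal Python equivalent of the `openssl dgst -sha256 -hmac` invocation (secret and body here are placeholders):

```python
import hashlib
import hmac


def sign_deploy_request(body: bytes, secret: str) -> str:
    # Mirrors: printf '%s' "$BODY" | openssl dgst -sha256 -hmac "$SECRET"
    digest = hmac.new(secret.encode(), body, hashlib.sha256).hexdigest()
    return f"sha256={digest}"


body = b'{"project":"dify-api","tag":"deploy-enterprise"}'
print(sign_deploy_request(body, "example-secret"))
# The receiving endpoint recomputes the digest over the raw body and
# compares it with hmac.compare_digest() to reject tampered payloads.
```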

View File

@@ -1,78 +0,0 @@
name: Main CI Pipeline
on:
pull_request:
branches: ["main"]
push:
branches: ["main"]
permissions:
contents: write
pull-requests: write
checks: write
statuses: write
concurrency:
group: main-ci-${{ github.head_ref || github.run_id }}
cancel-in-progress: true
jobs:
# Check which paths were changed to determine which tests to run
check-changes:
name: Check Changed Files
runs-on: ubuntu-latest
outputs:
api-changed: ${{ steps.changes.outputs.api }}
web-changed: ${{ steps.changes.outputs.web }}
vdb-changed: ${{ steps.changes.outputs.vdb }}
migration-changed: ${{ steps.changes.outputs.migration }}
steps:
- uses: actions/checkout@v4
- uses: dorny/paths-filter@v3
id: changes
with:
filters: |
api:
- 'api/**'
- 'docker/**'
- '.github/workflows/api-tests.yml'
web:
- 'web/**'
vdb:
- 'api/core/rag/datasource/**'
- 'docker/**'
- '.github/workflows/vdb-tests.yml'
- 'api/uv.lock'
- 'api/pyproject.toml'
migration:
- 'api/migrations/**'
- '.github/workflows/db-migration-test.yml'
# Run tests in parallel
api-tests:
name: API Tests
needs: check-changes
if: needs.check-changes.outputs.api-changed == 'true'
uses: ./.github/workflows/api-tests.yml
web-tests:
name: Web Tests
needs: check-changes
if: needs.check-changes.outputs.web-changed == 'true'
uses: ./.github/workflows/web-tests.yml
style-check:
name: Style Check
uses: ./.github/workflows/style.yml
vdb-tests:
name: VDB Tests
needs: check-changes
if: needs.check-changes.outputs.vdb-changed == 'true'
uses: ./.github/workflows/vdb-tests.yml
db-migration-test:
name: DB Migration Test
needs: check-changes
if: needs.check-changes.outputs.migration-changed == 'true'
uses: ./.github/workflows/db-migration-test.yml

View File

@@ -1,7 +1,9 @@
name: Style check
on:
workflow_call:
pull_request:
branches:
- main
concurrency:
group: style-${{ github.head_ref || github.run_id }}
@@ -34,28 +36,30 @@ jobs:
- name: Setup UV and Python
if: steps.changed-files.outputs.any_changed == 'true'
uses: astral-sh/setup-uv@v6
uses: ./.github/actions/setup-uv
with:
uv-lockfile: api/uv.lock
enable-cache: false
python-version: "3.12"
cache-dependency-glob: api/uv.lock
- name: Install dependencies
if: steps.changed-files.outputs.any_changed == 'true'
run: uv sync --project api --dev
- name: Run Basedpyright Checks
- name: Ruff check
if: steps.changed-files.outputs.any_changed == 'true'
run: dev/basedpyright-check
- name: Run Mypy Type Checks
if: steps.changed-files.outputs.any_changed == 'true'
run: uv --directory api run mypy --exclude-gitignore --exclude 'tests/' --exclude 'migrations/' --check-untyped-defs --disable-error-code=import-untyped .
run: |
uv run --directory api ruff --version
uv run --directory api ruff check ./
uv run --directory api ruff format --check ./
- name: Dotenv check
if: steps.changed-files.outputs.any_changed == 'true'
run: uv run --project api dotenv-linter ./api/.env.example ./web/.env.example
- name: Lint hints
if: failure()
run: echo "Please run 'dev/reformat' to fix the fixable linting errors."
web-style:
name: Web Style
runs-on: ubuntu-latest
@@ -97,9 +101,7 @@ jobs:
- name: Web style check
if: steps.changed-files.outputs.any_changed == 'true'
working-directory: ./web
run: |
pnpm run lint
pnpm run eslint
run: pnpm run lint
docker-compose-template:
name: Docker Compose Template

View File

@@ -67,22 +67,12 @@ jobs:
working-directory: ./web
run: pnpm run auto-gen-i18n ${{ env.FILE_ARGS }}
- name: Generate i18n type definitions
if: env.FILES_CHANGED == 'true'
working-directory: ./web
run: pnpm run gen:i18n-types
- name: Create Pull Request
if: env.FILES_CHANGED == 'true'
uses: peter-evans/create-pull-request@v6
with:
token: ${{ secrets.GITHUB_TOKEN }}
commit-message: Update i18n files and type definitions based on en-US changes
title: 'chore: translate i18n files and update type definitions'
body: |
This PR was automatically created to update i18n files and TypeScript type definitions based on changes in en-US locale.
**Changes included:**
- Updated translation files for all locales
- Regenerated TypeScript type definitions for type safety
commit-message: Update i18n files based on en-US changes
title: 'chore: translate i18n files'
body: This PR was automatically created to update i18n files based on changes in en-US locale.
branch: chore/automated-i18n-updates

View File

@@ -1,7 +1,15 @@
name: Run VDB Tests
on:
workflow_call:
pull_request:
branches:
- main
paths:
- api/core/rag/datasource/**
- docker/**
- .github/workflows/vdb-tests.yml
- api/uv.lock
- api/pyproject.toml
concurrency:
group: vdb-tests-${{ github.head_ref || github.run_id }}
@@ -31,11 +39,10 @@ jobs:
remove_tool_cache: true
- name: Setup UV and Python
uses: astral-sh/setup-uv@v6
uses: ./.github/actions/setup-uv
with:
enable-cache: true
python-version: ${{ matrix.python-version }}
cache-dependency-glob: api/uv.lock
uv-lockfile: api/uv.lock
- name: Check UV lockfile
run: uv lock --project api --check

View File

@@ -1,7 +1,11 @@
name: Web Tests
on:
workflow_call:
pull_request:
branches:
- main
paths:
- web/**
concurrency:
group: web-tests-${{ github.head_ref || github.run_id }}
@@ -47,11 +51,6 @@ jobs:
working-directory: ./web
run: pnpm install --frozen-lockfile
- name: Check i18n types synchronization
if: steps.changed-files.outputs.any_changed == 'true'
working-directory: ./web
run: pnpm run check:i18n-types
- name: Run tests
if: steps.changed-files.outputs.any_changed == 'true'
working-directory: ./web

.gitignore vendored (17 lines changed)
View File

@@ -123,12 +123,10 @@ venv.bak/
# mkdocs documentation
/site
# type checking
# mypy
.mypy_cache/
.dmypy.json
dmypy.json
pyrightconfig.json
!api/pyrightconfig.json
# Pyre type checker
.pyre/
@@ -197,8 +195,8 @@ sdks/python-client/dify_client.egg-info
.vscode/*
!.vscode/launch.json.template
!.vscode/README.md
pyrightconfig.json
api/.vscode
web/.vscode
# vscode Code History Extension
.history
@@ -216,18 +214,7 @@ mise.toml
# Next.js build output
.next/
# PWA generated files
web/public/sw.js
web/public/sw.js.map
web/public/workbox-*.js
web/public/workbox-*.js.map
web/public/fallback-*.js
# AI Assistant
.roo/
api/.env.backup
/clickzetta
# Benchmark
scripts/stress-test/setup/config/
scripts/stress-test/reports/

View File

@@ -1,34 +0,0 @@
{
"mcpServers": {
"context7": {
"type": "http",
"url": "https://mcp.context7.com/mcp"
},
"sequential-thinking": {
"type": "stdio",
"command": "npx",
"args": ["-y", "@modelcontextprotocol/server-sequential-thinking"],
"env": {}
},
"github": {
"type": "stdio",
"command": "npx",
"args": ["-y", "@modelcontextprotocol/server-github"],
"env": {
"GITHUB_PERSONAL_ACCESS_TOKEN": "${GITHUB_PERSONAL_ACCESS_TOKEN}"
}
},
"fetch": {
"type": "stdio",
"command": "uvx",
"args": ["mcp-server-fetch"],
"env": {}
},
"playwright": {
"type": "stdio",
"command": "npx",
"args": ["-y", "@playwright/mcp@latest"],
"env": {}
}
}
}

View File

@@ -1,87 +0,0 @@
# AGENTS.md
## Project Overview
Dify is an open-source platform for developing LLM applications with an intuitive interface combining agentic AI workflows, RAG pipelines, agent capabilities, and model management.
The codebase consists of:
- **Backend API** (`/api`): Python Flask application with Domain-Driven Design architecture
- **Frontend Web** (`/web`): Next.js 15 application with TypeScript and React 19
- **Docker deployment** (`/docker`): Containerized deployment configurations
## Development Commands
### Backend (API)
All Python commands must be prefixed with `uv run --project api`:
```bash
# Start development servers
./dev/start-api # Start API server
./dev/start-worker # Start Celery worker
# Run tests
uv run --project api pytest # Run all tests
uv run --project api pytest tests/unit_tests/ # Unit tests only
uv run --project api pytest tests/integration_tests/ # Integration tests
# Code quality
./dev/reformat # Run all formatters and linters
uv run --project api ruff check --fix ./ # Fix linting issues
uv run --project api ruff format ./ # Format code
uv run --directory api basedpyright # Type checking
```
### Frontend (Web)
```bash
cd web
pnpm lint # Run ESLint
pnpm eslint-fix # Fix ESLint issues
pnpm test # Run Jest tests
```
## Testing Guidelines
### Backend Testing
- Use `pytest` for all backend tests
- Write tests first (TDD approach)
- Test structure: Arrange-Act-Assert
## Code Style Requirements
### Python
- Use type hints for all functions and class attributes
- No `Any` types unless absolutely necessary
- Implement special methods (`__repr__`, `__str__`) appropriately
### TypeScript/JavaScript
- Strict TypeScript configuration
- ESLint with Prettier integration
- Avoid `any` type
## Important Notes
- **Environment Variables**: Always use UV for Python commands: `uv run --project api <command>`
- **Comments**: Only write meaningful comments that explain "why", not "what"
- **File Creation**: Always prefer editing existing files over creating new ones
- **Documentation**: Don't create documentation files unless explicitly requested
- **Code Quality**: Always run `./dev/reformat` before committing backend changes
## Common Development Tasks
### Adding a New API Endpoint
1. Create controller in `/api/controllers/`
1. Add service logic in `/api/services/`
1. Update routes in controller's `__init__.py`
1. Write tests in `/api/tests/`
## Project-Specific Conventions
- All async tasks use Celery with Redis as broker
- **Internationalization**: Frontend supports multiple languages with English (`web/i18n/en-US/`) as the source. All user-facing text must use i18n keys, no hardcoded strings. Edit corresponding module files in `en-US/` directory for translations.

View File

@@ -1 +0,0 @@
AGENTS.md

CLAUDE.md Normal file (+88 lines)
View File

@@ -0,0 +1,88 @@
# CLAUDE.md
This file provides guidance to Claude Code (claude.ai/code) when working with code in this repository.
## Project Overview
Dify is an open-source platform for developing LLM applications with an intuitive interface combining agentic AI workflows, RAG pipelines, agent capabilities, and model management.
The codebase consists of:
- **Backend API** (`/api`): Python Flask application with Domain-Driven Design architecture
- **Frontend Web** (`/web`): Next.js 15 application with TypeScript and React 19
- **Docker deployment** (`/docker`): Containerized deployment configurations
## Development Commands
### Backend (API)
All Python commands must be prefixed with `uv run --project api`:
```bash
# Start development servers
./dev/start-api # Start API server
./dev/start-worker # Start Celery worker
# Run tests
uv run --project api pytest # Run all tests
uv run --project api pytest tests/unit_tests/ # Unit tests only
uv run --project api pytest tests/integration_tests/ # Integration tests
# Code quality
./dev/reformat # Run all formatters and linters
uv run --project api ruff check --fix ./ # Fix linting issues
uv run --project api ruff format ./ # Format code
uv run --project api mypy . # Type checking
```
### Frontend (Web)
```bash
cd web
pnpm lint # Run ESLint
pnpm eslint-fix # Fix ESLint issues
pnpm test # Run Jest tests
```
## Testing Guidelines
### Backend Testing
- Use `pytest` for all backend tests
- Write tests first (TDD approach)
- Test structure: Arrange-Act-Assert
## Code Style Requirements
### Python
- Use type hints for all functions and class attributes
- No `Any` types unless absolutely necessary
- Implement special methods (`__repr__`, `__str__`) appropriately
### TypeScript/JavaScript
- Strict TypeScript configuration
- ESLint with Prettier integration
- Avoid `any` type
## Important Notes
- **Environment Variables**: Always use UV for Python commands: `uv run --project api <command>`
- **Comments**: Only write meaningful comments that explain "why", not "what"
- **File Creation**: Always prefer editing existing files over creating new ones
- **Documentation**: Don't create documentation files unless explicitly requested
- **Code Quality**: Always run `./dev/reformat` before committing backend changes
## Common Development Tasks
### Adding a New API Endpoint
1. Create controller in `/api/controllers/`
1. Add service logic in `/api/services/`
1. Update routes in controller's `__init__.py`
1. Write tests in `/api/tests/`
## Project-Specific Conventions
- All async tasks use Celery with Redis as broker
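For the endpoint checklist above, a minimal sketch of the controller/service split, assuming the Flask-RESTful `Resource` style used under `/api/controllers/` (all names here are hypothetical illustrations, not the repository's actual classes):

```python
from flask import Flask
from flask_restful import Api, Resource


# Hypothetical service in /api/services/: controllers stay thin,
# business logic lives in the service layer.
class PluginReadmeService:
    @staticmethod
    def get_readme(plugin_id: str) -> dict:
        return {"plugin_id": plugin_id, "readme": "..."}


# Hypothetical controller in /api/controllers/:
class PluginReadmeApi(Resource):
    def get(self, plugin_id: str):
        return PluginReadmeService.get_readme(plugin_id)


app = Flask(__name__)
api = Api(app)
# Step 3 of the checklist: route registration (normally done in the
# controller package's __init__.py).
api.add_resource(PluginReadmeApi, "/plugins/<string:plugin_id>/readme")
```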

View File

@@ -4,72 +4,6 @@ WEB_IMAGE=$(DOCKER_REGISTRY)/dify-web
API_IMAGE=$(DOCKER_REGISTRY)/dify-api
VERSION=latest
# Default target - show help
.DEFAULT_GOAL := help
# Backend Development Environment Setup
.PHONY: dev-setup prepare-docker prepare-web prepare-api
# Dev setup target
dev-setup: prepare-docker prepare-web prepare-api
@echo "✅ Backend development environment setup complete!"
# Step 1: Prepare Docker middleware
prepare-docker:
@echo "🐳 Setting up Docker middleware..."
@cp -n docker/middleware.env.example docker/middleware.env 2>/dev/null || echo "Docker middleware.env already exists"
@cd docker && docker compose -f docker-compose.middleware.yaml --env-file middleware.env -p dify-middlewares-dev up -d
@echo "✅ Docker middleware started"
# Step 2: Prepare web environment
prepare-web:
@echo "🌐 Setting up web environment..."
@cp -n web/.env.example web/.env 2>/dev/null || echo "Web .env already exists"
@cd web && pnpm install
@cd web && pnpm build
@echo "✅ Web environment prepared (not started)"
# Step 3: Prepare API environment
prepare-api:
@echo "🔧 Setting up API environment..."
@cp -n api/.env.example api/.env 2>/dev/null || echo "API .env already exists"
@cd api && uv sync --dev
@cd api && uv run flask db upgrade
@echo "✅ API environment prepared (not started)"
# Clean dev environment
dev-clean:
@echo "⚠️ Stopping Docker containers..."
@cd docker && docker compose -f docker-compose.middleware.yaml --env-file middleware.env -p dify-middlewares-dev down
@echo "🗑️ Removing volumes..."
@rm -rf docker/volumes/db
@rm -rf docker/volumes/redis
@rm -rf docker/volumes/plugin_daemon
@rm -rf docker/volumes/weaviate
@rm -rf api/storage
@echo "✅ Cleanup complete"
# Backend Code Quality Commands
format:
@echo "🎨 Running ruff format..."
@uv run --project api --dev ruff format ./api
@echo "✅ Code formatting complete"
check:
@echo "🔍 Running ruff check..."
@uv run --project api --dev ruff check ./api
@echo "✅ Code check complete"
lint:
@echo "🔧 Running ruff format and check with fixes..."
@uv run --directory api --dev sh -c 'ruff format ./api && ruff check --fix ./api'
@echo "✅ Linting complete"
type-check:
@echo "📝 Running type check with basedpyright..."
@uv run --directory api --dev basedpyright
@echo "✅ Type check complete"
# Build Docker images
build-web:
@echo "Building web Docker image: $(WEB_IMAGE):$(VERSION)..."
@@ -105,27 +39,5 @@ build-push-web: build-web push-web
build-push-all: build-all push-all
@echo "All Docker images have been built and pushed."
# Help target
help:
@echo "Development Setup Targets:"
@echo " make dev-setup - Run all setup steps for backend dev environment"
@echo " make prepare-docker - Set up Docker middleware"
@echo " make prepare-web - Set up web environment"
@echo " make prepare-api - Set up API environment"
@echo " make dev-clean - Stop Docker middleware containers"
@echo ""
@echo "Backend Code Quality:"
@echo " make format - Format code with ruff"
@echo " make check - Check code with ruff"
@echo " make lint - Format and fix code with ruff"
@echo " make type-check - Run type checking with basedpyright"
@echo ""
@echo "Docker Build Targets:"
@echo " make build-web - Build web Docker image"
@echo " make build-api - Build API Docker image"
@echo " make build-all - Build all Docker images"
@echo " make push-all - Push all Docker images"
@echo " make build-push-all - Build and push all Docker images"
# Phony targets
.PHONY: build-web build-api push-web push-api build-all push-all build-push-all dev-setup prepare-docker prepare-web prepare-api dev-clean help format check lint type-check
.PHONY: build-web build-api push-web push-api build-all push-all build-push-all

View File

@@ -180,7 +180,7 @@ docker compose up -d
## Contributing
对于那些想要贡献代码的人,请参阅我们的[贡献指南](https://github.com/langgenius/dify/blob/main/CONTRIBUTING_CN.md)。
对于那些想要贡献代码的人,请参阅我们的[贡献指南](https://github.com/langgenius/dify/blob/main/CONTRIBUTING.md)。
同时,请考虑通过社交媒体、活动和会议来支持 Dify 的分享。
> 我们正在寻找贡献者来帮助将 Dify 翻译成除了中文和英文之外的其他语言。如果您有兴趣帮助,请参阅我们的[i18n README](https://github.com/langgenius/dify/blob/main/web/i18n-config/README.md)获取更多信息,并在我们的[Discord 社区服务器](https://discord.gg/8Tpq4AcN9c)的`global-users`频道中留言。

View File

@@ -173,7 +173,7 @@ Stellen Sie Dify mit einem Klick in AKS bereit, indem Sie [Azure Devops Pipeline
## Contributing
Falls Sie Code beitragen möchten, lesen Sie bitte unseren [Contribution Guide](https://github.com/langgenius/dify/blob/main/CONTRIBUTING_DE.md). Gleichzeitig bitten wir Sie, Dify zu unterstützen, indem Sie es in den sozialen Medien teilen und auf Veranstaltungen und Konferenzen präsentieren.
Falls Sie Code beitragen möchten, lesen Sie bitte unseren [Contribution Guide](https://github.com/langgenius/dify/blob/main/CONTRIBUTING.md). Gleichzeitig bitten wir Sie, Dify zu unterstützen, indem Sie es in den sozialen Medien teilen und auf Veranstaltungen und Konferenzen präsentieren.
> Wir suchen Mitwirkende, die dabei helfen, Dify in weitere Sprachen zu übersetzen außer Mandarin oder Englisch. Wenn Sie Interesse an einer Mitarbeit haben, lesen Sie bitte die [i18n README](https://github.com/langgenius/dify/blob/main/web/i18n-config/README.md) für weitere Informationen und hinterlassen Sie einen Kommentar im `global-users`-Kanal unseres [Discord Community Servers](https://discord.gg/8Tpq4AcN9c).

View File

@@ -170,7 +170,7 @@ Implementa Dify en AKS con un clic usando [Azure Devops Pipeline Helm Chart by @
## Contribuir
Para aquellos que deseen contribuir con código, consulten nuestra [Guía de contribución](https://github.com/langgenius/dify/blob/main/CONTRIBUTING_ES.md).
Para aquellos que deseen contribuir con código, consulten nuestra [Guía de contribución](https://github.com/langgenius/dify/blob/main/CONTRIBUTING.md).
Al mismo tiempo, considera apoyar a Dify compartiéndolo en redes sociales y en eventos y conferencias.
> Estamos buscando colaboradores para ayudar con la traducción de Dify a idiomas que no sean el mandarín o el inglés. Si estás interesado en ayudar, consulta el [README de i18n](https://github.com/langgenius/dify/blob/main/web/i18n-config/README.md) para obtener más información y déjanos un comentario en el canal `global-users` de nuestro [Servidor de Comunidad en Discord](https://discord.gg/8Tpq4AcN9c).

View File

@@ -168,7 +168,7 @@ Déployez Dify sur AKS en un clic en utilisant [Azure Devops Pipeline Helm Chart
## Contribuer
Pour ceux qui souhaitent contribuer du code, consultez notre [Guide de contribution](https://github.com/langgenius/dify/blob/main/CONTRIBUTING_FR.md).
Pour ceux qui souhaitent contribuer du code, consultez notre [Guide de contribution](https://github.com/langgenius/dify/blob/main/CONTRIBUTING.md).
Dans le même temps, veuillez envisager de soutenir Dify en le partageant sur les réseaux sociaux et lors d'événements et de conférences.
> Nous recherchons des contributeurs pour aider à traduire Dify dans des langues autres que le mandarin ou l'anglais. Si vous êtes intéressé à aider, veuillez consulter le [README i18n](https://github.com/langgenius/dify/blob/main/web/i18n-config/README.md) pour plus d'informations, et laissez-nous un commentaire dans le canal `global-users` de notre [Serveur communautaire Discord](https://discord.gg/8Tpq4AcN9c).

View File

@@ -169,7 +169,7 @@ docker compose up -d
## 貢献
コードに貢献したい方は、[Contribution Guide](https://github.com/langgenius/dify/blob/main/CONTRIBUTING_JA.md)を参照してください。
コードに貢献したい方は、[Contribution Guide](https://github.com/langgenius/dify/blob/main/CONTRIBUTING.md)を参照してください。
同時に、DifyをSNSやイベント、カンファレンスで共有してサポートしていただけると幸いです。
> Difyを英語または中国語以外の言語に翻訳してくれる貢献者を募集しています。興味がある場合は、詳細については[i18n README](https://github.com/langgenius/dify/blob/main/web/i18n-config/README.md)を参照してください。また、[Discordコミュニティサーバー](https://discord.gg/8Tpq4AcN9c)の`global-users`チャンネルにコメントを残してください。

View File

@@ -162,7 +162,7 @@ Dify를 Kubernetes에 배포하고 프리미엄 스케일링 설정을 구성했
## 기여
코드에 기여하고 싶은 분들은 [기여 가이드](https://github.com/langgenius/dify/blob/main/CONTRIBUTING_KR.md)를 참조하세요.
코드에 기여하고 싶은 분들은 [기여 가이드](https://github.com/langgenius/dify/blob/main/CONTRIBUTING.md)를 참조하세요.
동시에 Dify를 소셜 미디어와 행사 및 컨퍼런스에 공유하여 지원하는 것을 고려해 주시기 바랍니다.
> 우리는 Dify를 중국어나 영어 이외의 언어로 번역하는 데 도움을 줄 수 있는 기여자를 찾고 있습니다. 도움을 주고 싶으시다면 [i18n README](https://github.com/langgenius/dify/blob/main/web/i18n-config/README.md)에서 더 많은 정보를 확인하시고 [Discord 커뮤니티 서버](https://discord.gg/8Tpq4AcN9c)의 `global-users` 채널에 댓글을 남겨주세요.

View File

@@ -168,7 +168,7 @@ Implante o Dify no AKS com um clique usando [Azure Devops Pipeline Helm Chart by
## Contribuindo
Para aqueles que desejam contribuir com código, veja nosso [Guia de Contribuição](https://github.com/langgenius/dify/blob/main/CONTRIBUTING_PT.md).
Para aqueles que desejam contribuir com código, veja nosso [Guia de Contribuição](https://github.com/langgenius/dify/blob/main/CONTRIBUTING.md).
Ao mesmo tempo, considere apoiar o Dify compartilhando-o nas redes sociais e em eventos e conferências.
> Estamos buscando contribuidores para ajudar na tradução do Dify para idiomas além de Mandarim e Inglês. Se você tiver interesse em ajudar, consulte o [README i18n](https://github.com/langgenius/dify/blob/main/web/i18n-config/README.md) para mais informações e deixe-nos um comentário no canal `global-users` em nosso [Servidor da Comunidade no Discord](https://discord.gg/8Tpq4AcN9c).

View File

@@ -161,7 +161,7 @@ Dify'ı bulut platformuna tek tıklamayla dağıtın [terraform](https://www.ter
## Katkıda Bulunma
Kod katkısında bulunmak isteyenler için [Katkı Kılavuzumuza](https://github.com/langgenius/dify/blob/main/CONTRIBUTING_TR.md) bakabilirsiniz.
Kod katkısında bulunmak isteyenler için [Katkı Kılavuzumuza](https://github.com/langgenius/dify/blob/main/CONTRIBUTING.md) bakabilirsiniz.
Aynı zamanda, lütfen Dify'ı sosyal medyada, etkinliklerde ve konferanslarda paylaşarak desteklemeyi düşünün.
> Dify'ı Mandarin veya İngilizce dışındaki dillere çevirmemize yardımcı olacak katkıda bulunanlara ihtiyacımız var. Yardımcı olmakla ilgileniyorsanız, lütfen daha fazla bilgi için [i18n README](https://github.com/langgenius/dify/blob/main/web/i18n-config/README.md) dosyasına bakın ve [Discord Topluluk Sunucumuzdaki](https://discord.gg/8Tpq4AcN9c) `global-users` kanalında bize bir yorum bırakın.

View File

@@ -173,7 +173,7 @@ Dify 的所有功能都提供相應的 API因此您可以輕鬆地將 Dify
## 貢獻
對於想要貢獻程式碼的開發者,請參閱我們的[貢獻指南](https://github.com/langgenius/dify/blob/main/CONTRIBUTING_TW.md)。
對於想要貢獻程式碼的開發者,請參閱我們的[貢獻指南](https://github.com/langgenius/dify/blob/main/CONTRIBUTING.md)。
同時,也請考慮透過在社群媒體和各種活動與會議上分享 Dify 來支持我們。
> 我們正在尋找貢獻者協助將 Dify 翻譯成中文和英文以外的語言。如果您有興趣幫忙,請查看 [i18n README](https://github.com/langgenius/dify/blob/main/web/i18n-config/README.md) 獲取更多資訊,並在我們的 [Discord 社群伺服器](https://discord.gg/8Tpq4AcN9c) 的 `global-users` 頻道留言給我們。

View File

@@ -162,7 +162,7 @@ Triển khai Dify lên AKS chỉ với một cú nhấp chuột bằng [Azure De
## Đóng góp
Đối với những người muốn đóng góp mã, xem [Hướng dẫn Đóng góp](https://github.com/langgenius/dify/blob/main/CONTRIBUTING_VI.md) của chúng tôi.
Đối với những người muốn đóng góp mã, xem [Hướng dẫn Đóng góp](https://github.com/langgenius/dify/blob/main/CONTRIBUTING.md) của chúng tôi.
Đồng thời, vui lòng xem xét hỗ trợ Dify bằng cách chia sẻ nó trên mạng xã hội và tại các sự kiện và hội nghị.
> Chúng tôi đang tìm kiếm người đóng góp để giúp dịch Dify sang các ngôn ngữ khác ngoài tiếng Trung hoặc tiếng Anh. Nếu bạn quan tâm đến việc giúp đỡ, vui lòng xem [README i18n](https://github.com/langgenius/dify/blob/main/web/i18n-config/README.md) để biết thêm thông tin và để lại bình luận cho chúng tôi trong kênh `global-users` của [Máy chủ Cộng đồng Discord](https://discord.gg/8Tpq4AcN9c) của chúng tôi.

View File

@@ -75,7 +75,6 @@ DB_PASSWORD=difyai123456
DB_HOST=localhost
DB_PORT=5432
DB_DATABASE=dify
SQLALCHEMY_POOL_PRE_PING=true
# Storage configuration
# use for store upload files, private keys...
@@ -328,7 +327,7 @@ MATRIXONE_DATABASE=dify
LINDORM_URL=http://ld-*******************-proxy-search-pub.lindorm.aliyuncs.com:30070
LINDORM_USERNAME=admin
LINDORM_PASSWORD=admin
LINDORM_USING_UGC=True
USING_UGC_INDEX=False
LINDORM_QUERY_TIMEOUT=1
# OceanBase Vector configuration
@@ -530,7 +529,6 @@ ENDPOINT_URL_TEMPLATE=http://localhost:5002/e/{hook_id}
# Reset password token expiry minutes
RESET_PASSWORD_TOKEN_EXPIRY_MINUTES=5
EMAIL_REGISTER_TOKEN_EXPIRY_MINUTES=5
CHANGE_EMAIL_TOKEN_EXPIRY_MINUTES=5
OWNER_TRANSFER_TOKEN_EXPIRY_MINUTES=5
@@ -566,11 +564,3 @@ QUEUE_MONITOR_THRESHOLD=200
QUEUE_MONITOR_ALERT_EMAILS=
# Monitor interval in minutes, default is 30 minutes
QUEUE_MONITOR_INTERVAL=30
# Swagger UI configuration
SWAGGER_UI_ENABLED=true
SWAGGER_UI_PATH=/swagger-ui.html
# Whether to encrypt dataset IDs when exporting DSL files (default: true)
# Set to false to export dataset IDs as plain text for easier cross-environment import
DSL_EXPORT_ENCRYPT_DATASET_ID=true

View File

@@ -5,7 +5,7 @@ line-length = 120
quote-style = "double"
[lint]
preview = true
preview = false
select = [
"B", # flake8-bugbear rules
"C4", # flake8-comprehensions
@@ -43,9 +43,7 @@ select = [
"S302", # suspicious-marshal-usage, disallow use of `marshal` module
"S311", # suspicious-non-cryptographic-random-usage
"G001", # don't use str format to logging messages
"G003", # don't use + in logging messages
"G004", # don't use f-strings to format logging messages
"UP042", # use StrEnum
]
ignore = [
@@ -65,7 +63,6 @@ ignore = [
"B006", # mutable-argument-default
"B007", # unused-loop-control-variable
"B026", # star-arg-unpacking-after-keyword-arg
"B901", # allow return in yield
"B903", # class-as-data-structure
"B904", # raise-without-from-inside-except
"B905", # zip-without-explicit-strict

View File

@@ -99,14 +99,14 @@ uv run celery -A app.celery beat
1. Run the tests locally with mocked system environment variables in `tool.pytest_env` section in `pyproject.toml`, more can check [Claude.md](../CLAUDE.md)
```bash
uv run pytest # Run all tests
uv run pytest tests/unit_tests/ # Unit tests only
uv run pytest tests/integration_tests/ # Integration tests
```cli
uv run --project api pytest # Run all tests
uv run --project api pytest tests/unit_tests/ # Unit tests only
uv run --project api pytest tests/integration_tests/ # Integration tests
# Code quality
../dev/reformat # Run all formatters and linters
uv run ruff check --fix ./ # Fix linting issues
uv run ruff format ./ # Format code
uv run basedpyright . # Type checking
./dev/reformat # Run all formatters and linters
uv run --project api ruff check --fix ./ # Fix linting issues
uv run --project api ruff format ./ # Format code
uv run --project api mypy . # Type checking
```

View File

@@ -25,9 +25,6 @@ def create_flask_app_with_configs() -> DifyApp:
# add an unique identifier to each request
RecyclableContextVar.increment_thread_recycles()
# Capture the decorator's return value to avoid pyright reportUnusedFunction
_ = before_request
return dify_app
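The removed lines implement a small type-checker workaround: the function exists only for its decorator's side effect, so assigning it to `_` marks it as used. A sketch of the pattern, assuming a plain Flask app:

```python
from flask import Flask

app = Flask(__name__)


@app.before_request
def before_request() -> None:
    ...  # per-request setup; registered via the decorator's side effect


# The name is otherwise "unused" to pyright, so capturing it silences
# the reportUnusedFunction diagnostic. This branch drops the workaround.
_ = before_request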

View File

@@ -1,9 +1,8 @@
import base64
import json
import logging
import operator
import secrets
from typing import Any
from typing import Any, Optional
import click
import sqlalchemy as sa
@@ -39,8 +38,6 @@ from services.plugin.data_migration import PluginDataMigration
from services.plugin.plugin_migration import PluginMigration
from tasks.remove_app_and_related_data_task import delete_draft_variables_batch
logger = logging.getLogger(__name__)
@click.command("reset-password", help="Reset the account password.")
@click.option("--email", prompt=True, help="Account email to reset password for")
@@ -213,9 +210,7 @@ def migrate_annotation_vector_database():
if not dataset_collection_binding:
click.echo(f"App annotation collection binding not found: {app.id}")
continue
annotations = db.session.scalars(
select(MessageAnnotation).where(MessageAnnotation.app_id == app.id)
).all()
annotations = db.session.query(MessageAnnotation).where(MessageAnnotation.app_id == app.id).all()
dataset = Dataset(
id=app.id,
tenant_id=app.tenant_id,
@@ -370,25 +365,29 @@ def migrate_knowledge_vector_database():
)
raise e
dataset_documents = db.session.scalars(
select(DatasetDocument).where(
dataset_documents = (
db.session.query(DatasetDocument)
.where(
DatasetDocument.dataset_id == dataset.id,
DatasetDocument.indexing_status == "completed",
DatasetDocument.enabled == True,
DatasetDocument.archived == False,
)
).all()
.all()
)
documents = []
segments_count = 0
for dataset_document in dataset_documents:
segments = db.session.scalars(
select(DocumentSegment).where(
segments = (
db.session.query(DocumentSegment)
.where(
DocumentSegment.document_id == dataset_document.id,
DocumentSegment.status == "completed",
DocumentSegment.enabled == True,
)
).all()
.all()
)
for segment in segments:
document = Document(
@@ -478,12 +477,12 @@ def convert_to_agent_apps():
click.echo(f"Converting app: {app.id}")
try:
app.mode = AppMode.AGENT_CHAT
app.mode = AppMode.AGENT_CHAT.value
db.session.commit()
# update conversation mode to agent
db.session.query(Conversation).where(Conversation.app_id == app.id).update(
{Conversation.mode: AppMode.AGENT_CHAT}
{Conversation.mode: AppMode.AGENT_CHAT.value}
)
db.session.commit()
@@ -510,7 +509,7 @@ def add_qdrant_index(field: str):
from qdrant_client.http.exceptions import UnexpectedResponse
from qdrant_client.http.models import PayloadSchemaType
from core.rag.datasource.vdb.qdrant.qdrant_vector import PathQdrantParams, QdrantConfig
from core.rag.datasource.vdb.qdrant.qdrant_vector import QdrantConfig
for binding in bindings:
if dify_config.QDRANT_URL is None:
@@ -524,21 +523,7 @@ def add_qdrant_index(field: str):
prefer_grpc=dify_config.QDRANT_GRPC_ENABLED,
)
try:
params = qdrant_config.to_qdrant_params()
# Check the type before using
if isinstance(params, PathQdrantParams):
# PathQdrantParams case
client = qdrant_client.QdrantClient(path=params.path)
else:
# UrlQdrantParams case - params is UrlQdrantParams
client = qdrant_client.QdrantClient(
url=params.url,
api_key=params.api_key,
timeout=int(params.timeout),
verify=params.verify,
grpc_port=params.grpc_port,
prefer_grpc=params.prefer_grpc,
)
client = qdrant_client.QdrantClient(**qdrant_config.to_qdrant_params())
# create payload index
client.create_payload_index(binding.collection_name, field, field_schema=PayloadSchemaType.KEYWORD)
create_count += 1
@@ -584,7 +569,7 @@ def old_metadata_migration():
for document in documents:
if document.doc_metadata:
doc_metadata = document.doc_metadata
for key in doc_metadata:
for key, value in doc_metadata.items():
for field in BuiltInField:
if field.value == key:
break
@@ -640,7 +625,7 @@ def old_metadata_migration():
@click.option("--email", prompt=True, help="Tenant account email.")
@click.option("--name", prompt=True, help="Workspace name.")
@click.option("--language", prompt=True, help="Account language, default: en-US.")
def create_tenant(email: str, language: str | None = None, name: str | None = None):
def create_tenant(email: str, language: Optional[str] = None, name: Optional[str] = None):
"""
Create tenant account
"""
@@ -700,7 +685,7 @@ def upgrade_db():
click.echo(click.style("Database migration successful!", fg="green"))
except Exception:
logger.exception("Failed to execute database migration")
logging.exception("Failed to execute database migration")
finally:
lock.release()
else:
@@ -748,7 +733,7 @@ where sites.id is null limit 1000"""
except Exception:
failed_app_ids.append(app_id)
click.echo(click.style(f"Failed to fix missing site for app {app_id}", fg="red"))
logger.exception("Failed to fix app related site missing issue, app_id: %s", app_id)
logging.exception("Failed to fix app related site missing issue, app_id: %s", app_id)
continue
if not processed_count:
@@ -954,7 +939,7 @@ def clear_orphaned_file_records(force: bool):
click.echo(click.style("- Deleting orphaned message_files records", fg="white"))
query = "DELETE FROM message_files WHERE id IN :ids"
with db.engine.begin() as conn:
conn.execute(sa.text(query), {"ids": tuple(record["id"] for record in orphaned_message_files)})
conn.execute(sa.text(query), {"ids": tuple([record["id"] for record in orphaned_message_files])})
click.echo(
click.style(f"Removed {len(orphaned_message_files)} orphaned message_files records.", fg="green")
)
@@ -1308,7 +1293,7 @@ def cleanup_orphaned_draft_variables(
if dry_run:
logger.info("DRY RUN: Would delete the following:")
for app_id, count in sorted(stats["orphaned_by_app"].items(), key=operator.itemgetter(1), reverse=True)[
for app_id, count in sorted(stats["orphaned_by_app"].items(), key=lambda x: x[1], reverse=True)[
:10
]: # Show top 10
logger.info(" App %s: %s variables", app_id, count)

View File

@@ -1,3 +1,5 @@
from typing import Optional
from pydantic import Field
from pydantic_settings import BaseSettings
@@ -7,28 +9,28 @@ class NotionConfig(BaseSettings):
Configuration settings for Notion integration
"""
NOTION_CLIENT_ID: str | None = Field(
NOTION_CLIENT_ID: Optional[str] = Field(
description="Client ID for Notion API authentication. Required for OAuth 2.0 flow.",
default=None,
)
NOTION_CLIENT_SECRET: str | None = Field(
NOTION_CLIENT_SECRET: Optional[str] = Field(
description="Client secret for Notion API authentication. Required for OAuth 2.0 flow.",
default=None,
)
NOTION_INTEGRATION_TYPE: str | None = Field(
NOTION_INTEGRATION_TYPE: Optional[str] = Field(
description="Type of Notion integration."
" Set to 'internal' for internal integrations, or None for public integrations.",
default=None,
)
NOTION_INTERNAL_SECRET: str | None = Field(
NOTION_INTERNAL_SECRET: Optional[str] = Field(
description="Secret key for internal Notion integrations. Required when NOTION_INTEGRATION_TYPE is 'internal'.",
default=None,
)
NOTION_INTEGRATION_TOKEN: str | None = Field(
NOTION_INTEGRATION_TOKEN: Optional[str] = Field(
description="Integration token for Notion API access. Used for direct API calls without OAuth flow.",
default=None,
)
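The bulk of the settings churn in this branch is mechanical: `typing.Optional[str]` and the PEP 604 form `str | None` denote the same type, and pydantic treats them identically. A minimal sketch, assuming pydantic v2 with `pydantic_settings` installed (field names are illustrative):

```python
from typing import Optional

from pydantic import Field
from pydantic_settings import BaseSettings


class ExampleConfig(BaseSettings):
    # Equivalent declarations: both are nullable string settings.
    CLIENT_ID: Optional[str] = Field(description="OAuth client ID", default=None)
    CLIENT_SECRET: str | None = Field(description="OAuth client secret", default=None)


cfg = ExampleConfig()
assert cfg.CLIENT_ID is None and cfg.CLIENT_SECRET is None
```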

View File

@@ -1,3 +1,5 @@
from typing import Optional
from pydantic import Field, NonNegativeFloat
from pydantic_settings import BaseSettings
@@ -7,7 +9,7 @@ class SentryConfig(BaseSettings):
Configuration settings for Sentry error tracking and performance monitoring
"""
SENTRY_DSN: str | None = Field(
SENTRY_DSN: Optional[str] = Field(
description="Sentry Data Source Name (DSN)."
" This is the unique identifier of your Sentry project, used to send events to the correct project.",
default=None,

View File

@@ -1,4 +1,4 @@
from typing import Literal
from typing import Annotated, Literal, Optional
from pydantic import (
AliasChoices,
@@ -31,12 +31,6 @@ class SecurityConfig(BaseSettings):
description="Duration in minutes for which a password reset token remains valid",
default=5,
)
EMAIL_REGISTER_TOKEN_EXPIRY_MINUTES: PositiveInt = Field(
description="Duration in minutes for which a email register token remains valid",
default=5,
)
CHANGE_EMAIL_TOKEN_EXPIRY_MINUTES: PositiveInt = Field(
description="Duration in minutes for which a change email token remains valid",
default=5,
@@ -57,7 +51,7 @@ class SecurityConfig(BaseSettings):
default=False,
)
ADMIN_API_KEY: str | None = Field(
ADMIN_API_KEY: Optional[str] = Field(
description="admin api key for authentication",
default=None,
)
@@ -97,17 +91,17 @@ class CodeExecutionSandboxConfig(BaseSettings):
default="dify-sandbox",
)
CODE_EXECUTION_CONNECT_TIMEOUT: float | None = Field(
CODE_EXECUTION_CONNECT_TIMEOUT: Optional[float] = Field(
description="Connection timeout in seconds for code execution requests",
default=10.0,
)
CODE_EXECUTION_READ_TIMEOUT: float | None = Field(
CODE_EXECUTION_READ_TIMEOUT: Optional[float] = Field(
description="Read timeout in seconds for code execution requests",
default=60.0,
)
CODE_EXECUTION_WRITE_TIMEOUT: float | None = Field(
CODE_EXECUTION_WRITE_TIMEOUT: Optional[float] = Field(
description="Write timeout in seconds for code execution request",
default=10.0,
)
@@ -368,17 +362,17 @@ class HttpConfig(BaseSettings):
default=3,
)
SSRF_PROXY_ALL_URL: str | None = Field(
SSRF_PROXY_ALL_URL: Optional[str] = Field(
description="Proxy URL for HTTP or HTTPS requests to prevent Server-Side Request Forgery (SSRF)",
default=None,
)
SSRF_PROXY_HTTP_URL: str | None = Field(
SSRF_PROXY_HTTP_URL: Optional[str] = Field(
description="Proxy URL for HTTP requests to prevent Server-Side Request Forgery (SSRF)",
default=None,
)
SSRF_PROXY_HTTPS_URL: str | None = Field(
SSRF_PROXY_HTTPS_URL: Optional[str] = Field(
description="Proxy URL for HTTPS requests to prevent Server-Side Request Forgery (SSRF)",
default=None,
)
@@ -420,7 +414,7 @@ class InnerAPIConfig(BaseSettings):
default=False,
)
INNER_API_KEY: str | None = Field(
INNER_API_KEY: Optional[str] = Field(
description="API key for accessing the internal API",
default=None,
)
@@ -436,7 +430,7 @@ class LoggingConfig(BaseSettings):
default="INFO",
)
LOG_FILE: str | None = Field(
LOG_FILE: Optional[str] = Field(
description="File path for log output.",
default=None,
)
@@ -456,12 +450,12 @@ class LoggingConfig(BaseSettings):
default="%(asctime)s.%(msecs)03d %(levelname)s [%(threadName)s] [%(filename)s:%(lineno)d] - %(message)s",
)
LOG_DATEFORMAT: str | None = Field(
LOG_DATEFORMAT: Optional[str] = Field(
description="Date format string for log timestamps",
default=None,
)
LOG_TZ: str | None = Field(
LOG_TZ: Optional[str] = Field(
description="Timezone for log timestamps (e.g., 'America/New_York')",
default="UTC",
)
@@ -595,22 +589,22 @@ class AuthConfig(BaseSettings):
default="/console/api/oauth/authorize",
)
GITHUB_CLIENT_ID: str | None = Field(
GITHUB_CLIENT_ID: Optional[str] = Field(
description="GitHub OAuth client ID",
default=None,
)
GITHUB_CLIENT_SECRET: str | None = Field(
GITHUB_CLIENT_SECRET: Optional[str] = Field(
description="GitHub OAuth client secret",
default=None,
)
GOOGLE_CLIENT_ID: str | None = Field(
GOOGLE_CLIENT_ID: Optional[str] = Field(
description="Google OAuth client ID",
default=None,
)
GOOGLE_CLIENT_SECRET: str | None = Field(
GOOGLE_CLIENT_SECRET: Optional[str] = Field(
description="Google OAuth client secret",
default=None,
)
@@ -645,11 +639,6 @@ class AuthConfig(BaseSettings):
default=86400,
)
EMAIL_REGISTER_LOCKOUT_DURATION: PositiveInt = Field(
description="Time (in seconds) a user must wait before retrying email register after exceeding the rate limit.",
default=86400,
)
class ModerationConfig(BaseSettings):
"""
@@ -678,42 +667,42 @@ class MailConfig(BaseSettings):
Configuration for email services
"""
MAIL_TYPE: str | None = Field(
MAIL_TYPE: Optional[str] = Field(
description="Email service provider type ('smtp' or 'resend' or 'sendGrid), default to None.",
default=None,
)
MAIL_DEFAULT_SEND_FROM: str | None = Field(
MAIL_DEFAULT_SEND_FROM: Optional[str] = Field(
description="Default email address to use as the sender",
default=None,
)
RESEND_API_KEY: str | None = Field(
RESEND_API_KEY: Optional[str] = Field(
description="API key for Resend email service",
default=None,
)
RESEND_API_URL: str | None = Field(
RESEND_API_URL: Optional[str] = Field(
description="API URL for Resend email service",
default=None,
)
SMTP_SERVER: str | None = Field(
SMTP_SERVER: Optional[str] = Field(
description="SMTP server hostname",
default=None,
)
SMTP_PORT: int | None = Field(
SMTP_PORT: Optional[int] = Field(
description="SMTP server port number",
default=465,
)
SMTP_USERNAME: str | None = Field(
SMTP_USERNAME: Optional[str] = Field(
description="Username for SMTP authentication",
default=None,
)
SMTP_PASSWORD: str | None = Field(
SMTP_PASSWORD: Optional[str] = Field(
description="Password for SMTP authentication",
default=None,
)
@@ -733,7 +722,7 @@ class MailConfig(BaseSettings):
default=50,
)
SENDGRID_API_KEY: str | None = Field(
SENDGRID_API_KEY: Optional[str] = Field(
description="API key for SendGrid service",
default=None,
)
@@ -756,17 +745,17 @@ class RagEtlConfig(BaseSettings):
default="database",
)
UNSTRUCTURED_API_URL: str | None = Field(
UNSTRUCTURED_API_URL: Optional[str] = Field(
description="API URL for Unstructured.io service",
default=None,
)
UNSTRUCTURED_API_KEY: str | None = Field(
UNSTRUCTURED_API_KEY: Optional[str] = Field(
description="API key for Unstructured.io service",
default="",
)
SCARF_NO_ANALYTICS: str | None = Field(
SCARF_NO_ANALYTICS: Optional[str] = Field(
description="This is about whether to disable Scarf analytics in Unstructured library.",
default="false",
)
@@ -807,11 +796,6 @@ class DataSetConfig(BaseSettings):
default=30,
)
DSL_EXPORT_ENCRYPT_DATASET_ID: bool = Field(
description="Enable or disable dataset ID encryption when exporting DSL files",
default=True,
)
class WorkspaceConfig(BaseSettings):
"""
@@ -992,18 +976,6 @@ class WorkflowLogConfig(BaseSettings):
)
class SwaggerUIConfig(BaseSettings):
SWAGGER_UI_ENABLED: bool = Field(
description="Whether to enable Swagger UI in api module",
default=True,
)
SWAGGER_UI_PATH: str = Field(
description="Swagger UI page path in api module",
default="/swagger-ui.html",
)
class FeatureConfig(
# place the configs in alphabet order
AppExecutionConfig,
@@ -1035,7 +1007,6 @@ class FeatureConfig(
WorkspaceConfig,
LoginConfig,
AccountConfig,
SwaggerUIConfig,
# hosted services config
HostedServiceConfig,
CeleryBeatConfig,

View File

@@ -1,3 +1,5 @@
from typing import Optional
from pydantic import Field, NonNegativeInt
from pydantic_settings import BaseSettings
@@ -38,17 +40,17 @@ class HostedOpenAiConfig(BaseSettings):
Configuration for hosted OpenAI service
"""
HOSTED_OPENAI_API_KEY: str | None = Field(
HOSTED_OPENAI_API_KEY: Optional[str] = Field(
description="API key for hosted OpenAI service",
default=None,
)
HOSTED_OPENAI_API_BASE: str | None = Field(
HOSTED_OPENAI_API_BASE: Optional[str] = Field(
description="Base URL for hosted OpenAI API",
default=None,
)
HOSTED_OPENAI_API_ORGANIZATION: str | None = Field(
HOSTED_OPENAI_API_ORGANIZATION: Optional[str] = Field(
description="Organization ID for hosted OpenAI service",
default=None,
)
@@ -108,12 +110,12 @@ class HostedAzureOpenAiConfig(BaseSettings):
default=False,
)
HOSTED_AZURE_OPENAI_API_KEY: str | None = Field(
HOSTED_AZURE_OPENAI_API_KEY: Optional[str] = Field(
description="API key for hosted Azure OpenAI service",
default=None,
)
HOSTED_AZURE_OPENAI_API_BASE: str | None = Field(
HOSTED_AZURE_OPENAI_API_BASE: Optional[str] = Field(
description="Base URL for hosted Azure OpenAI API",
default=None,
)
@@ -129,12 +131,12 @@ class HostedAnthropicConfig(BaseSettings):
Configuration for hosted Anthropic service
"""
HOSTED_ANTHROPIC_API_BASE: str | None = Field(
HOSTED_ANTHROPIC_API_BASE: Optional[str] = Field(
description="Base URL for hosted Anthropic API",
default=None,
)
HOSTED_ANTHROPIC_API_KEY: str | None = Field(
HOSTED_ANTHROPIC_API_KEY: Optional[str] = Field(
description="API key for hosted Anthropic service",
default=None,
)

View File

@@ -1,5 +1,5 @@
import os
from typing import Any, Literal
from typing import Any, Literal, Optional
from urllib.parse import parse_qsl, quote_plus
from pydantic import Field, NonNegativeFloat, NonNegativeInt, PositiveFloat, PositiveInt, computed_field
@@ -78,18 +78,18 @@ class StorageConfig(BaseSettings):
class VectorStoreConfig(BaseSettings):
VECTOR_STORE: str | None = Field(
VECTOR_STORE: Optional[str] = Field(
description="Type of vector store to use for efficient similarity search."
" Set to None if not using a vector store.",
default=None,
)
VECTOR_STORE_WHITELIST_ENABLE: bool | None = Field(
VECTOR_STORE_WHITELIST_ENABLE: Optional[bool] = Field(
description="Enable whitelist for vector store.",
default=False,
)
VECTOR_INDEX_NAME_PREFIX: str | None = Field(
VECTOR_INDEX_NAME_PREFIX: Optional[str] = Field(
description="Prefix used to create collection name in vector database",
default="Vector_index",
)
@@ -215,7 +215,6 @@ class DatabaseConfig(BaseSettings):
"pool_pre_ping": self.SQLALCHEMY_POOL_PRE_PING,
"connect_args": connect_args,
"pool_use_lifo": self.SQLALCHEMY_POOL_USE_LIFO,
"pool_reset_on_return": None,
}
@@ -225,26 +224,26 @@ class CeleryConfig(DatabaseConfig):
default="redis",
)
CELERY_BROKER_URL: str | None = Field(
CELERY_BROKER_URL: Optional[str] = Field(
description="URL of the message broker for Celery tasks.",
default=None,
)
CELERY_USE_SENTINEL: bool | None = Field(
CELERY_USE_SENTINEL: Optional[bool] = Field(
description="Whether to use Redis Sentinel for high availability.",
default=False,
)
CELERY_SENTINEL_MASTER_NAME: str | None = Field(
CELERY_SENTINEL_MASTER_NAME: Optional[str] = Field(
description="Name of the Redis Sentinel master.",
default=None,
)
CELERY_SENTINEL_PASSWORD: str | None = Field(
CELERY_SENTINEL_PASSWORD: Optional[str] = Field(
description="Password of the Redis Sentinel master.",
default=None,
)
CELERY_SENTINEL_SOCKET_TIMEOUT: PositiveFloat | None = Field(
CELERY_SENTINEL_SOCKET_TIMEOUT: Optional[PositiveFloat] = Field(
description="Timeout for Redis Sentinel socket operations in seconds.",
default=0.1,
)
@@ -268,12 +267,12 @@ class InternalTestConfig(BaseSettings):
Configuration settings for Internal Test
"""
AWS_SECRET_ACCESS_KEY: str | None = Field(
AWS_SECRET_ACCESS_KEY: Optional[str] = Field(
description="Internal test AWS secret access key",
default=None,
)
AWS_ACCESS_KEY_ID: str | None = Field(
AWS_ACCESS_KEY_ID: Optional[str] = Field(
description="Internal test AWS access key ID",
default=None,
)
@@ -284,15 +283,15 @@ class DatasetQueueMonitorConfig(BaseSettings):
Configuration settings for Dataset Queue Monitor
"""
QUEUE_MONITOR_THRESHOLD: NonNegativeInt | None = Field(
QUEUE_MONITOR_THRESHOLD: Optional[NonNegativeInt] = Field(
description="Threshold for dataset queue monitor",
default=200,
)
QUEUE_MONITOR_ALERT_EMAILS: str | None = Field(
QUEUE_MONITOR_ALERT_EMAILS: Optional[str] = Field(
description="Emails for dataset queue monitor alert, separated by commas",
default=None,
)
QUEUE_MONITOR_INTERVAL: NonNegativeFloat | None = Field(
QUEUE_MONITOR_INTERVAL: Optional[NonNegativeFloat] = Field(
description="Interval for dataset queue monitor in minutes",
default=30,
)
@@ -300,7 +299,8 @@ class DatasetQueueMonitorConfig(BaseSettings):
class MiddlewareConfig(
# place the configs in alphabet order
CeleryConfig, # Note: CeleryConfig already inherits from DatabaseConfig
CeleryConfig,
DatabaseConfig,
KeywordStoreConfig,
RedisConfig,
# configs of storage and storage providers

View File

@@ -1,3 +1,5 @@
from typing import Optional
from pydantic import Field, NonNegativeInt, PositiveFloat, PositiveInt
from pydantic_settings import BaseSettings
@@ -17,12 +19,12 @@ class RedisConfig(BaseSettings):
default=6379,
)
REDIS_USERNAME: str | None = Field(
REDIS_USERNAME: Optional[str] = Field(
description="Username for Redis authentication (if required)",
default=None,
)
REDIS_PASSWORD: str | None = Field(
REDIS_PASSWORD: Optional[str] = Field(
description="Password for Redis authentication (if required)",
default=None,
)
@@ -42,47 +44,47 @@ class RedisConfig(BaseSettings):
default="CERT_NONE",
)
REDIS_SSL_CA_CERTS: str | None = Field(
REDIS_SSL_CA_CERTS: Optional[str] = Field(
description="Path to the CA certificate file for SSL verification",
default=None,
)
REDIS_SSL_CERTFILE: str | None = Field(
REDIS_SSL_CERTFILE: Optional[str] = Field(
description="Path to the client certificate file for SSL authentication",
default=None,
)
REDIS_SSL_KEYFILE: str | None = Field(
REDIS_SSL_KEYFILE: Optional[str] = Field(
description="Path to the client private key file for SSL authentication",
default=None,
)
REDIS_USE_SENTINEL: bool | None = Field(
REDIS_USE_SENTINEL: Optional[bool] = Field(
description="Enable Redis Sentinel mode for high availability",
default=False,
)
REDIS_SENTINELS: str | None = Field(
REDIS_SENTINELS: Optional[str] = Field(
description="Comma-separated list of Redis Sentinel nodes (host:port)",
default=None,
)
REDIS_SENTINEL_SERVICE_NAME: str | None = Field(
REDIS_SENTINEL_SERVICE_NAME: Optional[str] = Field(
description="Name of the Redis Sentinel service to monitor",
default=None,
)
REDIS_SENTINEL_USERNAME: str | None = Field(
REDIS_SENTINEL_USERNAME: Optional[str] = Field(
description="Username for Redis Sentinel authentication (if required)",
default=None,
)
REDIS_SENTINEL_PASSWORD: str | None = Field(
REDIS_SENTINEL_PASSWORD: Optional[str] = Field(
description="Password for Redis Sentinel authentication (if required)",
default=None,
)
REDIS_SENTINEL_SOCKET_TIMEOUT: PositiveFloat | None = Field(
REDIS_SENTINEL_SOCKET_TIMEOUT: Optional[PositiveFloat] = Field(
description="Socket timeout in seconds for Redis Sentinel connections",
default=0.1,
)
@@ -92,12 +94,12 @@ class RedisConfig(BaseSettings):
default=False,
)
REDIS_CLUSTERS: str | None = Field(
REDIS_CLUSTERS: Optional[str] = Field(
description="Comma-separated list of Redis Clusters nodes (host:port)",
default=None,
)
REDIS_CLUSTERS_PASSWORD: str | None = Field(
REDIS_CLUSTERS_PASSWORD: Optional[str] = Field(
description="Password for Redis Clusters authentication (if required)",
default=None,
)

View File

@@ -1,3 +1,5 @@
from typing import Optional
from pydantic import Field
from pydantic_settings import BaseSettings
@@ -7,37 +9,37 @@ class AliyunOSSStorageConfig(BaseSettings):
Configuration settings for Aliyun Object Storage Service (OSS)
"""
ALIYUN_OSS_BUCKET_NAME: str | None = Field(
ALIYUN_OSS_BUCKET_NAME: Optional[str] = Field(
description="Name of the Aliyun OSS bucket to store and retrieve objects",
default=None,
)
ALIYUN_OSS_ACCESS_KEY: str | None = Field(
ALIYUN_OSS_ACCESS_KEY: Optional[str] = Field(
description="Access key ID for authenticating with Aliyun OSS",
default=None,
)
ALIYUN_OSS_SECRET_KEY: str | None = Field(
ALIYUN_OSS_SECRET_KEY: Optional[str] = Field(
description="Secret access key for authenticating with Aliyun OSS",
default=None,
)
ALIYUN_OSS_ENDPOINT: str | None = Field(
ALIYUN_OSS_ENDPOINT: Optional[str] = Field(
description="URL of the Aliyun OSS endpoint for your chosen region",
default=None,
)
ALIYUN_OSS_REGION: str | None = Field(
ALIYUN_OSS_REGION: Optional[str] = Field(
description="Aliyun OSS region where your bucket is located (e.g., 'oss-cn-hangzhou')",
default=None,
)
ALIYUN_OSS_AUTH_VERSION: str | None = Field(
ALIYUN_OSS_AUTH_VERSION: Optional[str] = Field(
description="Version of the authentication protocol to use with Aliyun OSS (e.g., 'v4')",
default=None,
)
ALIYUN_OSS_PATH: str | None = Field(
ALIYUN_OSS_PATH: Optional[str] = Field(
description="Base path within the bucket to store objects (e.g., 'my-app-data/')",
default=None,
)

View File

@@ -1,4 +1,4 @@
from typing import Literal
from typing import Literal, Optional
from pydantic import Field
from pydantic_settings import BaseSettings
@@ -9,27 +9,27 @@ class S3StorageConfig(BaseSettings):
Configuration settings for S3-compatible object storage
"""
S3_ENDPOINT: str | None = Field(
S3_ENDPOINT: Optional[str] = Field(
description="URL of the S3-compatible storage endpoint (e.g., 'https://s3.amazonaws.com')",
default=None,
)
S3_REGION: str | None = Field(
S3_REGION: Optional[str] = Field(
description="Region where the S3 bucket is located (e.g., 'us-east-1')",
default=None,
)
S3_BUCKET_NAME: str | None = Field(
S3_BUCKET_NAME: Optional[str] = Field(
description="Name of the S3 bucket to store and retrieve objects",
default=None,
)
S3_ACCESS_KEY: str | None = Field(
S3_ACCESS_KEY: Optional[str] = Field(
description="Access key ID for authenticating with the S3 service",
default=None,
)
S3_SECRET_KEY: str | None = Field(
S3_SECRET_KEY: Optional[str] = Field(
description="Secret access key for authenticating with the S3 service",
default=None,
)
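
These S3 fields are the usual inputs to an S3 client constructor. A hedged sketch of how they might be wired up, assuming boto3 is available and treating the `dify_config` import as illustrative:

    import boto3

    from configs import dify_config  # assumed import path for the merged settings

    # Hypothetical wiring of the S3StorageConfig values into a boto3 client.
    s3 = boto3.client(
        "s3",
        endpoint_url=dify_config.S3_ENDPOINT,
        region_name=dify_config.S3_REGION,
        aws_access_key_id=dify_config.S3_ACCESS_KEY,
        aws_secret_access_key=dify_config.S3_SECRET_KEY,
    )
    s3.put_object(Bucket=dify_config.S3_BUCKET_NAME, Key="hello.txt", Body=b"hi")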

View File

@@ -1,3 +1,5 @@
from typing import Optional
from pydantic import Field
from pydantic_settings import BaseSettings
@@ -7,22 +9,22 @@ class AzureBlobStorageConfig(BaseSettings):
Configuration settings for Azure Blob Storage
"""
AZURE_BLOB_ACCOUNT_NAME: str | None = Field(
AZURE_BLOB_ACCOUNT_NAME: Optional[str] = Field(
description="Name of the Azure Storage account (e.g., 'mystorageaccount')",
default=None,
)
AZURE_BLOB_ACCOUNT_KEY: str | None = Field(
AZURE_BLOB_ACCOUNT_KEY: Optional[str] = Field(
description="Access key for authenticating with the Azure Storage account",
default=None,
)
AZURE_BLOB_CONTAINER_NAME: str | None = Field(
AZURE_BLOB_CONTAINER_NAME: Optional[str] = Field(
description="Name of the Azure Blob container to store and retrieve objects",
default=None,
)
AZURE_BLOB_ACCOUNT_URL: str | None = Field(
AZURE_BLOB_ACCOUNT_URL: Optional[str] = Field(
description="URL of the Azure Blob storage endpoint (e.g., 'https://mystorageaccount.blob.core.windows.net')",
default=None,
)

View File

@@ -1,3 +1,5 @@
from typing import Optional
from pydantic import Field
from pydantic_settings import BaseSettings
@@ -7,22 +9,22 @@ class BaiduOBSStorageConfig(BaseSettings):
Configuration settings for Baidu Object Storage Service (OBS)
"""
BAIDU_OBS_BUCKET_NAME: str | None = Field(
BAIDU_OBS_BUCKET_NAME: Optional[str] = Field(
description="Name of the Baidu OBS bucket to store and retrieve objects (e.g., 'my-obs-bucket')",
default=None,
)
BAIDU_OBS_ACCESS_KEY: str | None = Field(
BAIDU_OBS_ACCESS_KEY: Optional[str] = Field(
description="Access Key ID for authenticating with Baidu OBS",
default=None,
)
BAIDU_OBS_SECRET_KEY: str | None = Field(
BAIDU_OBS_SECRET_KEY: Optional[str] = Field(
description="Secret Access Key for authenticating with Baidu OBS",
default=None,
)
BAIDU_OBS_ENDPOINT: str | None = Field(
BAIDU_OBS_ENDPOINT: Optional[str] = Field(
description="URL of the Baidu OSS endpoint for your chosen region (e.g., 'https://.bj.bcebos.com')",
default=None,
)

View File

@@ -1,5 +1,7 @@
"""ClickZetta Volume Storage Configuration"""
from typing import Optional
from pydantic import Field
from pydantic_settings import BaseSettings
@@ -7,17 +9,17 @@ from pydantic_settings import BaseSettings
class ClickZettaVolumeStorageConfig(BaseSettings):
"""Configuration for ClickZetta Volume storage."""
CLICKZETTA_VOLUME_USERNAME: str | None = Field(
CLICKZETTA_VOLUME_USERNAME: Optional[str] = Field(
description="Username for ClickZetta Volume authentication",
default=None,
)
CLICKZETTA_VOLUME_PASSWORD: str | None = Field(
CLICKZETTA_VOLUME_PASSWORD: Optional[str] = Field(
description="Password for ClickZetta Volume authentication",
default=None,
)
CLICKZETTA_VOLUME_INSTANCE: str | None = Field(
CLICKZETTA_VOLUME_INSTANCE: Optional[str] = Field(
description="ClickZetta instance identifier",
default=None,
)
@@ -47,7 +49,7 @@ class ClickZettaVolumeStorageConfig(BaseSettings):
default="user",
)
CLICKZETTA_VOLUME_NAME: str | None = Field(
CLICKZETTA_VOLUME_NAME: Optional[str] = Field(
description="ClickZetta volume name for external volumes",
default=None,
)

View File

@@ -1,3 +1,5 @@
from typing import Optional
from pydantic import Field
from pydantic_settings import BaseSettings
@@ -7,12 +9,12 @@ class GoogleCloudStorageConfig(BaseSettings):
Configuration settings for Google Cloud Storage
"""
GOOGLE_STORAGE_BUCKET_NAME: str | None = Field(
GOOGLE_STORAGE_BUCKET_NAME: Optional[str] = Field(
description="Name of the Google Cloud Storage bucket to store and retrieve objects (e.g., 'my-gcs-bucket')",
default=None,
)
GOOGLE_STORAGE_SERVICE_ACCOUNT_JSON_BASE64: str | None = Field(
GOOGLE_STORAGE_SERVICE_ACCOUNT_JSON_BASE64: Optional[str] = Field(
description="Base64-encoded JSON key file for Google Cloud service account authentication",
default=None,
)

View File

@@ -1,3 +1,5 @@
from typing import Optional
from pydantic import Field
from pydantic_settings import BaseSettings
@@ -7,22 +9,22 @@ class HuaweiCloudOBSStorageConfig(BaseSettings):
Configuration settings for Huawei Cloud Object Storage Service (OBS)
"""
HUAWEI_OBS_BUCKET_NAME: str | None = Field(
HUAWEI_OBS_BUCKET_NAME: Optional[str] = Field(
description="Name of the Huawei Cloud OBS bucket to store and retrieve objects (e.g., 'my-obs-bucket')",
default=None,
)
HUAWEI_OBS_ACCESS_KEY: str | None = Field(
HUAWEI_OBS_ACCESS_KEY: Optional[str] = Field(
description="Access Key ID for authenticating with Huawei Cloud OBS",
default=None,
)
HUAWEI_OBS_SECRET_KEY: str | None = Field(
HUAWEI_OBS_SECRET_KEY: Optional[str] = Field(
description="Secret Access Key for authenticating with Huawei Cloud OBS",
default=None,
)
HUAWEI_OBS_SERVER: str | None = Field(
HUAWEI_OBS_SERVER: Optional[str] = Field(
description="Endpoint URL for Huawei Cloud OBS (e.g., 'https://obs.cn-north-4.myhuaweicloud.com')",
default=None,
)

View File

@@ -1,3 +1,5 @@
from typing import Optional
from pydantic import Field
from pydantic_settings import BaseSettings
@@ -7,27 +9,27 @@ class OCIStorageConfig(BaseSettings):
Configuration settings for Oracle Cloud Infrastructure (OCI) Object Storage
"""
OCI_ENDPOINT: str | None = Field(
OCI_ENDPOINT: Optional[str] = Field(
description="URL of the OCI Object Storage endpoint (e.g., 'https://objectstorage.us-phoenix-1.oraclecloud.com')",
default=None,
)
OCI_REGION: str | None = Field(
OCI_REGION: Optional[str] = Field(
description="OCI region where the bucket is located (e.g., 'us-phoenix-1')",
default=None,
)
OCI_BUCKET_NAME: str | None = Field(
OCI_BUCKET_NAME: Optional[str] = Field(
description="Name of the OCI Object Storage bucket to store and retrieve objects (e.g., 'my-oci-bucket')",
default=None,
)
OCI_ACCESS_KEY: str | None = Field(
OCI_ACCESS_KEY: Optional[str] = Field(
description="Access key (also known as API key) for authenticating with OCI Object Storage",
default=None,
)
OCI_SECRET_KEY: str | None = Field(
OCI_SECRET_KEY: Optional[str] = Field(
description="Secret key associated with the access key for authenticating with OCI Object Storage",
default=None,
)

View File

@@ -1,3 +1,5 @@
from typing import Optional
from pydantic import Field
from pydantic_settings import BaseSettings
@@ -7,17 +9,17 @@ class SupabaseStorageConfig(BaseSettings):
Configuration settings for Supabase Object Storage Service
"""
SUPABASE_BUCKET_NAME: str | None = Field(
SUPABASE_BUCKET_NAME: Optional[str] = Field(
description="Name of the Supabase bucket to store and retrieve objects (e.g., 'dify-bucket')",
default=None,
)
SUPABASE_API_KEY: str | None = Field(
SUPABASE_API_KEY: Optional[str] = Field(
description="API KEY for authenticating with Supabase",
default=None,
)
SUPABASE_URL: str | None = Field(
SUPABASE_URL: Optional[str] = Field(
description="URL of the Supabase",
default=None,
)

View File

@@ -1,3 +1,5 @@
from typing import Optional
from pydantic import Field
from pydantic_settings import BaseSettings
@@ -7,27 +9,27 @@ class TencentCloudCOSStorageConfig(BaseSettings):
Configuration settings for Tencent Cloud Object Storage (COS)
"""
TENCENT_COS_BUCKET_NAME: str | None = Field(
TENCENT_COS_BUCKET_NAME: Optional[str] = Field(
description="Name of the Tencent Cloud COS bucket to store and retrieve objects",
default=None,
)
TENCENT_COS_REGION: str | None = Field(
TENCENT_COS_REGION: Optional[str] = Field(
description="Tencent Cloud region where the COS bucket is located (e.g., 'ap-guangzhou')",
default=None,
)
TENCENT_COS_SECRET_ID: str | None = Field(
TENCENT_COS_SECRET_ID: Optional[str] = Field(
description="SecretId for authenticating with Tencent Cloud COS (part of API credentials)",
default=None,
)
TENCENT_COS_SECRET_KEY: str | None = Field(
TENCENT_COS_SECRET_KEY: Optional[str] = Field(
description="SecretKey for authenticating with Tencent Cloud COS (part of API credentials)",
default=None,
)
TENCENT_COS_SCHEME: str | None = Field(
TENCENT_COS_SCHEME: Optional[str] = Field(
description="Protocol scheme for COS requests: 'https' (recommended) or 'http'",
default=None,
)

View File

@@ -1,3 +1,5 @@
from typing import Optional
from pydantic import Field
from pydantic_settings import BaseSettings
@@ -7,27 +9,27 @@ class VolcengineTOSStorageConfig(BaseSettings):
Configuration settings for Volcengine Tinder Object Storage (TOS)
"""
VOLCENGINE_TOS_BUCKET_NAME: str | None = Field(
VOLCENGINE_TOS_BUCKET_NAME: Optional[str] = Field(
description="Name of the Volcengine TOS bucket to store and retrieve objects (e.g., 'my-tos-bucket')",
default=None,
)
VOLCENGINE_TOS_ACCESS_KEY: str | None = Field(
VOLCENGINE_TOS_ACCESS_KEY: Optional[str] = Field(
description="Access Key ID for authenticating with Volcengine TOS",
default=None,
)
VOLCENGINE_TOS_SECRET_KEY: str | None = Field(
VOLCENGINE_TOS_SECRET_KEY: Optional[str] = Field(
description="Secret Access Key for authenticating with Volcengine TOS",
default=None,
)
VOLCENGINE_TOS_ENDPOINT: str | None = Field(
VOLCENGINE_TOS_ENDPOINT: Optional[str] = Field(
description="URL of the Volcengine TOS endpoint (e.g., 'https://tos-cn-beijing.volces.com')",
default=None,
)
VOLCENGINE_TOS_REGION: str | None = Field(
VOLCENGINE_TOS_REGION: Optional[str] = Field(
description="Volcengine region where the TOS bucket is located (e.g., 'cn-beijing')",
default=None,
)

View File

@@ -1,3 +1,5 @@
from typing import Optional
from pydantic import Field, PositiveInt
from pydantic_settings import BaseSettings
@@ -9,37 +11,37 @@ class AnalyticdbConfig(BaseSettings):
https://www.alibabacloud.com/help/en/analyticdb-for-postgresql/getting-started/create-an-instance-instances-with-vector-engine-optimization-enabled
"""
ANALYTICDB_KEY_ID: str | None = Field(
ANALYTICDB_KEY_ID: Optional[str] = Field(
default=None, description="The Access Key ID provided by Alibaba Cloud for API authentication."
)
ANALYTICDB_KEY_SECRET: str | None = Field(
ANALYTICDB_KEY_SECRET: Optional[str] = Field(
default=None, description="The Secret Access Key corresponding to the Access Key ID for secure API access."
)
ANALYTICDB_REGION_ID: str | None = Field(
ANALYTICDB_REGION_ID: Optional[str] = Field(
default=None,
description="The region where the AnalyticDB instance is deployed (e.g., 'cn-hangzhou', 'ap-southeast-1').",
)
ANALYTICDB_INSTANCE_ID: str | None = Field(
ANALYTICDB_INSTANCE_ID: Optional[str] = Field(
default=None,
description="The unique identifier of the AnalyticDB instance you want to connect to.",
)
ANALYTICDB_ACCOUNT: str | None = Field(
ANALYTICDB_ACCOUNT: Optional[str] = Field(
default=None,
description="The account name used to log in to the AnalyticDB instance"
" (usually the initial account created with the instance).",
)
ANALYTICDB_PASSWORD: str | None = Field(
ANALYTICDB_PASSWORD: Optional[str] = Field(
default=None, description="The password associated with the AnalyticDB account for database authentication."
)
ANALYTICDB_NAMESPACE: str | None = Field(
ANALYTICDB_NAMESPACE: Optional[str] = Field(
default=None, description="The namespace within AnalyticDB for schema isolation (if using namespace feature)."
)
ANALYTICDB_NAMESPACE_PASSWORD: str | None = Field(
ANALYTICDB_NAMESPACE_PASSWORD: Optional[str] = Field(
default=None,
description="The password for accessing the specified namespace within the AnalyticDB instance"
" (if namespace feature is enabled).",
)
ANALYTICDB_HOST: str | None = Field(
ANALYTICDB_HOST: Optional[str] = Field(
default=None, description="The host of the AnalyticDB instance you want to connect to."
)
ANALYTICDB_PORT: PositiveInt = Field(

View File

@@ -1,3 +1,5 @@
from typing import Optional
from pydantic import Field, NonNegativeInt, PositiveInt
from pydantic_settings import BaseSettings
@@ -7,7 +9,7 @@ class BaiduVectorDBConfig(BaseSettings):
Configuration settings for Baidu Vector Database
"""
BAIDU_VECTOR_DB_ENDPOINT: str | None = Field(
BAIDU_VECTOR_DB_ENDPOINT: Optional[str] = Field(
description="URL of the Baidu Vector Database service (e.g., 'http://vdb.bj.baidubce.com')",
default=None,
)
@@ -17,17 +19,17 @@ class BaiduVectorDBConfig(BaseSettings):
default=30000,
)
BAIDU_VECTOR_DB_ACCOUNT: str | None = Field(
BAIDU_VECTOR_DB_ACCOUNT: Optional[str] = Field(
description="Account for authenticating with the Baidu Vector Database",
default=None,
)
BAIDU_VECTOR_DB_API_KEY: str | None = Field(
BAIDU_VECTOR_DB_API_KEY: Optional[str] = Field(
description="API key for authenticating with the Baidu Vector Database service",
default=None,
)
BAIDU_VECTOR_DB_DATABASE: str | None = Field(
BAIDU_VECTOR_DB_DATABASE: Optional[str] = Field(
description="Name of the specific Baidu Vector Database to connect to",
default=None,
)

View File

@@ -1,3 +1,5 @@
from typing import Optional
from pydantic import Field, PositiveInt
from pydantic_settings import BaseSettings
@@ -7,7 +9,7 @@ class ChromaConfig(BaseSettings):
Configuration settings for Chroma vector database
"""
CHROMA_HOST: str | None = Field(
CHROMA_HOST: Optional[str] = Field(
description="Hostname or IP address of the Chroma server (e.g., 'localhost' or '192.168.1.100')",
default=None,
)
@@ -17,22 +19,22 @@ class ChromaConfig(BaseSettings):
default=8000,
)
CHROMA_TENANT: str | None = Field(
CHROMA_TENANT: Optional[str] = Field(
description="Tenant identifier for multi-tenancy support in Chroma",
default=None,
)
CHROMA_DATABASE: str | None = Field(
CHROMA_DATABASE: Optional[str] = Field(
description="Name of the Chroma database to connect to",
default=None,
)
CHROMA_AUTH_PROVIDER: str | None = Field(
CHROMA_AUTH_PROVIDER: Optional[str] = Field(
description="Authentication provider for Chroma (e.g., 'basic', 'token', or a custom provider)",
default=None,
)
CHROMA_AUTH_CREDENTIALS: str | None = Field(
CHROMA_AUTH_CREDENTIALS: Optional[str] = Field(
description="Authentication credentials for Chroma (format depends on the auth provider)",
default=None,
)

View File

@@ -1,68 +1,69 @@
from pydantic import Field
from pydantic_settings import BaseSettings
from typing import Optional
from pydantic import BaseModel, Field
class ClickzettaConfig(BaseSettings):
class ClickzettaConfig(BaseModel):
"""
Clickzetta Lakehouse vector database configuration
"""
CLICKZETTA_USERNAME: str | None = Field(
CLICKZETTA_USERNAME: Optional[str] = Field(
description="Username for authenticating with Clickzetta Lakehouse",
default=None,
)
CLICKZETTA_PASSWORD: str | None = Field(
CLICKZETTA_PASSWORD: Optional[str] = Field(
description="Password for authenticating with Clickzetta Lakehouse",
default=None,
)
CLICKZETTA_INSTANCE: str | None = Field(
CLICKZETTA_INSTANCE: Optional[str] = Field(
description="Clickzetta Lakehouse instance ID",
default=None,
)
CLICKZETTA_SERVICE: str | None = Field(
CLICKZETTA_SERVICE: Optional[str] = Field(
description="Clickzetta API service endpoint (e.g., 'api.clickzetta.com')",
default="api.clickzetta.com",
)
CLICKZETTA_WORKSPACE: str | None = Field(
CLICKZETTA_WORKSPACE: Optional[str] = Field(
description="Clickzetta workspace name",
default="default",
)
CLICKZETTA_VCLUSTER: str | None = Field(
CLICKZETTA_VCLUSTER: Optional[str] = Field(
description="Clickzetta virtual cluster name",
default="default_ap",
)
CLICKZETTA_SCHEMA: str | None = Field(
CLICKZETTA_SCHEMA: Optional[str] = Field(
description="Database schema name in Clickzetta",
default="public",
)
CLICKZETTA_BATCH_SIZE: int | None = Field(
CLICKZETTA_BATCH_SIZE: Optional[int] = Field(
description="Batch size for bulk insert operations",
default=100,
)
CLICKZETTA_ENABLE_INVERTED_INDEX: bool | None = Field(
CLICKZETTA_ENABLE_INVERTED_INDEX: Optional[bool] = Field(
description="Enable inverted index for full-text search capabilities",
default=True,
)
CLICKZETTA_ANALYZER_TYPE: str | None = Field(
CLICKZETTA_ANALYZER_TYPE: Optional[str] = Field(
description="Analyzer type for full-text search: keyword, english, chinese, unicode",
default="chinese",
)
CLICKZETTA_ANALYZER_MODE: str | None = Field(
CLICKZETTA_ANALYZER_MODE: Optional[str] = Field(
description="Analyzer mode for tokenization: max_word (fine-grained) or smart (intelligent)",
default="smart",
)
CLICKZETTA_VECTOR_DISTANCE_FUNCTION: str | None = Field(
CLICKZETTA_VECTOR_DISTANCE_FUNCTION: Optional[str] = Field(
description="Distance function for vector similarity: l2_distance or cosine_distance",
default="cosine_distance",
)
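
Besides the `Optional` rewrite, this hunk also moves the class between `pydantic_settings.BaseSettings` and plain `pydantic.BaseModel`. That is a behavioral change: only `BaseSettings` subclasses pull missing fields from the environment. A sketch with hypothetical class names:

    import os

    from pydantic import BaseModel
    from pydantic_settings import BaseSettings

    os.environ["DEMO_USERNAME"] = "alice"

    class FromEnv(BaseSettings):
        DEMO_USERNAME: str | None = None

    class FromArgsOnly(BaseModel):
        DEMO_USERNAME: str | None = None

    print(FromEnv().DEMO_USERNAME)       # "alice", read from the environment
    print(FromArgsOnly().DEMO_USERNAME)  # None; values must be passed explicitly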

View File

@@ -1,3 +1,5 @@
from typing import Optional
from pydantic import Field
from pydantic_settings import BaseSettings
@@ -7,27 +9,27 @@ class CouchbaseConfig(BaseSettings):
Couchbase configs
"""
COUCHBASE_CONNECTION_STRING: str | None = Field(
COUCHBASE_CONNECTION_STRING: Optional[str] = Field(
description="COUCHBASE connection string",
default=None,
)
COUCHBASE_USER: str | None = Field(
COUCHBASE_USER: Optional[str] = Field(
description="COUCHBASE user",
default=None,
)
COUCHBASE_PASSWORD: str | None = Field(
COUCHBASE_PASSWORD: Optional[str] = Field(
description="COUCHBASE password",
default=None,
)
COUCHBASE_BUCKET_NAME: str | None = Field(
COUCHBASE_BUCKET_NAME: Optional[str] = Field(
description="COUCHBASE bucket name",
default=None,
)
COUCHBASE_SCOPE_NAME: str | None = Field(
COUCHBASE_SCOPE_NAME: Optional[str] = Field(
description="COUCHBASE scope name",
default=None,
)

View File

@@ -1,3 +1,5 @@
from typing import Optional
from pydantic import Field, PositiveInt, model_validator
from pydantic_settings import BaseSettings
@@ -8,7 +10,7 @@ class ElasticsearchConfig(BaseSettings):
Can load from environment variables or .env files.
"""
ELASTICSEARCH_HOST: str | None = Field(
ELASTICSEARCH_HOST: Optional[str] = Field(
description="Hostname or IP address of the Elasticsearch server (e.g., 'localhost' or '192.168.1.100')",
default="127.0.0.1",
)
@@ -18,28 +20,30 @@ class ElasticsearchConfig(BaseSettings):
default=9200,
)
ELASTICSEARCH_USERNAME: str | None = Field(
ELASTICSEARCH_USERNAME: Optional[str] = Field(
description="Username for authenticating with Elasticsearch (default is 'elastic')",
default="elastic",
)
ELASTICSEARCH_PASSWORD: str | None = Field(
ELASTICSEARCH_PASSWORD: Optional[str] = Field(
description="Password for authenticating with Elasticsearch (default is 'elastic')",
default="elastic",
)
# Elastic Cloud (optional)
ELASTICSEARCH_USE_CLOUD: bool | None = Field(
ELASTICSEARCH_USE_CLOUD: Optional[bool] = Field(
description="Set to True to use Elastic Cloud instead of self-hosted Elasticsearch", default=False
)
ELASTICSEARCH_CLOUD_URL: str | None = Field(
ELASTICSEARCH_CLOUD_URL: Optional[str] = Field(
description="Full URL for Elastic Cloud deployment (e.g., 'https://example.es.region.aws.found.io:443')",
default=None,
)
ELASTICSEARCH_API_KEY: str | None = Field(description="API key for authenticating with Elastic Cloud", default=None)
ELASTICSEARCH_API_KEY: Optional[str] = Field(
description="API key for authenticating with Elastic Cloud", default=None
)
# Common options
ELASTICSEARCH_CA_CERTS: str | None = Field(
ELASTICSEARCH_CA_CERTS: Optional[str] = Field(
description="Path to CA certificate file for SSL verification", default=None
)
ELASTICSEARCH_VERIFY_CERTS: bool = Field(

View File

@@ -1,3 +1,5 @@
from typing import Optional
from pydantic import Field
from pydantic_settings import BaseSettings
@@ -7,17 +9,17 @@ class HuaweiCloudConfig(BaseSettings):
Configuration settings for Huawei cloud search service
"""
HUAWEI_CLOUD_HOSTS: str | None = Field(
HUAWEI_CLOUD_HOSTS: Optional[str] = Field(
description="Hostname or IP address of the Huawei cloud search service instance",
default=None,
)
HUAWEI_CLOUD_USER: str | None = Field(
HUAWEI_CLOUD_USER: Optional[str] = Field(
description="Username for authenticating with Huawei cloud search service",
default=None,
)
HUAWEI_CLOUD_PASSWORD: str | None = Field(
HUAWEI_CLOUD_PASSWORD: Optional[str] = Field(
description="Password for authenticating with Huawei cloud search service",
default=None,
)

View File

@@ -1,3 +1,5 @@
from typing import Optional
from pydantic import Field
from pydantic_settings import BaseSettings
@@ -7,27 +9,27 @@ class LindormConfig(BaseSettings):
Lindorm configs
"""
LINDORM_URL: str | None = Field(
LINDORM_URL: Optional[str] = Field(
description="Lindorm url",
default=None,
)
LINDORM_USERNAME: str | None = Field(
LINDORM_USERNAME: Optional[str] = Field(
description="Lindorm user",
default=None,
)
LINDORM_PASSWORD: str | None = Field(
LINDORM_PASSWORD: Optional[str] = Field(
description="Lindorm password",
default=None,
)
LINDORM_INDEX_TYPE: str | None = Field(
DEFAULT_INDEX_TYPE: Optional[str] = Field(
description="Lindorm Vector Index Type, hnsw or flat is available in dify",
default="hnsw",
)
LINDORM_DISTANCE_TYPE: str | None = Field(
DEFAULT_DISTANCE_TYPE: Optional[str] = Field(
description="Vector Distance Type, support l2, cosinesimil, innerproduct", default="l2"
)
LINDORM_USING_UGC: bool | None = Field(
description="Using UGC index will store indexes with the same IndexType/Dimension in a single big index.",
default=True,
USING_UGC_INDEX: Optional[bool] = Field(
description="Using UGC index will store the same type of Index in a single index but can retrieve separately.",
default=False,
)
LINDORM_QUERY_TIMEOUT: float | None = Field(description="The lindorm search request timeout (s)", default=2.0)
LINDORM_QUERY_TIMEOUT: Optional[float] = Field(description="The lindorm search request timeout (s)", default=2.0)
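
This hunk renames fields (`LINDORM_INDEX_TYPE` versus `DEFAULT_INDEX_TYPE`, and so on), and for a `BaseSettings` class the field name is also the environment variable it binds to, so the rename changes which variable deployments must set. A sketch using the names from the hunk:

    import os

    from pydantic_settings import BaseSettings

    class LindormDemo(BaseSettings):  # hypothetical class for illustration
        DEFAULT_INDEX_TYPE: str = "hnsw"

    os.environ["LINDORM_INDEX_TYPE"] = "flat"  # not consulted under this field name
    os.environ["DEFAULT_INDEX_TYPE"] = "flat"  # this is the binding that applies
    print(LindormDemo().DEFAULT_INDEX_TYPE)    # "flat"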

View File

@@ -1,8 +1,7 @@
from pydantic import Field
from pydantic_settings import BaseSettings
from pydantic import BaseModel, Field
class MatrixoneConfig(BaseSettings):
class MatrixoneConfig(BaseModel):
"""Matrixone vector database configuration."""
MATRIXONE_HOST: str = Field(default="localhost", description="Host address of the Matrixone server")

View File

@@ -1,3 +1,5 @@
from typing import Optional
from pydantic import Field
from pydantic_settings import BaseSettings
@@ -7,22 +9,22 @@ class MilvusConfig(BaseSettings):
Configuration settings for Milvus vector database
"""
MILVUS_URI: str | None = Field(
MILVUS_URI: Optional[str] = Field(
description="URI for connecting to the Milvus server (e.g., 'http://localhost:19530' or 'https://milvus-instance.example.com:19530')",
default="http://127.0.0.1:19530",
)
MILVUS_TOKEN: str | None = Field(
MILVUS_TOKEN: Optional[str] = Field(
description="Authentication token for Milvus, if token-based authentication is enabled",
default=None,
)
MILVUS_USER: str | None = Field(
MILVUS_USER: Optional[str] = Field(
description="Username for authenticating with Milvus, if username/password authentication is enabled",
default=None,
)
MILVUS_PASSWORD: str | None = Field(
MILVUS_PASSWORD: Optional[str] = Field(
description="Password for authenticating with Milvus, if username/password authentication is enabled",
default=None,
)
@@ -38,7 +40,7 @@ class MilvusConfig(BaseSettings):
default=True,
)
MILVUS_ANALYZER_PARAMS: str | None = Field(
MILVUS_ANALYZER_PARAMS: Optional[str] = Field(
description='Milvus text analyzer parameters, e.g., {"type": "chinese"} for Chinese segmentation support.',
default=None,
)

View File

@@ -1,3 +1,5 @@
from typing import Optional
from pydantic import Field, PositiveInt
from pydantic_settings import BaseSettings
@@ -7,27 +9,27 @@ class OceanBaseVectorConfig(BaseSettings):
Configuration settings for OceanBase Vector database
"""
OCEANBASE_VECTOR_HOST: str | None = Field(
OCEANBASE_VECTOR_HOST: Optional[str] = Field(
description="Hostname or IP address of the OceanBase Vector server (e.g. 'localhost')",
default=None,
)
OCEANBASE_VECTOR_PORT: PositiveInt | None = Field(
OCEANBASE_VECTOR_PORT: Optional[PositiveInt] = Field(
description="Port number on which the OceanBase Vector server is listening (default is 2881)",
default=2881,
)
OCEANBASE_VECTOR_USER: str | None = Field(
OCEANBASE_VECTOR_USER: Optional[str] = Field(
description="Username for authenticating with the OceanBase Vector database",
default=None,
)
OCEANBASE_VECTOR_PASSWORD: str | None = Field(
OCEANBASE_VECTOR_PASSWORD: Optional[str] = Field(
description="Password for authenticating with the OceanBase Vector database",
default=None,
)
OCEANBASE_VECTOR_DATABASE: str | None = Field(
OCEANBASE_VECTOR_DATABASE: Optional[str] = Field(
description="Name of the OceanBase Vector database to connect to",
default=None,
)

View File

@@ -1,3 +1,5 @@
from typing import Optional
from pydantic import Field, PositiveInt
from pydantic_settings import BaseSettings
@@ -7,7 +9,7 @@ class OpenGaussConfig(BaseSettings):
Configuration settings for OpenGauss
"""
OPENGAUSS_HOST: str | None = Field(
OPENGAUSS_HOST: Optional[str] = Field(
description="Hostname or IP address of the OpenGauss server(e.g., 'localhost')",
default=None,
)
@@ -17,17 +19,17 @@ class OpenGaussConfig(BaseSettings):
default=6600,
)
OPENGAUSS_USER: str | None = Field(
OPENGAUSS_USER: Optional[str] = Field(
description="Username for authenticating with the OpenGauss database",
default=None,
)
OPENGAUSS_PASSWORD: str | None = Field(
OPENGAUSS_PASSWORD: Optional[str] = Field(
description="Password for authenticating with the OpenGauss database",
default=None,
)
OPENGAUSS_DATABASE: str | None = Field(
OPENGAUSS_DATABASE: Optional[str] = Field(
description="Name of the OpenGauss database to connect to",
default=None,
)

View File

@@ -1,5 +1,5 @@
from enum import Enum
from typing import Literal
import enum
from typing import Literal, Optional
from pydantic import Field, PositiveInt
from pydantic_settings import BaseSettings
@@ -10,7 +10,7 @@ class OpenSearchConfig(BaseSettings):
Configuration settings for OpenSearch
"""
class AuthMethod(Enum):
class AuthMethod(enum.StrEnum):
"""
Authentication method for OpenSearch
"""
@@ -18,7 +18,7 @@ class OpenSearchConfig(BaseSettings):
BASIC = "basic"
AWS_MANAGED_IAM = "aws_managed_iam"
OPENSEARCH_HOST: str | None = Field(
OPENSEARCH_HOST: Optional[str] = Field(
description="Hostname or IP address of the OpenSearch server (e.g., 'localhost' or 'opensearch.example.com')",
default=None,
)
@@ -43,21 +43,21 @@ class OpenSearchConfig(BaseSettings):
default=AuthMethod.BASIC,
)
OPENSEARCH_USER: str | None = Field(
OPENSEARCH_USER: Optional[str] = Field(
description="Username for authenticating with OpenSearch",
default=None,
)
OPENSEARCH_PASSWORD: str | None = Field(
OPENSEARCH_PASSWORD: Optional[str] = Field(
description="Password for authenticating with OpenSearch",
default=None,
)
OPENSEARCH_AWS_REGION: str | None = Field(
OPENSEARCH_AWS_REGION: Optional[str] = Field(
description="AWS region for OpenSearch (e.g. 'us-west-2')",
default=None,
)
OPENSEARCH_AWS_SERVICE: Literal["es", "aoss"] | None = Field(
OPENSEARCH_AWS_SERVICE: Optional[Literal["es", "aoss"]] = Field(
description="AWS service for OpenSearch (e.g. 'aoss' for OpenSearch Serverless)", default=None
)
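
The `AuthMethod` base class moves between `enum.Enum` and `enum.StrEnum` here. The distinction matters when the configured value is compared against raw strings: `StrEnum` members are strings, plain `Enum` members are not. A sketch (Python 3.11+ for `StrEnum`):

    import enum

    class PlainAuth(enum.Enum):
        BASIC = "basic"

    class StrAuth(enum.StrEnum):
        BASIC = "basic"

    print(PlainAuth.BASIC == "basic")  # False: a plain Enum member is not a str
    print(StrAuth.BASIC == "basic")    # True: a StrEnum member compares as a str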

View File

@@ -1,3 +1,5 @@
from typing import Optional
from pydantic import Field
from pydantic_settings import BaseSettings
@@ -7,33 +9,33 @@ class OracleConfig(BaseSettings):
Configuration settings for Oracle database
"""
ORACLE_USER: str | None = Field(
ORACLE_USER: Optional[str] = Field(
description="Username for authenticating with the Oracle database",
default=None,
)
ORACLE_PASSWORD: str | None = Field(
ORACLE_PASSWORD: Optional[str] = Field(
description="Password for authenticating with the Oracle database",
default=None,
)
ORACLE_DSN: str | None = Field(
ORACLE_DSN: Optional[str] = Field(
description="Oracle database connection string. For traditional database, use format 'host:port/service_name'. "
"For autonomous database, use the service name from tnsnames.ora in the wallet",
default=None,
)
ORACLE_CONFIG_DIR: str | None = Field(
ORACLE_CONFIG_DIR: Optional[str] = Field(
description="Directory containing the tnsnames.ora configuration file. Only used in thin mode connection",
default=None,
)
ORACLE_WALLET_LOCATION: str | None = Field(
ORACLE_WALLET_LOCATION: Optional[str] = Field(
description="Oracle wallet directory path containing the wallet files for secure connection",
default=None,
)
ORACLE_WALLET_PASSWORD: str | None = Field(
ORACLE_WALLET_PASSWORD: Optional[str] = Field(
description="Password to decrypt the Oracle wallet, if it is encrypted",
default=None,
)

View File

@@ -1,3 +1,5 @@
from typing import Optional
from pydantic import Field, PositiveInt
from pydantic_settings import BaseSettings
@@ -7,7 +9,7 @@ class PGVectorConfig(BaseSettings):
Configuration settings for PGVector (PostgreSQL with vector extension)
"""
PGVECTOR_HOST: str | None = Field(
PGVECTOR_HOST: Optional[str] = Field(
description="Hostname or IP address of the PostgreSQL server with PGVector extension (e.g., 'localhost')",
default=None,
)
@@ -17,17 +19,17 @@ class PGVectorConfig(BaseSettings):
default=5433,
)
PGVECTOR_USER: str | None = Field(
PGVECTOR_USER: Optional[str] = Field(
description="Username for authenticating with the PostgreSQL database",
default=None,
)
PGVECTOR_PASSWORD: str | None = Field(
PGVECTOR_PASSWORD: Optional[str] = Field(
description="Password for authenticating with the PostgreSQL database",
default=None,
)
PGVECTOR_DATABASE: str | None = Field(
PGVECTOR_DATABASE: Optional[str] = Field(
description="Name of the PostgreSQL database to connect to",
default=None,
)

View File

@@ -1,3 +1,5 @@
from typing import Optional
from pydantic import Field, PositiveInt
from pydantic_settings import BaseSettings
@@ -7,7 +9,7 @@ class PGVectoRSConfig(BaseSettings):
Configuration settings for PGVecto.RS (Rust-based vector extension for PostgreSQL)
"""
PGVECTO_RS_HOST: str | None = Field(
PGVECTO_RS_HOST: Optional[str] = Field(
description="Hostname or IP address of the PostgreSQL server with PGVecto.RS extension (e.g., 'localhost')",
default=None,
)
@@ -17,17 +19,17 @@ class PGVectoRSConfig(BaseSettings):
default=5431,
)
PGVECTO_RS_USER: str | None = Field(
PGVECTO_RS_USER: Optional[str] = Field(
description="Username for authenticating with the PostgreSQL database using PGVecto.RS",
default=None,
)
PGVECTO_RS_PASSWORD: str | None = Field(
PGVECTO_RS_PASSWORD: Optional[str] = Field(
description="Password for authenticating with the PostgreSQL database using PGVecto.RS",
default=None,
)
PGVECTO_RS_DATABASE: str | None = Field(
PGVECTO_RS_DATABASE: Optional[str] = Field(
description="Name of the PostgreSQL database with PGVecto.RS extension to connect to",
default=None,
)

View File

@@ -1,3 +1,5 @@
from typing import Optional
from pydantic import Field, NonNegativeInt, PositiveInt
from pydantic_settings import BaseSettings
@@ -7,12 +9,12 @@ class QdrantConfig(BaseSettings):
Configuration settings for Qdrant vector database
"""
QDRANT_URL: str | None = Field(
QDRANT_URL: Optional[str] = Field(
description="URL of the Qdrant server (e.g., 'http://localhost:6333' or 'https://qdrant.example.com')",
default=None,
)
QDRANT_API_KEY: str | None = Field(
QDRANT_API_KEY: Optional[str] = Field(
description="API key for authenticating with the Qdrant server",
default=None,
)

View File

@@ -1,3 +1,5 @@
from typing import Optional
from pydantic import Field, PositiveInt
from pydantic_settings import BaseSettings
@@ -7,7 +9,7 @@ class RelytConfig(BaseSettings):
Configuration settings for Relyt database
"""
RELYT_HOST: str | None = Field(
RELYT_HOST: Optional[str] = Field(
description="Hostname or IP address of the Relyt server (e.g., 'localhost' or 'relyt.example.com')",
default=None,
)
@@ -17,17 +19,17 @@ class RelytConfig(BaseSettings):
default=9200,
)
RELYT_USER: str | None = Field(
RELYT_USER: Optional[str] = Field(
description="Username for authenticating with the Relyt database",
default=None,
)
RELYT_PASSWORD: str | None = Field(
RELYT_PASSWORD: Optional[str] = Field(
description="Password for authenticating with the Relyt database",
default=None,
)
RELYT_DATABASE: str | None = Field(
RELYT_DATABASE: Optional[str] = Field(
description="Name of the Relyt database to connect to (default is 'default')",
default="default",
)

View File

@@ -1,3 +1,5 @@
from typing import Optional
from pydantic import Field
from pydantic_settings import BaseSettings
@@ -7,22 +9,22 @@ class TableStoreConfig(BaseSettings):
Configuration settings for TableStore.
"""
TABLESTORE_ENDPOINT: str | None = Field(
TABLESTORE_ENDPOINT: Optional[str] = Field(
description="Endpoint address of the TableStore server (e.g. 'https://instance-name.cn-hangzhou.ots.aliyuncs.com')",
default=None,
)
TABLESTORE_INSTANCE_NAME: str | None = Field(
TABLESTORE_INSTANCE_NAME: Optional[str] = Field(
description="Instance name to access TableStore server (eg. 'instance-name')",
default=None,
)
TABLESTORE_ACCESS_KEY_ID: str | None = Field(
TABLESTORE_ACCESS_KEY_ID: Optional[str] = Field(
description="AccessKey id for the instance name",
default=None,
)
TABLESTORE_ACCESS_KEY_SECRET: str | None = Field(
TABLESTORE_ACCESS_KEY_SECRET: Optional[str] = Field(
description="AccessKey secret for the instance name",
default=None,
)

View File

@@ -1,3 +1,5 @@
from typing import Optional
from pydantic import Field, NonNegativeInt, PositiveInt
from pydantic_settings import BaseSettings
@@ -7,12 +9,12 @@ class TencentVectorDBConfig(BaseSettings):
Configuration settings for Tencent Vector Database
"""
TENCENT_VECTOR_DB_URL: str | None = Field(
TENCENT_VECTOR_DB_URL: Optional[str] = Field(
description="URL of the Tencent Vector Database service (e.g., 'https://vectordb.tencentcloudapi.com')",
default=None,
)
TENCENT_VECTOR_DB_API_KEY: str | None = Field(
TENCENT_VECTOR_DB_API_KEY: Optional[str] = Field(
description="API key for authenticating with the Tencent Vector Database service",
default=None,
)
@@ -22,12 +24,12 @@ class TencentVectorDBConfig(BaseSettings):
default=30,
)
TENCENT_VECTOR_DB_USERNAME: str | None = Field(
TENCENT_VECTOR_DB_USERNAME: Optional[str] = Field(
description="Username for authenticating with the Tencent Vector Database (if required)",
default=None,
)
TENCENT_VECTOR_DB_PASSWORD: str | None = Field(
TENCENT_VECTOR_DB_PASSWORD: Optional[str] = Field(
description="Password for authenticating with the Tencent Vector Database (if required)",
default=None,
)
@@ -42,7 +44,7 @@ class TencentVectorDBConfig(BaseSettings):
default=2,
)
TENCENT_VECTOR_DB_DATABASE: str | None = Field(
TENCENT_VECTOR_DB_DATABASE: Optional[str] = Field(
description="Name of the specific Tencent Vector Database to connect to",
default=None,
)

View File

@@ -1,3 +1,5 @@
from typing import Optional
from pydantic import Field, NonNegativeInt, PositiveInt
from pydantic_settings import BaseSettings
@@ -7,12 +9,12 @@ class TidbOnQdrantConfig(BaseSettings):
Tidb on Qdrant configs
"""
TIDB_ON_QDRANT_URL: str | None = Field(
TIDB_ON_QDRANT_URL: Optional[str] = Field(
description="Tidb on Qdrant url",
default=None,
)
TIDB_ON_QDRANT_API_KEY: str | None = Field(
TIDB_ON_QDRANT_API_KEY: Optional[str] = Field(
description="Tidb on Qdrant api key",
default=None,
)
@@ -32,37 +34,37 @@ class TidbOnQdrantConfig(BaseSettings):
default=6334,
)
TIDB_PUBLIC_KEY: str | None = Field(
TIDB_PUBLIC_KEY: Optional[str] = Field(
description="Tidb account public key",
default=None,
)
TIDB_PRIVATE_KEY: str | None = Field(
TIDB_PRIVATE_KEY: Optional[str] = Field(
description="Tidb account private key",
default=None,
)
TIDB_API_URL: str | None = Field(
TIDB_API_URL: Optional[str] = Field(
description="Tidb API url",
default=None,
)
TIDB_IAM_API_URL: str | None = Field(
TIDB_IAM_API_URL: Optional[str] = Field(
description="Tidb IAM API url",
default=None,
)
TIDB_REGION: str | None = Field(
TIDB_REGION: Optional[str] = Field(
description="Tidb serverless region",
default="regions/aws-us-east-1",
)
TIDB_PROJECT_ID: str | None = Field(
TIDB_PROJECT_ID: Optional[str] = Field(
description="Tidb project id",
default=None,
)
TIDB_SPEND_LIMIT: int | None = Field(
TIDB_SPEND_LIMIT: Optional[int] = Field(
description="Tidb spend limit",
default=100,
)

View File

@@ -1,3 +1,5 @@
from typing import Optional
from pydantic import Field, PositiveInt
from pydantic_settings import BaseSettings
@@ -7,27 +9,27 @@ class TiDBVectorConfig(BaseSettings):
Configuration settings for TiDB Vector database
"""
TIDB_VECTOR_HOST: str | None = Field(
TIDB_VECTOR_HOST: Optional[str] = Field(
description="Hostname or IP address of the TiDB Vector server (e.g., 'localhost' or 'tidb.example.com')",
default=None,
)
TIDB_VECTOR_PORT: PositiveInt | None = Field(
TIDB_VECTOR_PORT: Optional[PositiveInt] = Field(
description="Port number on which the TiDB Vector server is listening (default is 4000)",
default=4000,
)
TIDB_VECTOR_USER: str | None = Field(
TIDB_VECTOR_USER: Optional[str] = Field(
description="Username for authenticating with the TiDB Vector database",
default=None,
)
TIDB_VECTOR_PASSWORD: str | None = Field(
TIDB_VECTOR_PASSWORD: Optional[str] = Field(
description="Password for authenticating with the TiDB Vector database",
default=None,
)
TIDB_VECTOR_DATABASE: str | None = Field(
TIDB_VECTOR_DATABASE: Optional[str] = Field(
description="Name of the TiDB Vector database to connect to",
default=None,
)

View File

@@ -1,3 +1,5 @@
from typing import Optional
from pydantic import Field
from pydantic_settings import BaseSettings
@@ -7,12 +9,12 @@ class UpstashConfig(BaseSettings):
Configuration settings for Upstash vector database
"""
UPSTASH_VECTOR_URL: str | None = Field(
UPSTASH_VECTOR_URL: Optional[str] = Field(
description="URL of the upstash server (e.g., 'https://vector.upstash.io')",
default=None,
)
UPSTASH_VECTOR_TOKEN: str | None = Field(
UPSTASH_VECTOR_TOKEN: Optional[str] = Field(
description="Token for authenticating with the upstash server",
default=None,
)

View File

@@ -1,3 +1,5 @@
from typing import Optional
from pydantic import Field, PositiveInt
from pydantic_settings import BaseSettings
@@ -7,7 +9,7 @@ class VastbaseVectorConfig(BaseSettings):
Configuration settings for Vector (Vastbase with vector extension)
"""
VASTBASE_HOST: str | None = Field(
VASTBASE_HOST: Optional[str] = Field(
description="Hostname or IP address of the Vastbase server with Vector extension (e.g., 'localhost')",
default=None,
)
@@ -17,17 +19,17 @@ class VastbaseVectorConfig(BaseSettings):
default=5432,
)
VASTBASE_USER: str | None = Field(
VASTBASE_USER: Optional[str] = Field(
description="Username for authenticating with the Vastbase database",
default=None,
)
VASTBASE_PASSWORD: str | None = Field(
VASTBASE_PASSWORD: Optional[str] = Field(
description="Password for authenticating with the Vastbase database",
default=None,
)
VASTBASE_DATABASE: str | None = Field(
VASTBASE_DATABASE: Optional[str] = Field(
description="Name of the Vastbase database to connect to",
default=None,
)

View File

@@ -1,3 +1,5 @@
from typing import Optional
from pydantic import Field
from pydantic_settings import BaseSettings
@@ -9,14 +11,14 @@ class VikingDBConfig(BaseSettings):
https://www.volcengine.com/docs/6291/65568
"""
VIKINGDB_ACCESS_KEY: str | None = Field(
VIKINGDB_ACCESS_KEY: Optional[str] = Field(
description="The Access Key provided by Volcengine VikingDB for API authentication."
"Refer to the following documentation for details on obtaining credentials:"
"https://www.volcengine.com/docs/6291/65568",
default=None,
)
VIKINGDB_SECRET_KEY: str | None = Field(
VIKINGDB_SECRET_KEY: Optional[str] = Field(
description="The Secret Key provided by Volcengine VikingDB for API authentication.",
default=None,
)

View File

@@ -1,3 +1,5 @@
from typing import Optional
from pydantic import Field, PositiveInt
from pydantic_settings import BaseSettings
@@ -7,12 +9,12 @@ class WeaviateConfig(BaseSettings):
Configuration settings for Weaviate vector database
"""
WEAVIATE_ENDPOINT: str | None = Field(
WEAVIATE_ENDPOINT: Optional[str] = Field(
description="URL of the Weaviate server (e.g., 'http://localhost:8080' or 'https://weaviate.example.com')",
default=None,
)
WEAVIATE_API_KEY: str | None = Field(
WEAVIATE_API_KEY: Optional[str] = Field(
description="API key for authenticating with the Weaviate server",
default=None,
)

View File

@@ -1,6 +1,6 @@
from pydantic import Field
from configs.packaging.pyproject import PyProjectTomlConfig
from configs.packaging.pyproject import PyProjectConfig, PyProjectTomlConfig
class PackagingInfo(PyProjectTomlConfig):

View File

@@ -1,5 +1,5 @@
from collections.abc import Mapping
from typing import Any
from typing import Any, Optional
from pydantic import Field
from pydantic.fields import FieldInfo
@@ -15,22 +15,22 @@ class ApolloSettingsSourceInfo(BaseSettings):
Packaging build information
"""
APOLLO_APP_ID: str | None = Field(
APOLLO_APP_ID: Optional[str] = Field(
description="apollo app_id",
default=None,
)
APOLLO_CLUSTER: str | None = Field(
APOLLO_CLUSTER: Optional[str] = Field(
description="apollo cluster",
default=None,
)
APOLLO_CONFIG_URL: str | None = Field(
APOLLO_CONFIG_URL: Optional[str] = Field(
description="apollo config url",
default=None,
)
APOLLO_NAMESPACE: str | None = Field(
APOLLO_NAMESPACE: Optional[str] = Field(
description="apollo namespace",
default=None,
)

View File

@@ -4,9 +4,8 @@ import logging
import os
import threading
import time
from collections.abc import Callable, Mapping
from collections.abc import Mapping
from pathlib import Path
from typing import Any
from .python_3x import http_request, makedirs_wrapper
from .utils import (
@@ -26,13 +25,13 @@ logger = logging.getLogger(__name__)
class ApolloClient:
def __init__(
self,
config_url: str,
app_id: str,
cluster: str = "default",
secret: str = "",
start_hot_update: bool = True,
change_listener: Callable[[str, str, str, Any], None] | None = None,
_notification_map: dict[str, int] | None = None,
config_url,
app_id,
cluster="default",
secret="",
start_hot_update=True,
change_listener=None,
_notification_map=None,
):
# Core routing parameters
self.config_url = config_url
@@ -48,17 +47,17 @@ class ApolloClient:
# Private control variables
self._cycle_time = 5
self._stopping = False
self._cache: dict[str, dict[str, Any]] = {}
self._no_key: dict[str, str] = {}
self._hash: dict[str, str] = {}
self._cache = {}
self._no_key = {}
self._hash = {}
self._pull_timeout = 75
self._cache_file_path = os.path.expanduser("~") + "/.dify/config/remote-settings/apollo/cache/"
self._long_poll_thread: threading.Thread | None = None
self._long_poll_thread = None
self._change_listener = change_listener # "add" "delete" "update"
if _notification_map is None:
_notification_map = {"application": -1}
self._notification_map = _notification_map
self.last_release_key: str | None = None
self.last_release_key = None
# Private startup method
self._path_checker()
if start_hot_update:
@@ -69,7 +68,7 @@ class ApolloClient:
heartbeat.daemon = True
heartbeat.start()
def get_json_from_net(self, namespace: str = "application") -> dict[str, Any] | None:
def get_json_from_net(self, namespace="application"):
url = "{}/configs/{}/{}/{}?releaseKey={}&ip={}".format(
self.config_url, self.app_id, self.cluster, namespace, "", self.ip
)
@@ -89,7 +88,7 @@ class ApolloClient:
logger.exception("an error occurred in get_json_from_net")
return None
def get_value(self, key: str, default_val: Any = None, namespace: str = "application") -> Any:
def get_value(self, key, default_val=None, namespace="application"):
try:
# read memory configuration
namespace_cache = self._cache.get(namespace)
@@ -105,8 +104,7 @@ class ApolloClient:
namespace_data = self.get_json_from_net(namespace)
val = get_value_from_dict(namespace_data, key)
if val is not None:
if namespace_data is not None:
self._update_cache_and_file(namespace_data, namespace)
self._update_cache_and_file(namespace_data, namespace)
return val
# read the file configuration
@@ -128,23 +126,23 @@ class ApolloClient:
# to ensure the real-time correctness of the function call.
# If the user does not have the same default val twice
# and the default val is used here, there may be a problem.
def _set_local_cache_none(self, namespace: str, key: str) -> None:
def _set_local_cache_none(self, namespace, key):
no_key = no_key_cache_key(namespace, key)
self._no_key[no_key] = key
def _start_hot_update(self) -> None:
def _start_hot_update(self):
self._long_poll_thread = threading.Thread(target=self._listener)
# When the asynchronous thread is started, the daemon thread will automatically exit
# when the main thread is launched.
self._long_poll_thread.daemon = True
self._long_poll_thread.start()
def stop(self) -> None:
def stop(self):
self._stopping = True
logger.info("Stopping listener...")
# Call the set callback function, and if it is abnormal, try it out
def _call_listener(self, namespace: str, old_kv: dict[str, Any] | None, new_kv: dict[str, Any] | None) -> None:
def _call_listener(self, namespace, old_kv, new_kv):
if self._change_listener is None:
return
if old_kv is None:
@@ -170,12 +168,12 @@ class ApolloClient:
except BaseException as e:
logger.warning(str(e))
def _path_checker(self) -> None:
def _path_checker(self):
if not os.path.isdir(self._cache_file_path):
makedirs_wrapper(self._cache_file_path)
# update the local cache and file cache
def _update_cache_and_file(self, namespace_data: dict[str, Any], namespace: str = "application") -> None:
def _update_cache_and_file(self, namespace_data, namespace="application"):
# update the local cache
self._cache[namespace] = namespace_data
# update the file cache
@@ -189,7 +187,7 @@ class ApolloClient:
self._hash[namespace] = new_hash
# get the configuration from the local file
def _get_local_cache(self, namespace: str = "application") -> dict[str, Any]:
def _get_local_cache(self, namespace="application"):
cache_file_path = os.path.join(self._cache_file_path, f"{self.app_id}_configuration_{namespace}.txt")
if os.path.isfile(cache_file_path):
with open(cache_file_path) as f:
@@ -197,8 +195,8 @@ class ApolloClient:
return result
return {}
def _long_poll(self) -> None:
notifications: list[dict[str, Any]] = []
def _long_poll(self):
notifications = []
for key in self._cache:
namespace_data = self._cache[key]
notification_id = -1
@@ -238,7 +236,7 @@ class ApolloClient:
except Exception as e:
logger.warning(str(e))
def _get_net_and_set_local(self, namespace: str, n_id: int, call_change: bool = False) -> None:
def _get_net_and_set_local(self, namespace, n_id, call_change=False):
namespace_data = self.get_json_from_net(namespace)
if not namespace_data:
return
@@ -250,7 +248,7 @@ class ApolloClient:
new_kv = namespace_data.get(CONFIGURATIONS)
self._call_listener(namespace, old_kv, new_kv)
def _listener(self) -> None:
def _listener(self):
logger.info("start long_poll")
while not self._stopping:
self._long_poll()
@@ -268,13 +266,13 @@ class ApolloClient:
headers["Timestamp"] = time_unix_now
return headers
def _heart_beat(self) -> None:
def _heart_beat(self):
while not self._stopping:
for namespace in self._notification_map:
self._do_heart_beat(namespace)
time.sleep(60 * 10) # 10 minutes
def _do_heart_beat(self, namespace: str) -> None:
def _do_heart_beat(self, namespace):
url = f"{self.config_url}/configs/{self.app_id}/{self.cluster}/{namespace}?ip={self.ip}"
try:
code, body = http_request(url, timeout=3, headers=self._sign_headers(url))
@@ -294,7 +292,7 @@ class ApolloClient:
logger.exception("an error occurred in _do_heart_beat")
return None
def get_all_dicts(self, namespace: str) -> dict[str, Any] | None:
def get_all_dicts(self, namespace):
namespace_data = self._cache.get(namespace)
if namespace_data is None:
net_namespace_data = self.get_json_from_net(namespace)
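
The two sides of this hunk differ mainly in type annotations on `ApolloClient`; the constructor parameters are the same either way. A hypothetical usage sketch based on the signature shown, with placeholder connection values (the module path is an assumption, and hot update is disabled):

    from configs.remote_settings_sources.apollo.client import ApolloClient  # assumed path

    client = ApolloClient(
        config_url="http://localhost:8080",  # placeholder Apollo config service
        app_id="dify-api",                   # placeholder app id
        cluster="default",
        start_hot_update=False,              # skip the long-poll listener thread
    )
    value = client.get_value("REDIS_HOST", default_val="127.0.0.1", namespace="application")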

View File

@@ -2,8 +2,6 @@ import logging
import os
import ssl
import urllib.request
from collections.abc import Mapping
from typing import Any
from urllib import parse
from urllib.error import HTTPError
@@ -21,9 +19,9 @@ urllib.request.install_opener(opener)
logger = logging.getLogger(__name__)
def http_request(url: str, timeout: int | float, headers: Mapping[str, str] = {}) -> tuple[int, str | None]:
def http_request(url, timeout, headers={}):
try:
request = urllib.request.Request(url, headers=dict(headers))
request = urllib.request.Request(url, headers=headers)
res = urllib.request.urlopen(request, timeout=timeout)
body = res.read().decode("utf-8")
return res.code, body
@@ -35,9 +33,9 @@ def http_request(url: str, timeout: int | float, headers: Mapping[str, str] = {}
raise e
def url_encode(params: dict[str, Any]) -> str:
def url_encode(params):
return parse.urlencode(params)
def makedirs_wrapper(path: str) -> None:
def makedirs_wrapper(path):
os.makedirs(path, exist_ok=True)
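
The untyped variant of `http_request` keeps `headers={}` as a mutable default argument, which Python evaluates once and shares across calls; the annotated variant sidesteps this by copying with `dict(headers)` before use. A sketch of the pitfall:

    def request_a(headers={}):
        headers.setdefault("User-Agent", "demo")
        return headers  # every call returns the same shared dict

    def request_b(headers=None):
        headers = dict(headers or {})  # fresh copy per call
        headers.setdefault("User-Agent", "demo")
        return headers

    print(request_a() is request_a())  # True: the default dict is shared
    print(request_b() is request_b())  # False: each call gets its own dict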

View File

@@ -1,6 +1,5 @@
import hashlib
import socket
from typing import Any
from .python_3x import url_encode
@@ -11,7 +10,7 @@ NAMESPACE_NAME = "namespaceName"
# add timestamps uris and keys
def signature(timestamp: str, uri: str, secret: str) -> str:
def signature(timestamp, uri, secret):
import base64
import hmac
@@ -20,16 +19,16 @@ def signature(timestamp: str, uri: str, secret: str) -> str:
return base64.b64encode(hmac_code).decode()
def url_encode_wrapper(params: dict[str, Any]) -> str:
def url_encode_wrapper(params):
return url_encode(params)
def no_key_cache_key(namespace: str, key: str) -> str:
def no_key_cache_key(namespace, key):
return f"{namespace}{len(namespace)}{key}"
# Returns whether the obtained value is obtained, and None if it does not
def get_value_from_dict(namespace_cache: dict[str, Any] | None, key: str) -> Any | None:
def get_value_from_dict(namespace_cache, key):
if namespace_cache:
kv_data = namespace_cache.get(CONFIGURATIONS)
if kv_data is None:
@@ -39,7 +38,7 @@ def get_value_from_dict(namespace_cache: dict[str, Any] | None, key: str) -> Any
return None
def init_ip() -> str:
def init_ip():
ip = ""
s = None
try:
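
The `signature` helper above implements Apollo's request signing: an HMAC-SHA1 over the timestamp and request path, base64-encoded. A minimal self-contained sketch, assuming the conventional Apollo string-to-sign of "timestamp, newline, uri":

    import base64
    import hmac
    import time

    def apollo_signature(timestamp: str, uri: str, secret: str) -> str:
        string_to_sign = timestamp + "\n" + uri  # assumed Apollo convention
        hmac_code = hmac.new(secret.encode(), string_to_sign.encode(), "sha1").digest()
        return base64.b64encode(hmac_code).decode()

    ts = str(int(time.time() * 1000))
    print(apollo_signature(ts, "/configs/demo-app/default/application", "my-secret"))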

View File

@@ -11,5 +11,5 @@ class RemoteSettingsSource:
def get_field_value(self, field: FieldInfo, field_name: str) -> tuple[Any, str, bool]:
raise NotImplementedError
def prepare_field_value(self, field_name: str, field: FieldInfo, value: Any, value_is_complex: bool):
def prepare_field_value(self, field_name: str, field: FieldInfo, value: Any, value_is_complex: bool) -> Any:
return value
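
`RemoteSettingsSource` is the small contract that the Apollo source above and the Nacos source below implement: `get_field_value` returns a `(value, key, is_complex)` triple and `prepare_field_value` may post-process it. A hypothetical dict-backed source, for illustration only:

    from typing import Any

    from pydantic.fields import FieldInfo

    class DictSettingsSource:  # stands in for a RemoteSettingsSource subclass
        def __init__(self, data: dict[str, Any]):
            self.data = data

        def get_field_value(self, field: FieldInfo, field_name: str) -> tuple[Any, str, bool]:
            return self.data.get(field_name), field_name, False

        def prepare_field_value(self, field_name: str, field: FieldInfo, value: Any, value_is_complex: bool) -> Any:
            return value

    src = DictSettingsSource({"SOME_KEY": "value"})
    print(src.get_field_value(FieldInfo(), "SOME_KEY"))  # ('value', 'SOME_KEY', False)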

View File

@@ -11,16 +11,16 @@ logger = logging.getLogger(__name__)
from configs.remote_settings_sources.base import RemoteSettingsSource
from .utils import parse_config
from .utils import _parse_config
class NacosSettingsSource(RemoteSettingsSource):
def __init__(self, configs: Mapping[str, Any]):
self.configs = configs
self.remote_configs: dict[str, str] = {}
self.remote_configs: dict[str, Any] = {}
self.async_init()
def async_init(self) -> None:
def async_init(self):
data_id = os.getenv("DIFY_ENV_NACOS_DATA_ID", "dify-api-env.properties")
group = os.getenv("DIFY_ENV_NACOS_GROUP", "nacos-dify")
tenant = os.getenv("DIFY_ENV_NACOS_NAMESPACE", "")
@@ -29,19 +29,22 @@ class NacosSettingsSource(RemoteSettingsSource):
try:
content = NacosHttpClient().http_request("/nacos/v1/cs/configs", method="GET", headers={}, params=params)
self.remote_configs = self._parse_config(content)
except Exception:
except Exception as e:
logger.exception("[get-access-token] exception occurred")
raise
def _parse_config(self, content: str) -> dict[str, str]:
def _parse_config(self, content: str) -> dict:
if not content:
return {}
try:
return parse_config(content)
return _parse_config(self, content)
except Exception as e:
raise RuntimeError(f"Failed to parse config: {e}")
def get_field_value(self, field: FieldInfo, field_name: str) -> tuple[Any, str, bool]:
if not isinstance(self.remote_configs, dict):
raise ValueError(f"remote configs is not dict, but {type(self.remote_configs)}")
field_value = self.remote_configs.get(field_name)
if field_value is None:
return None, field_name, False

View File

@@ -17,26 +17,20 @@ class NacosHttpClient:
self.ak = os.getenv("DIFY_ENV_NACOS_ACCESS_KEY")
self.sk = os.getenv("DIFY_ENV_NACOS_SECRET_KEY")
self.server = os.getenv("DIFY_ENV_NACOS_SERVER_ADDR", "localhost:8848")
self.token: str | None = None
self.token = None
self.token_ttl = 18000
self.token_expire_time: float = 0
def http_request(
self, url: str, method: str = "GET", headers: dict[str, str] | None = None, params: dict[str, str] | None = None
) -> str:
if headers is None:
headers = {}
if params is None:
params = {}
def http_request(self, url, method="GET", headers=None, params=None):
try:
self._inject_auth_info(headers, params)
response = requests.request(method, url="http://" + self.server + url, headers=headers, params=params)
response.raise_for_status()
return response.text
except requests.RequestException as e:
except requests.exceptions.RequestException as e:
return f"Request to Nacos failed: {e}"
def _inject_auth_info(self, headers: dict[str, str], params: dict[str, str], module: str = "config") -> None:
def _inject_auth_info(self, headers, params, module="config"):
headers.update({"User-Agent": "Nacos-Http-Client-In-Dify:v0.0.1"})
if module == "login":
@@ -51,17 +45,16 @@ class NacosHttpClient:
headers["timeStamp"] = ts
if self.username and self.password:
self.get_access_token(force_refresh=False)
if self.token is not None:
params["accessToken"] = self.token
params["accessToken"] = self.token
def __do_sign(self, sign_str: str, sk: str) -> str:
def __do_sign(self, sign_str, sk):
return (
base64.encodebytes(hmac.new(sk.encode(), sign_str.encode(), digestmod=hashlib.sha1).digest())
.decode()
.strip()
)
def get_sign_str(self, group: str, tenant: str, ts: str) -> str:
def get_sign_str(self, group, tenant, ts):
sign_str = ""
if tenant:
sign_str = tenant + "+"
@@ -70,7 +63,7 @@ class NacosHttpClient:
sign_str += ts # Directly concatenate ts without conditional checks, because the nacos auth header forced it.
return sign_str
def get_access_token(self, force_refresh: bool = False) -> str | None:
def get_access_token(self, force_refresh=False):
current_time = time.time()
if self.token and not force_refresh and self.token_expire_time > current_time:
return self.token
@@ -84,7 +77,6 @@ class NacosHttpClient:
self.token = response_data.get("accessToken")
self.token_ttl = response_data.get("tokenTtl", 18000)
self.token_expire_time = current_time + self.token_ttl - 10
return self.token
except Exception:
except Exception as e:
logger.exception("[get-access-token] exception occur")
raise

View File

@@ -1,4 +1,4 @@
def parse_config(content: str) -> dict[str, str]:
def _parse_config(self, content: str) -> dict[str, str]:
config: dict[str, str] = {}
if not content:
return config
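
Both spellings of this parser consume Nacos config content in `.properties` form, one `key=value` pair per line. A self-contained sketch of that parsing logic, for illustration:

    def parse_properties(content: str) -> dict[str, str]:
        config: dict[str, str] = {}
        for line in content.splitlines():
            line = line.strip()
            if not line or line.startswith("#") or "=" not in line:
                continue  # skip blanks, comments, and malformed lines
            key, _, value = line.partition("=")
            config[key.strip()] = value.strip()
        return config

    print(parse_properties("CONSOLE_API_URL=http://127.0.0.1:5001\n# a comment\n"))
    # {'CONSOLE_API_URL': 'http://127.0.0.1:5001'}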

View File

@@ -16,14 +16,14 @@ AUDIO_EXTENSIONS = ["mp3", "m4a", "wav", "amr", "mpga"]
AUDIO_EXTENSIONS.extend([ext.upper() for ext in AUDIO_EXTENSIONS])
_doc_extensions: list[str]
if dify_config.ETL_TYPE == "Unstructured":
_doc_extensions = ["txt", "markdown", "md", "mdx", "pdf", "html", "htm", "xlsx", "xls", "vtt", "properties"]
_doc_extensions.extend(("doc", "docx", "csv", "eml", "msg", "pptx", "xml", "epub"))
DOCUMENT_EXTENSIONS = ["txt", "markdown", "md", "mdx", "pdf", "html", "htm", "xlsx", "xls", "vtt", "properties"]
DOCUMENT_EXTENSIONS.extend(("doc", "docx", "csv", "eml", "msg", "pptx", "xml", "epub"))
if dify_config.UNSTRUCTURED_API_URL:
_doc_extensions.append("ppt")
DOCUMENT_EXTENSIONS.append("ppt")
DOCUMENT_EXTENSIONS.extend([ext.upper() for ext in DOCUMENT_EXTENSIONS])
else:
_doc_extensions = [
DOCUMENT_EXTENSIONS = [
"txt",
"markdown",
"md",
@@ -38,4 +38,4 @@ else:
"vtt",
"properties",
]
DOCUMENT_EXTENSIONS = _doc_extensions + [ext.upper() for ext in _doc_extensions]
DOCUMENT_EXTENSIONS.extend([ext.upper() for ext in DOCUMENT_EXTENSIONS])
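
The two variants build the same lower- plus upper-case extension list at import time; the difference is that one constructs a new list from `_doc_extensions` while the other extends `DOCUMENT_EXTENSIONS` in place, and in-place extension doubles the list if the statement ever runs twice. A sketch of both:

    exts = ["txt", "pdf"]

    built = exts + [e.upper() for e in exts]  # new list; safe to re-evaluate
    exts.extend([e.upper() for e in exts])    # mutates exts; not idempotent

    print(built)  # ['txt', 'pdf', 'TXT', 'PDF']
    print(exts)   # ['txt', 'pdf', 'TXT', 'PDF']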

View File

@@ -19,7 +19,6 @@ language_timezone_mapping = {
"fa-IR": "Asia/Tehran",
"sl-SI": "Europe/Ljubljana",
"th-TH": "Asia/Bangkok",
"id-ID": "Asia/Jakarta",
}
languages = list(language_timezone_mapping.keys())

View File

@@ -7,7 +7,7 @@ default_app_templates: Mapping[AppMode, Mapping] = {
# workflow default mode
AppMode.WORKFLOW: {
"app": {
"mode": AppMode.WORKFLOW,
"mode": AppMode.WORKFLOW.value,
"enable_site": True,
"enable_api": True,
}
@@ -15,7 +15,7 @@ default_app_templates: Mapping[AppMode, Mapping] = {
# completion default mode
AppMode.COMPLETION: {
"app": {
"mode": AppMode.COMPLETION,
"mode": AppMode.COMPLETION.value,
"enable_site": True,
"enable_api": True,
},
@@ -44,7 +44,7 @@ default_app_templates: Mapping[AppMode, Mapping] = {
# chat default mode
AppMode.CHAT: {
"app": {
"mode": AppMode.CHAT,
"mode": AppMode.CHAT.value,
"enable_site": True,
"enable_api": True,
},
@@ -60,7 +60,7 @@ default_app_templates: Mapping[AppMode, Mapping] = {
# advanced-chat default mode
AppMode.ADVANCED_CHAT: {
"app": {
"mode": AppMode.ADVANCED_CHAT,
"mode": AppMode.ADVANCED_CHAT.value,
"enable_site": True,
"enable_api": True,
},
@@ -68,7 +68,7 @@ default_app_templates: Mapping[AppMode, Mapping] = {
# agent-chat default mode
AppMode.AGENT_CHAT: {
"app": {
"mode": AppMode.AGENT_CHAT,
"mode": AppMode.AGENT_CHAT.value,
"enable_site": True,
"enable_api": True,
},
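
Dropping .value here is safe only if AppMode is a str-backed enum; a minimal sketch of why, assuming a Python 3.11+ StrEnum (the member names are taken from the hunks, the base class is an assumption):

from enum import StrEnum


class AppMode(StrEnum):
    WORKFLOW = "workflow"
    COMPLETION = "completion"


# StrEnum members *are* strings, so they compare and serialize as their value.
print(AppMode.WORKFLOW == "workflow")              # True
print(f"mode={AppMode.WORKFLOW}")                  # mode=workflow
print(AppMode.WORKFLOW.value == AppMode.WORKFLOW)  # True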

View File

@@ -8,6 +8,7 @@ if TYPE_CHECKING:
from core.model_runtime.entities.model_entities import AIModelEntity
from core.plugin.entities.plugin_daemon import PluginModelProviderEntity
from core.tools.plugin_tool.provider import PluginToolProviderController
from core.workflow.entities.variable_pool import VariablePool
"""

View File

@@ -1,5 +1,4 @@
from flask import Blueprint
from flask_restx import Namespace
from libs.external_api import ExternalApi
@@ -27,16 +26,7 @@ from .files import FileApi, FilePreviewApi, FileSupportTypeApi
from .remote_files import RemoteFileInfoApi, RemoteFileUploadApi
bp = Blueprint("console", __name__, url_prefix="/console/api")
api = ExternalApi(
bp,
version="1.0",
title="Console API",
description="Console management APIs for app configuration, monitoring, and administration",
)
# Create namespace
console_ns = Namespace("console", description="Console management API operations", path="/")
api = ExternalApi(bp)
# File
api.add_resource(FileApi, "/files/upload")
@@ -53,16 +43,7 @@ api.add_resource(AppImportConfirmApi, "/apps/imports/<string:import_id>/confirm"
api.add_resource(AppImportCheckDependenciesApi, "/apps/imports/<string:app_id>/check-dependencies")
# Import other controllers
from . import (
admin,
apikey,
extension,
feature,
init_validate,
ping,
setup,
version,
)
from . import admin, apikey, extension, feature, ping, setup, version
# Import app controllers
from .app import (
@@ -89,16 +70,7 @@ from .app import (
)
# Import auth controllers
from .auth import (
activate,
data_source_bearer_auth,
data_source_oauth,
email_register,
forgot_password,
login,
oauth,
oauth_server,
)
from .auth import activate, data_source_bearer_auth, data_source_oauth, forgot_password, login, oauth
# Import billing controllers
from .billing import billing, compliance
@@ -112,6 +84,7 @@ from .datasets import (
external,
hit_testing,
metadata,
upload_file,
website,
)
@@ -123,23 +96,6 @@ from .explore import (
saved_message,
)
# Import tag controllers
from .tag import tags
# Import workspace controllers
from .workspace import (
account,
agent_providers,
endpoint,
load_balancing_config,
members,
model_providers,
models,
plugin,
tool_providers,
workspace,
)
# Explore Audio
api.add_resource(ChatAudioApi, "/installed-apps/<uuid:installed_app_id>/audio-to-text", endpoint="installed_app_audio")
api.add_resource(ChatTextApi, "/installed-apps/<uuid:installed_app_id>/text-to-audio", endpoint="installed_app_text")
@@ -211,71 +167,19 @@ api.add_resource(
InstalledAppWorkflowTaskStopApi, "/installed-apps/<uuid:installed_app_id>/workflows/tasks/<string:task_id>/stop"
)
api.add_namespace(console_ns)
# Import tag controllers
from .tag import tags
__all__ = [
"account",
"activate",
"admin",
"advanced_prompt_template",
"agent",
"agent_providers",
"annotation",
"api",
"apikey",
"app",
"audio",
"billing",
"bp",
"completion",
"compliance",
"console_ns",
"conversation",
"conversation_variables",
"data_source",
"data_source_bearer_auth",
"data_source_oauth",
"datasets",
"datasets_document",
"datasets_segments",
"email_register",
"endpoint",
"extension",
"external",
"feature",
"forgot_password",
"generator",
"hit_testing",
"init_validate",
"installed_app",
"load_balancing_config",
"login",
"mcp_server",
"members",
"message",
"metadata",
"model_config",
"model_providers",
"models",
"oauth",
"oauth_server",
"ops_trace",
"parameter",
"ping",
"plugin",
"recommended_app",
"saved_message",
"setup",
"site",
"statistic",
"tags",
"tool_providers",
"version",
"website",
"workflow",
"workflow_app_log",
"workflow_draft_variable",
"workflow_run",
"workflow_statistic",
"workspace",
]
# Import workspace controllers
from .workspace import (
account,
agent_providers,
endpoint,
load_balancing_config,
members,
model_providers,
models,
plugin,
tool_providers,
workspace,
)
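
The refactor above replaces per-resource api.add_resource calls with a flask_restx Namespace registered once via api.add_namespace. A minimal sketch of that pattern using a plain Api (the project's ExternalApi is a subclass of it); the /ping route is illustrative:

from flask import Blueprint, Flask
from flask_restx import Api, Namespace, Resource

bp = Blueprint("console", __name__, url_prefix="/console/api")
api = Api(bp, version="1.0", title="Console API")
console_ns = Namespace("console", description="Console operations", path="/")


@console_ns.route("/ping")
class Ping(Resource):
    def get(self):
        return {"result": "pong"}


# One registration covers every resource decorated with @console_ns.route.
api.add_namespace(console_ns)

app = Flask(__name__)
app.register_blueprint(bp)
# GET /console/api/ping -> {"result": "pong"}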

View File

@@ -1,26 +1,22 @@
from collections.abc import Callable
from functools import wraps
from typing import ParamSpec, TypeVar
from flask import request
from flask_restx import Resource, fields, reqparse
from flask_restx import Resource, reqparse
from sqlalchemy import select
from sqlalchemy.orm import Session
from werkzeug.exceptions import NotFound, Unauthorized
P = ParamSpec("P")
R = TypeVar("R")
from configs import dify_config
from constants.languages import supported_language
from controllers.console import api, console_ns
from controllers.console import api
from controllers.console.wraps import only_edition_cloud
from extensions.ext_database import db
from models.model import App, InstalledApp, RecommendedApp
def admin_required(view: Callable[P, R]):
def admin_required(view):
@wraps(view)
def decorated(*args: P.args, **kwargs: P.kwargs):
def decorated(*args, **kwargs):
if not dify_config.ADMIN_API_KEY:
raise Unauthorized("API key is invalid.")
@@ -45,28 +41,7 @@ def admin_required(view: Callable[P, R]):
return decorated
@console_ns.route("/admin/insert-explore-apps")
class InsertExploreAppListApi(Resource):
@api.doc("insert_explore_app")
@api.doc(description="Insert or update an app in the explore list")
@api.expect(
api.model(
"InsertExploreAppRequest",
{
"app_id": fields.String(required=True, description="Application ID"),
"desc": fields.String(description="App description"),
"copyright": fields.String(description="Copyright information"),
"privacy_policy": fields.String(description="Privacy policy"),
"custom_disclaimer": fields.String(description="Custom disclaimer"),
"language": fields.String(required=True, description="Language code"),
"category": fields.String(required=True, description="App category"),
"position": fields.Integer(required=True, description="Display position"),
},
)
)
@api.response(200, "App updated successfully")
@api.response(201, "App inserted successfully")
@api.response(404, "App not found")
@only_edition_cloud
@admin_required
def post(self):
@@ -136,12 +111,7 @@ class InsertExploreAppListApi(Resource):
return {"result": "success"}, 200
@console_ns.route("/admin/insert-explore-apps/<uuid:app_id>")
class InsertExploreAppApi(Resource):
@api.doc("delete_explore_app")
@api.doc(description="Remove an app from the explore list")
@api.doc(params={"app_id": "Application ID to remove"})
@api.response(204, "App removed successfully")
@only_edition_cloud
@admin_required
def delete(self, app_id):
@@ -160,21 +130,21 @@ class InsertExploreAppApi(Resource):
app.is_public = False
with Session(db.engine) as session:
installed_apps = (
session.execute(
select(InstalledApp).where(
InstalledApp.app_id == recommended_app.app_id,
InstalledApp.tenant_id != InstalledApp.app_owner_tenant_id,
)
installed_apps = session.execute(
select(InstalledApp).where(
InstalledApp.app_id == recommended_app.app_id,
InstalledApp.tenant_id != InstalledApp.app_owner_tenant_id,
)
.scalars()
.all()
)
).all()
for installed_app in installed_apps:
session.delete(installed_app)
for installed_app in installed_apps:
db.session.delete(installed_app)
db.session.delete(recommended_app)
db.session.commit()
return {"result": "success"}, 204
api.add_resource(InsertExploreAppListApi, "/admin/insert-explore-apps")
api.add_resource(InsertExploreAppApi, "/admin/insert-explore-apps/<uuid:app_id>")
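
The typed admin_required decorator above uses ParamSpec so the wrapper preserves the wrapped view's signature for type checkers. A runnable sketch of the same pattern, with the auth check reduced to a comment:

from collections.abc import Callable
from functools import wraps
from typing import ParamSpec, TypeVar

P = ParamSpec("P")
R = TypeVar("R")


def admin_required(view: Callable[P, R]) -> Callable[P, R]:
    @wraps(view)
    def decorated(*args: P.args, **kwargs: P.kwargs) -> R:
        # Real code would verify the admin API key here before dispatching.
        return view(*args, **kwargs)

    return decorated


@admin_required
def delete_app(app_id: str) -> dict[str, str]:
    return {"deleted": app_id}


print(delete_app("123"))  # type checkers still see (app_id: str) -> dict[str, str]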

View File

@@ -1,7 +1,8 @@
from typing import Any, Optional
import flask_restx
from flask_login import current_user
from flask_restx import Resource, fields, marshal_with
from flask_restx._http import HTTPStatus
from sqlalchemy import select
from sqlalchemy.orm import Session
from werkzeug.exceptions import Forbidden
@@ -12,7 +13,7 @@ from libs.login import login_required
from models.dataset import Dataset
from models.model import ApiToken, App
from . import api, console_ns
from . import api
from .wraps import account_initialization_required, setup_required
api_key_fields = {
@@ -39,7 +40,7 @@ def _get_resource(resource_id, tenant_id, resource_model):
).scalar_one_or_none()
if resource is None:
flask_restx.abort(HTTPStatus.NOT_FOUND, message=f"{resource_model.__name__} not found.")
flask_restx.abort(404, message=f"{resource_model.__name__} not found.")
return resource
@@ -48,7 +49,7 @@ class BaseApiKeyListResource(Resource):
method_decorators = [account_initialization_required, login_required, setup_required]
resource_type: str | None = None
resource_model: type | None = None
resource_model: Optional[Any] = None
resource_id_field: str | None = None
token_prefix: str | None = None
max_keys = 10
@@ -58,11 +59,11 @@ class BaseApiKeyListResource(Resource):
assert self.resource_id_field is not None, "resource_id_field must be set"
resource_id = str(resource_id)
_get_resource(resource_id, current_user.current_tenant_id, self.resource_model)
keys = db.session.scalars(
select(ApiToken).where(
ApiToken.type == self.resource_type, getattr(ApiToken, self.resource_id_field) == resource_id
)
).all()
keys = (
db.session.query(ApiToken)
.where(ApiToken.type == self.resource_type, getattr(ApiToken, self.resource_id_field) == resource_id)
.all()
)
return {"items": keys}
@marshal_with(api_key_fields)
@@ -81,12 +82,12 @@ class BaseApiKeyListResource(Resource):
if current_key_count >= self.max_keys:
flask_restx.abort(
HTTPStatus.BAD_REQUEST,
400,
message=f"Cannot create more than {self.max_keys} API keys for this resource type.",
custom="max_keys_exceeded",
code="max_keys_exceeded",
)
key = ApiToken.generate_api_key(self.token_prefix or "", 24)
key = ApiToken.generate_api_key(self.token_prefix, 24)
api_token = ApiToken()
setattr(api_token, self.resource_id_field, resource_id)
api_token.tenant_id = current_user.current_tenant_id
@@ -101,7 +102,7 @@ class BaseApiKeyResource(Resource):
method_decorators = [account_initialization_required, login_required, setup_required]
resource_type: str | None = None
resource_model: type | None = None
resource_model: Optional[Any] = None
resource_id_field: str | None = None
def delete(self, resource_id, api_key_id):
@@ -125,7 +126,7 @@ class BaseApiKeyResource(Resource):
)
if key is None:
flask_restx.abort(HTTPStatus.NOT_FOUND, message="API key not found")
flask_restx.abort(404, message="API key not found")
db.session.query(ApiToken).where(ApiToken.id == api_key_id).delete()
db.session.commit()
@@ -133,25 +134,7 @@ class BaseApiKeyResource(Resource):
return {"result": "success"}, 204
@console_ns.route("/apps/<uuid:resource_id>/api-keys")
class AppApiKeyListResource(BaseApiKeyListResource):
@api.doc("get_app_api_keys")
@api.doc(description="Get all API keys for an app")
@api.doc(params={"resource_id": "App ID"})
@api.response(200, "Success", api_key_list)
def get(self, resource_id):
"""Get all API keys for an app"""
return super().get(resource_id)
@api.doc("create_app_api_key")
@api.doc(description="Create a new API key for an app")
@api.doc(params={"resource_id": "App ID"})
@api.response(201, "API key created successfully", api_key_fields)
@api.response(400, "Maximum keys exceeded")
def post(self, resource_id):
"""Create a new API key for an app"""
return super().post(resource_id)
def after_request(self, resp):
resp.headers["Access-Control-Allow-Origin"] = "*"
resp.headers["Access-Control-Allow-Credentials"] = "true"
@@ -163,16 +146,7 @@ class AppApiKeyListResource(BaseApiKeyListResource):
token_prefix = "app-"
@console_ns.route("/apps/<uuid:resource_id>/api-keys/<uuid:api_key_id>")
class AppApiKeyResource(BaseApiKeyResource):
@api.doc("delete_app_api_key")
@api.doc(description="Delete an API key for an app")
@api.doc(params={"resource_id": "App ID", "api_key_id": "API key ID"})
@api.response(204, "API key deleted successfully")
def delete(self, resource_id, api_key_id):
"""Delete an API key for an app"""
return super().delete(resource_id, api_key_id)
def after_request(self, resp):
resp.headers["Access-Control-Allow-Origin"] = "*"
resp.headers["Access-Control-Allow-Credentials"] = "true"
@@ -183,25 +157,7 @@ class AppApiKeyResource(BaseApiKeyResource):
resource_id_field = "app_id"
@console_ns.route("/datasets/<uuid:resource_id>/api-keys")
class DatasetApiKeyListResource(BaseApiKeyListResource):
@api.doc("get_dataset_api_keys")
@api.doc(description="Get all API keys for a dataset")
@api.doc(params={"resource_id": "Dataset ID"})
@api.response(200, "Success", api_key_list)
def get(self, resource_id):
"""Get all API keys for a dataset"""
return super().get(resource_id)
@api.doc("create_dataset_api_key")
@api.doc(description="Create a new API key for a dataset")
@api.doc(params={"resource_id": "Dataset ID"})
@api.response(201, "API key created successfully", api_key_fields)
@api.response(400, "Maximum keys exceeded")
def post(self, resource_id):
"""Create a new API key for a dataset"""
return super().post(resource_id)
def after_request(self, resp):
resp.headers["Access-Control-Allow-Origin"] = "*"
resp.headers["Access-Control-Allow-Credentials"] = "true"
@@ -213,16 +169,7 @@ class DatasetApiKeyListResource(BaseApiKeyListResource):
token_prefix = "ds-"
@console_ns.route("/datasets/<uuid:resource_id>/api-keys/<uuid:api_key_id>")
class DatasetApiKeyResource(BaseApiKeyResource):
@api.doc("delete_dataset_api_key")
@api.doc(description="Delete an API key for a dataset")
@api.doc(params={"resource_id": "Dataset ID", "api_key_id": "API key ID"})
@api.response(204, "API key deleted successfully")
def delete(self, resource_id, api_key_id):
"""Delete an API key for a dataset"""
return super().delete(resource_id, api_key_id)
def after_request(self, resp):
resp.headers["Access-Control-Allow-Origin"] = "*"
resp.headers["Access-Control-Allow-Credentials"] = "true"
@@ -231,3 +178,9 @@ class DatasetApiKeyResource(BaseApiKeyResource):
resource_type = "dataset"
resource_model = Dataset
resource_id_field = "dataset_id"
api.add_resource(AppApiKeyListResource, "/apps/<uuid:resource_id>/api-keys")
api.add_resource(AppApiKeyResource, "/apps/<uuid:resource_id>/api-keys/<uuid:api_key_id>")
api.add_resource(DatasetApiKeyListResource, "/datasets/<uuid:resource_id>/api-keys")
api.add_resource(DatasetApiKeyResource, "/datasets/<uuid:resource_id>/api-keys/<uuid:api_key_id>")
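
The query changes above move from the legacy session.query style to SQLAlchemy 2.0 select() with scalars(). A self-contained sketch of both styles against an illustrative in-memory model (the column set is simplified, not the real ApiToken):

from sqlalchemy import String, create_engine, select
from sqlalchemy.orm import DeclarativeBase, Mapped, Session, mapped_column


class Base(DeclarativeBase):
    pass


class ApiToken(Base):
    __tablename__ = "api_tokens"
    id: Mapped[int] = mapped_column(primary_key=True)
    type: Mapped[str] = mapped_column(String(16))


engine = create_engine("sqlite://")
Base.metadata.create_all(engine)

with Session(engine) as session:
    session.add(ApiToken(type="app"))
    session.commit()

    # New style: 2.0 select() + scalars(), as in the updated handler.
    keys = session.scalars(select(ApiToken).where(ApiToken.type == "app")).all()
    # Legacy style it replaces: session.query(ApiToken).where(...).all()
    print(len(keys))  # 1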

View File

@@ -1,26 +1,12 @@
from flask_restx import Resource, fields, reqparse
from flask_restx import Resource, reqparse
from controllers.console import api, console_ns
from controllers.console import api
from controllers.console.wraps import account_initialization_required, setup_required
from libs.login import login_required
from services.advanced_prompt_template_service import AdvancedPromptTemplateService
@console_ns.route("/app/prompt-templates")
class AdvancedPromptTemplateList(Resource):
@api.doc("get_advanced_prompt_templates")
@api.doc(description="Get advanced prompt templates based on app mode and model configuration")
@api.expect(
api.parser()
.add_argument("app_mode", type=str, required=True, location="args", help="Application mode")
.add_argument("model_mode", type=str, required=True, location="args", help="Model mode")
.add_argument("has_context", type=str, default="true", location="args", help="Whether has context")
.add_argument("model_name", type=str, required=True, location="args", help="Model name")
)
@api.response(
200, "Prompt templates retrieved successfully", fields.List(fields.Raw(description="Prompt template data"))
)
@api.response(400, "Invalid request parameters")
@setup_required
@login_required
@account_initialization_required
@@ -33,3 +19,6 @@ class AdvancedPromptTemplateList(Resource):
args = parser.parse_args()
return AdvancedPromptTemplateService.get_prompt(args)
api.add_resource(AdvancedPromptTemplateList, "/app/prompt-templates")

View File

@@ -1,6 +1,6 @@
from flask_restx import Resource, fields, reqparse
from flask_restx import Resource, reqparse
from controllers.console import api, console_ns
from controllers.console import api
from controllers.console.app.wraps import get_app_model
from controllers.console.wraps import account_initialization_required, setup_required
from libs.helper import uuid_value
@@ -9,18 +9,7 @@ from models.model import AppMode
from services.agent_service import AgentService
@console_ns.route("/apps/<uuid:app_id>/agent/logs")
class AgentLogApi(Resource):
@api.doc("get_agent_logs")
@api.doc(description="Get agent execution logs for an application")
@api.doc(params={"app_id": "Application ID"})
@api.expect(
api.parser()
.add_argument("message_id", type=str, required=True, location="args", help="Message UUID")
.add_argument("conversation_id", type=str, required=True, location="args", help="Conversation UUID")
)
@api.response(200, "Agent logs retrieved successfully", fields.List(fields.Raw(description="Agent log entries")))
@api.response(400, "Invalid request parameters")
@setup_required
@login_required
@account_initialization_required
@@ -34,3 +23,6 @@ class AgentLogApi(Resource):
args = parser.parse_args()
return AgentService.get_agent_logs(app_model, args["conversation_id"], args["message_id"])
api.add_resource(AgentLogApi, "/apps/<uuid:app_id>/agent/logs")
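
The handler above validates query-string parameters with reqparse. A minimal sketch of that flow; the argument names mirror the hunk, while the app wiring is illustrative:

from flask import Flask
from flask_restx import Api, Resource, reqparse

app = Flask(__name__)
api = Api(app)

parser = reqparse.RequestParser()
parser.add_argument("message_id", type=str, required=True, location="args")
parser.add_argument("conversation_id", type=str, required=True, location="args")


@api.route("/agent/logs")
class AgentLog(Resource):
    def get(self):
        # parse_args() returns 400 automatically when required args are missing.
        args = parser.parse_args()
        return {"message_id": args["message_id"], "conversation_id": args["conversation_id"]}


# with app.test_client() as c:
#     print(c.get("/agent/logs?message_id=m1&conversation_id=c1").json)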

View File

@@ -2,11 +2,11 @@ from typing import Literal
from flask import request
from flask_login import current_user
from flask_restx import Resource, fields, marshal, marshal_with, reqparse
from flask_restx import Resource, marshal, marshal_with, reqparse
from werkzeug.exceptions import Forbidden
from controllers.common.errors import NoFileUploadedError, TooManyFilesError
from controllers.console import api, console_ns
from controllers.console import api
from controllers.console.wraps import (
account_initialization_required,
cloud_edition_billing_resource_check,
@@ -21,23 +21,7 @@ from libs.login import login_required
from services.annotation_service import AppAnnotationService
@console_ns.route("/apps/<uuid:app_id>/annotation-reply/<string:action>")
class AnnotationReplyActionApi(Resource):
@api.doc("annotation_reply_action")
@api.doc(description="Enable or disable annotation reply for an app")
@api.doc(params={"app_id": "Application ID", "action": "Action to perform (enable/disable)"})
@api.expect(
api.model(
"AnnotationReplyActionRequest",
{
"score_threshold": fields.Float(required=True, description="Score threshold for annotation matching"),
"embedding_provider_name": fields.String(required=True, description="Embedding provider name"),
"embedding_model_name": fields.String(required=True, description="Embedding model name"),
},
)
)
@api.response(200, "Action completed successfully")
@api.response(403, "Insufficient permissions")
@setup_required
@login_required
@account_initialization_required
@@ -59,13 +43,7 @@ class AnnotationReplyActionApi(Resource):
return result, 200
@console_ns.route("/apps/<uuid:app_id>/annotation-setting")
class AppAnnotationSettingDetailApi(Resource):
@api.doc("get_annotation_setting")
@api.doc(description="Get annotation settings for an app")
@api.doc(params={"app_id": "Application ID"})
@api.response(200, "Annotation settings retrieved successfully")
@api.response(403, "Insufficient permissions")
@setup_required
@login_required
@account_initialization_required
@@ -78,23 +56,7 @@ class AppAnnotationSettingDetailApi(Resource):
return result, 200
@console_ns.route("/apps/<uuid:app_id>/annotation-settings/<uuid:annotation_setting_id>")
class AppAnnotationSettingUpdateApi(Resource):
@api.doc("update_annotation_setting")
@api.doc(description="Update annotation settings for an app")
@api.doc(params={"app_id": "Application ID", "annotation_setting_id": "Annotation setting ID"})
@api.expect(
api.model(
"AnnotationSettingUpdateRequest",
{
"score_threshold": fields.Float(required=True, description="Score threshold"),
"embedding_provider_name": fields.String(required=True, description="Embedding provider"),
"embedding_model_name": fields.String(required=True, description="Embedding model"),
},
)
)
@api.response(200, "Settings updated successfully")
@api.response(403, "Insufficient permissions")
@setup_required
@login_required
@account_initialization_required
@@ -113,13 +75,7 @@ class AppAnnotationSettingUpdateApi(Resource):
return result, 200
@console_ns.route("/apps/<uuid:app_id>/annotation-reply/<string:action>/status/<uuid:job_id>")
class AnnotationReplyActionStatusApi(Resource):
@api.doc("get_annotation_reply_action_status")
@api.doc(description="Get status of annotation reply action job")
@api.doc(params={"app_id": "Application ID", "job_id": "Job ID", "action": "Action type"})
@api.response(200, "Job status retrieved successfully")
@api.response(403, "Insufficient permissions")
@setup_required
@login_required
@account_initialization_required
@@ -143,19 +99,7 @@ class AnnotationReplyActionStatusApi(Resource):
return {"job_id": job_id, "job_status": job_status, "error_msg": error_msg}, 200
@console_ns.route("/apps/<uuid:app_id>/annotations")
class AnnotationApi(Resource):
@api.doc("list_annotations")
@api.doc(description="Get annotations for an app with pagination")
@api.doc(params={"app_id": "Application ID"})
@api.expect(
api.parser()
.add_argument("page", type=int, location="args", default=1, help="Page number")
.add_argument("limit", type=int, location="args", default=20, help="Page size")
.add_argument("keyword", type=str, location="args", default="", help="Search keyword")
)
@api.response(200, "Annotations retrieved successfully")
@api.response(403, "Insufficient permissions")
@setup_required
@login_required
@account_initialization_required
@@ -178,21 +122,6 @@ class AnnotationApi(Resource):
}
return response, 200
@api.doc("create_annotation")
@api.doc(description="Create a new annotation for an app")
@api.doc(params={"app_id": "Application ID"})
@api.expect(
api.model(
"CreateAnnotationRequest",
{
"question": fields.String(required=True, description="Question text"),
"answer": fields.String(required=True, description="Answer text"),
"annotation_reply": fields.Raw(description="Annotation reply data"),
},
)
)
@api.response(201, "Annotation created successfully", annotation_fields)
@api.response(403, "Insufficient permissions")
@setup_required
@login_required
@account_initialization_required
@@ -239,13 +168,7 @@ class AnnotationApi(Resource):
return {"result": "success"}, 204
@console_ns.route("/apps/<uuid:app_id>/annotations/export")
class AnnotationExportApi(Resource):
@api.doc("export_annotations")
@api.doc(description="Export all annotations for an app")
@api.doc(params={"app_id": "Application ID"})
@api.response(200, "Annotations exported successfully", fields.List(fields.Nested(annotation_fields)))
@api.response(403, "Insufficient permissions")
@setup_required
@login_required
@account_initialization_required
@@ -259,14 +182,7 @@ class AnnotationExportApi(Resource):
return response, 200
@console_ns.route("/apps/<uuid:app_id>/annotations/<uuid:annotation_id>")
class AnnotationUpdateDeleteApi(Resource):
@api.doc("update_delete_annotation")
@api.doc(description="Update or delete an annotation")
@api.doc(params={"app_id": "Application ID", "annotation_id": "Annotation ID"})
@api.response(200, "Annotation updated successfully", annotation_fields)
@api.response(204, "Annotation deleted successfully")
@api.response(403, "Insufficient permissions")
@setup_required
@login_required
@account_initialization_required
@@ -298,14 +214,7 @@ class AnnotationUpdateDeleteApi(Resource):
return {"result": "success"}, 204
@console_ns.route("/apps/<uuid:app_id>/annotations/batch-import")
class AnnotationBatchImportApi(Resource):
@api.doc("batch_import_annotations")
@api.doc(description="Batch import annotations from CSV file")
@api.doc(params={"app_id": "Application ID"})
@api.response(200, "Batch import started successfully")
@api.response(403, "Insufficient permissions")
@api.response(400, "No file uploaded or too many files")
@setup_required
@login_required
@account_initialization_required
@@ -330,13 +239,7 @@ class AnnotationBatchImportApi(Resource):
return AppAnnotationService.batch_import_app_annotations(app_id, file)
@console_ns.route("/apps/<uuid:app_id>/annotations/batch-import-status/<uuid:job_id>")
class AnnotationBatchImportStatusApi(Resource):
@api.doc("get_batch_import_status")
@api.doc(description="Get status of batch import job")
@api.doc(params={"app_id": "Application ID", "job_id": "Job ID"})
@api.response(200, "Job status retrieved successfully")
@api.response(403, "Insufficient permissions")
@setup_required
@login_required
@account_initialization_required
@@ -359,20 +262,7 @@ class AnnotationBatchImportStatusApi(Resource):
return {"job_id": job_id, "job_status": job_status, "error_msg": error_msg}, 200
@console_ns.route("/apps/<uuid:app_id>/annotations/<uuid:annotation_id>/hit-histories")
class AnnotationHitHistoryListApi(Resource):
@api.doc("list_annotation_hit_histories")
@api.doc(description="Get hit histories for an annotation")
@api.doc(params={"app_id": "Application ID", "annotation_id": "Annotation ID"})
@api.expect(
api.parser()
.add_argument("page", type=int, location="args", default=1, help="Page number")
.add_argument("limit", type=int, location="args", default=20, help="Page size")
)
@api.response(
200, "Hit histories retrieved successfully", fields.List(fields.Nested(annotation_hit_history_fields))
)
@api.response(403, "Insufficient permissions")
@setup_required
@login_required
@account_initialization_required
@@ -395,3 +285,17 @@ class AnnotationHitHistoryListApi(Resource):
"page": page,
}
return response
api.add_resource(AnnotationReplyActionApi, "/apps/<uuid:app_id>/annotation-reply/<string:action>")
api.add_resource(
AnnotationReplyActionStatusApi, "/apps/<uuid:app_id>/annotation-reply/<string:action>/status/<uuid:job_id>"
)
api.add_resource(AnnotationApi, "/apps/<uuid:app_id>/annotations")
api.add_resource(AnnotationExportApi, "/apps/<uuid:app_id>/annotations/export")
api.add_resource(AnnotationUpdateDeleteApi, "/apps/<uuid:app_id>/annotations/<uuid:annotation_id>")
api.add_resource(AnnotationBatchImportApi, "/apps/<uuid:app_id>/annotations/batch-import")
api.add_resource(AnnotationBatchImportStatusApi, "/apps/<uuid:app_id>/annotations/batch-import-status/<uuid:job_id>")
api.add_resource(AnnotationHitHistoryListApi, "/apps/<uuid:app_id>/annotations/<uuid:annotation_id>/hit-histories")
api.add_resource(AppAnnotationSettingDetailApi, "/apps/<uuid:app_id>/annotation-setting")
api.add_resource(AppAnnotationSettingUpdateApi, "/apps/<uuid:app_id>/annotation-settings/<uuid:annotation_setting_id>")
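
The decorators being added throughout these hunks (@api.doc, @api.expect, @api.response) affect only the generated Swagger/OpenAPI documentation, not request handling. A compact sketch of the pattern with an illustrative model and route:

from flask import Flask
from flask_restx import Api, Resource, fields

app = Flask(__name__)
api = Api(app, title="Demo API")

annotation_model = api.model(
    "CreateAnnotationRequest",
    {
        "question": fields.String(required=True, description="Question text"),
        "answer": fields.String(required=True, description="Answer text"),
    },
)


@api.route("/annotations")
class Annotations(Resource):
    @api.doc("create_annotation", description="Create a new annotation")
    @api.expect(annotation_model)
    @api.response(201, "Annotation created successfully")
    def post(self):
        # The decorators above document this operation; the body is unchanged.
        return {"result": "success"}, 201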

View File

@@ -2,12 +2,12 @@ import uuid
from typing import cast
from flask_login import current_user
from flask_restx import Resource, fields, inputs, marshal, marshal_with, reqparse
from flask_restx import Resource, inputs, marshal, marshal_with, reqparse
from sqlalchemy import select
from sqlalchemy.orm import Session
from werkzeug.exceptions import BadRequest, Forbidden, abort
from controllers.console import api, console_ns
from controllers.console import api
from controllers.console.app.wraps import get_app_model
from controllers.console.wraps import (
account_initialization_required,
@@ -34,27 +34,7 @@ def _validate_description_length(description):
return description
@console_ns.route("/apps")
class AppListApi(Resource):
@api.doc("list_apps")
@api.doc(description="Get list of applications with pagination and filtering")
@api.expect(
api.parser()
.add_argument("page", type=int, location="args", help="Page number (1-99999)", default=1)
.add_argument("limit", type=int, location="args", help="Page size (1-100)", default=20)
.add_argument(
"mode",
type=str,
location="args",
choices=["completion", "chat", "advanced-chat", "workflow", "agent-chat", "channel", "all"],
default="all",
help="App mode filter",
)
.add_argument("name", type=str, location="args", help="Filter by app name")
.add_argument("tag_ids", type=str, location="args", help="Comma-separated tag IDs")
.add_argument("is_created_by_me", type=bool, location="args", help="Filter by creator")
)
@api.response(200, "Success", app_pagination_fields)
@setup_required
@login_required
@account_initialization_required
@@ -111,24 +91,6 @@ class AppListApi(Resource):
return marshal(app_pagination, app_pagination_fields), 200
@api.doc("create_app")
@api.doc(description="Create a new application")
@api.expect(
api.model(
"CreateAppRequest",
{
"name": fields.String(required=True, description="App name"),
"description": fields.String(description="App description (max 400 chars)"),
"mode": fields.String(required=True, enum=ALLOW_CREATE_APP_MODES, description="App mode"),
"icon_type": fields.String(description="Icon type"),
"icon": fields.String(description="Icon"),
"icon_background": fields.String(description="Icon background color"),
},
)
)
@api.response(201, "App created successfully", app_detail_fields)
@api.response(403, "Insufficient permissions")
@api.response(400, "Invalid request parameters")
@setup_required
@login_required
@account_initialization_required
@@ -153,21 +115,12 @@ class AppListApi(Resource):
raise BadRequest("mode is required")
app_service = AppService()
if not isinstance(current_user, Account):
raise ValueError("current_user must be an Account instance")
if current_user.current_tenant_id is None:
raise ValueError("current_user.current_tenant_id cannot be None")
app = app_service.create_app(current_user.current_tenant_id, args, current_user)
return app, 201
@console_ns.route("/apps/<uuid:app_id>")
class AppApi(Resource):
@api.doc("get_app_detail")
@api.doc(description="Get application details")
@api.doc(params={"app_id": "Application ID"})
@api.response(200, "Success", app_detail_fields_with_site)
@setup_required
@login_required
@account_initialization_required
@@ -186,26 +139,6 @@ class AppApi(Resource):
return app_model
@api.doc("update_app")
@api.doc(description="Update application details")
@api.doc(params={"app_id": "Application ID"})
@api.expect(
api.model(
"UpdateAppRequest",
{
"name": fields.String(required=True, description="App name"),
"description": fields.String(description="App description (max 400 chars)"),
"icon_type": fields.String(description="Icon type"),
"icon": fields.String(description="Icon"),
"icon_background": fields.String(description="Icon background color"),
"use_icon_as_answer_icon": fields.Boolean(description="Use icon as answer icon"),
"max_active_requests": fields.Integer(description="Maximum active requests"),
},
)
)
@api.response(200, "App updated successfully", app_detail_fields_with_site)
@api.response(403, "Insufficient permissions")
@api.response(400, "Invalid request parameters")
@setup_required
@login_required
@account_initialization_required
@@ -228,31 +161,14 @@ class AppApi(Resource):
args = parser.parse_args()
app_service = AppService()
# Construct ArgsDict from parsed arguments
from services.app_service import AppService as AppServiceType
args_dict: AppServiceType.ArgsDict = {
"name": args["name"],
"description": args.get("description", ""),
"icon_type": args.get("icon_type", ""),
"icon": args.get("icon", ""),
"icon_background": args.get("icon_background", ""),
"use_icon_as_answer_icon": args.get("use_icon_as_answer_icon", False),
"max_active_requests": args.get("max_active_requests", 0),
}
app_model = app_service.update_app(app_model, args_dict)
app_model = app_service.update_app(app_model, args)
return app_model
@api.doc("delete_app")
@api.doc(description="Delete application")
@api.doc(params={"app_id": "Application ID"})
@api.response(204, "App deleted successfully")
@api.response(403, "Insufficient permissions")
@get_app_model
@setup_required
@login_required
@account_initialization_required
@get_app_model
def delete(self, app_model):
"""Delete app"""
# The role of the current user in the ta table must be admin, owner, or editor
@@ -265,25 +181,7 @@ class AppApi(Resource):
return {"result": "success"}, 204
@console_ns.route("/apps/<uuid:app_id>/copy")
class AppCopyApi(Resource):
@api.doc("copy_app")
@api.doc(description="Create a copy of an existing application")
@api.doc(params={"app_id": "Application ID to copy"})
@api.expect(
api.model(
"CopyAppRequest",
{
"name": fields.String(description="Name for the copied app"),
"description": fields.String(description="Description for the copied app"),
"icon_type": fields.String(description="Icon type"),
"icon": fields.String(description="Icon"),
"icon_background": fields.String(description="Icon background color"),
},
)
)
@api.response(201, "App copied successfully", app_detail_fields_with_site)
@api.response(403, "Insufficient permissions")
@setup_required
@login_required
@account_initialization_required
@@ -325,26 +223,11 @@ class AppCopyApi(Resource):
return app, 201
@console_ns.route("/apps/<uuid:app_id>/export")
class AppExportApi(Resource):
@api.doc("export_app")
@api.doc(description="Export application configuration as DSL")
@api.doc(params={"app_id": "Application ID to export"})
@api.expect(
api.parser()
.add_argument("include_secret", type=bool, location="args", default=False, help="Include secrets in export")
.add_argument("workflow_id", type=str, location="args", help="Specific workflow ID to export")
)
@api.response(
200,
"App exported successfully",
api.model("AppExportResponse", {"data": fields.String(description="DSL export data")}),
)
@api.response(403, "Insufficient permissions")
@get_app_model
@setup_required
@login_required
@account_initialization_required
@get_app_model
def get(self, app_model):
"""Export app"""
# The role of the current user in the ta table must be admin, owner, or editor
@@ -354,23 +237,12 @@ class AppExportApi(Resource):
# Add include_secret params
parser = reqparse.RequestParser()
parser.add_argument("include_secret", type=inputs.boolean, default=False, location="args")
parser.add_argument("workflow_id", type=str, location="args")
args = parser.parse_args()
return {
"data": AppDslService.export_dsl(
app_model=app_model, include_secret=args["include_secret"], workflow_id=args.get("workflow_id")
)
}
return {"data": AppDslService.export_dsl(app_model=app_model, include_secret=args["include_secret"])}
@console_ns.route("/apps/<uuid:app_id>/name")
class AppNameApi(Resource):
@api.doc("check_app_name")
@api.doc(description="Check if app name is available")
@api.doc(params={"app_id": "Application ID"})
@api.expect(api.parser().add_argument("name", type=str, required=True, location="args", help="Name to check"))
@api.response(200, "Name availability checked")
@setup_required
@login_required
@account_initialization_required
@@ -386,28 +258,12 @@ class AppNameApi(Resource):
args = parser.parse_args()
app_service = AppService()
app_model = app_service.update_app_name(app_model, args["name"])
app_model = app_service.update_app_name(app_model, args.get("name"))
return app_model
@console_ns.route("/apps/<uuid:app_id>/icon")
class AppIconApi(Resource):
@api.doc("update_app_icon")
@api.doc(description="Update application icon")
@api.doc(params={"app_id": "Application ID"})
@api.expect(
api.model(
"AppIconRequest",
{
"icon": fields.String(required=True, description="Icon data"),
"icon_type": fields.String(description="Icon type"),
"icon_background": fields.String(description="Icon background color"),
},
)
)
@api.response(200, "Icon updated successfully")
@api.response(403, "Insufficient permissions")
@setup_required
@login_required
@account_initialization_required
@@ -424,23 +280,12 @@ class AppIconApi(Resource):
args = parser.parse_args()
app_service = AppService()
app_model = app_service.update_app_icon(app_model, args.get("icon") or "", args.get("icon_background") or "")
app_model = app_service.update_app_icon(app_model, args.get("icon"), args.get("icon_background"))
return app_model
@console_ns.route("/apps/<uuid:app_id>/site-enable")
class AppSiteStatus(Resource):
@api.doc("update_app_site_status")
@api.doc(description="Enable or disable app site")
@api.doc(params={"app_id": "Application ID"})
@api.expect(
api.model(
"AppSiteStatusRequest", {"enable_site": fields.Boolean(required=True, description="Enable or disable site")}
)
)
@api.response(200, "Site status updated successfully", app_detail_fields)
@api.response(403, "Insufficient permissions")
@setup_required
@login_required
@account_initialization_required
@@ -456,23 +301,12 @@ class AppSiteStatus(Resource):
args = parser.parse_args()
app_service = AppService()
app_model = app_service.update_app_site_status(app_model, args["enable_site"])
app_model = app_service.update_app_site_status(app_model, args.get("enable_site"))
return app_model
@console_ns.route("/apps/<uuid:app_id>/api-enable")
class AppApiStatus(Resource):
@api.doc("update_app_api_status")
@api.doc(description="Enable or disable app API")
@api.doc(params={"app_id": "Application ID"})
@api.expect(
api.model(
"AppApiStatusRequest", {"enable_api": fields.Boolean(required=True, description="Enable or disable API")}
)
)
@api.response(200, "API status updated successfully", app_detail_fields)
@api.response(403, "Insufficient permissions")
@setup_required
@login_required
@account_initialization_required
@@ -488,17 +322,12 @@ class AppApiStatus(Resource):
args = parser.parse_args()
app_service = AppService()
app_model = app_service.update_app_api_status(app_model, args["enable_api"])
app_model = app_service.update_app_api_status(app_model, args.get("enable_api"))
return app_model
@console_ns.route("/apps/<uuid:app_id>/trace")
class AppTraceApi(Resource):
@api.doc("get_app_trace")
@api.doc(description="Get app tracing configuration")
@api.doc(params={"app_id": "Application ID"})
@api.response(200, "Trace configuration retrieved successfully")
@setup_required
@login_required
@account_initialization_required
@@ -508,20 +337,6 @@ class AppTraceApi(Resource):
return app_trace_config
@api.doc("update_app_trace")
@api.doc(description="Update app tracing configuration")
@api.doc(params={"app_id": "Application ID"})
@api.expect(
api.model(
"AppTraceRequest",
{
"enabled": fields.Boolean(required=True, description="Enable or disable tracing"),
"tracing_provider": fields.String(required=True, description="Tracing provider"),
},
)
)
@api.response(200, "Trace configuration updated successfully")
@api.response(403, "Insufficient permissions")
@setup_required
@login_required
@account_initialization_required
@@ -541,3 +356,14 @@ class AppTraceApi(Resource):
)
return {"result": "success"}
api.add_resource(AppListApi, "/apps")
api.add_resource(AppApi, "/apps/<uuid:app_id>")
api.add_resource(AppCopyApi, "/apps/<uuid:app_id>/copy")
api.add_resource(AppExportApi, "/apps/<uuid:app_id>/export")
api.add_resource(AppNameApi, "/apps/<uuid:app_id>/name")
api.add_resource(AppIconApi, "/apps/<uuid:app_id>/icon")
api.add_resource(AppSiteStatus, "/apps/<uuid:app_id>/site-enable")
api.add_resource(AppApiStatus, "/apps/<uuid:app_id>/api-enable")
api.add_resource(AppTraceApi, "/apps/<uuid:app_id>/trace")
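
The update handler above builds an explicitly typed args_dict before calling the service. A sketch of that construction, assuming ArgsDict is a TypedDict on AppService (the key set here is trimmed for illustration):

from typing import TypedDict


class ArgsDict(TypedDict):
    name: str
    description: str
    icon: str


raw_args = {"name": "my-app", "description": None, "icon": None}

# Coerce optional values to the defaults the TypedDict expects.
args_dict: ArgsDict = {
    "name": raw_args["name"],
    "description": raw_args.get("description") or "",
    "icon": raw_args.get("icon") or "",
}
print(args_dict)  # {'name': 'my-app', 'description': '', 'icon': ''}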

View File

@@ -1,11 +1,11 @@
import logging
from flask import request
from flask_restx import Resource, fields, reqparse
from flask_restx import Resource, reqparse
from werkzeug.exceptions import InternalServerError
import services
from controllers.console import api, console_ns
from controllers.console import api
from controllers.console.app.error import (
AppUnavailableError,
AudioTooLargeError,
@@ -31,21 +31,8 @@ from services.errors.audio import (
UnsupportedAudioTypeServiceError,
)
logger = logging.getLogger(__name__)
@console_ns.route("/apps/<uuid:app_id>/audio-to-text")
class ChatMessageAudioApi(Resource):
@api.doc("chat_message_audio_transcript")
@api.doc(description="Transcript audio to text for chat messages")
@api.doc(params={"app_id": "App ID"})
@api.response(
200,
"Audio transcription successful",
api.model("AudioTranscriptResponse", {"text": fields.String(description="Transcribed text from audio")}),
)
@api.response(400, "Bad request - No audio uploaded or unsupported type")
@api.response(413, "Audio file too large")
@setup_required
@login_required
@account_initialization_required
@@ -62,7 +49,7 @@ class ChatMessageAudioApi(Resource):
return response
except services.errors.app_model_config.AppModelConfigBrokenError:
logger.exception("App model config broken.")
logging.exception("App model config broken.")
raise AppUnavailableError()
except NoAudioUploadedServiceError:
raise NoAudioUploadedError()
@@ -83,32 +70,15 @@ class ChatMessageAudioApi(Resource):
except ValueError as e:
raise e
except Exception as e:
logger.exception("Failed to handle post request to ChatMessageAudioApi")
logging.exception("Failed to handle post request to ChatMessageAudioApi")
raise InternalServerError()
@console_ns.route("/apps/<uuid:app_id>/text-to-audio")
class ChatMessageTextApi(Resource):
@api.doc("chat_message_text_to_speech")
@api.doc(description="Convert text to speech for chat messages")
@api.doc(params={"app_id": "App ID"})
@api.expect(
api.model(
"TextToSpeechRequest",
{
"message_id": fields.String(description="Message ID"),
"text": fields.String(required=True, description="Text to convert to speech"),
"voice": fields.String(description="Voice to use for TTS"),
"streaming": fields.Boolean(description="Whether to stream the audio"),
},
)
)
@api.response(200, "Text to speech conversion successful")
@api.response(400, "Bad request - Invalid parameters")
@get_app_model
@setup_required
@login_required
@account_initialization_required
@get_app_model
def post(self, app_model: App):
try:
parser = reqparse.RequestParser()
@@ -127,7 +97,7 @@ class ChatMessageTextApi(Resource):
)
return response
except services.errors.app_model_config.AppModelConfigBrokenError:
logger.exception("App model config broken.")
logging.exception("App model config broken.")
raise AppUnavailableError()
except NoAudioUploadedServiceError:
raise NoAudioUploadedError()
@@ -148,22 +118,15 @@ class ChatMessageTextApi(Resource):
except ValueError as e:
raise e
except Exception as e:
logger.exception("Failed to handle post request to ChatMessageTextApi")
logging.exception("Failed to handle post request to ChatMessageTextApi")
raise InternalServerError()
@console_ns.route("/apps/<uuid:app_id>/text-to-audio/voices")
class TextModesApi(Resource):
@api.doc("get_text_to_speech_voices")
@api.doc(description="Get available TTS voices for a specific language")
@api.doc(params={"app_id": "App ID"})
@api.expect(api.parser().add_argument("language", type=str, required=True, location="args", help="Language code"))
@api.response(200, "TTS voices retrieved successfully", fields.List(fields.Raw(description="Available voices")))
@api.response(400, "Invalid language parameter")
@get_app_model
@setup_required
@login_required
@account_initialization_required
@get_app_model
def get(self, app_model):
try:
parser = reqparse.RequestParser()
@@ -197,5 +160,10 @@ class TextModesApi(Resource):
except ValueError as e:
raise e
except Exception as e:
logger.exception("Failed to handle get request to TextModesApi")
logging.exception("Failed to handle get request to TextModesApi")
raise InternalServerError()
api.add_resource(ChatMessageAudioApi, "/apps/<uuid:app_id>/audio-to-text")
api.add_resource(ChatMessageTextApi, "/apps/<uuid:app_id>/text-to-audio")
api.add_resource(TextModesApi, "/apps/<uuid:app_id>/text-to-audio/voices")
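
The hunks above swap module-level logging.exception(...) calls for a named module logger. A stdlib-only sketch of the difference; the log message is illustrative:

import logging

logger = logging.getLogger(__name__)  # records carry this module's name

logging.basicConfig(level=logging.INFO, format="%(name)s: %(message)s")

try:
    1 / 0
except ZeroDivisionError:
    # Same traceback capture as logging.exception(), but the record is
    # attributed to this module's logger instead of the root logger.
    logger.exception("Failed to handle request")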

View File

@@ -1,11 +1,12 @@
import logging
import flask_login
from flask import request
from flask_restx import Resource, fields, reqparse
from werkzeug.exceptions import Forbidden, InternalServerError, NotFound
from flask_restx import Resource, reqparse
from werkzeug.exceptions import InternalServerError, NotFound
import services
from controllers.console import api, console_ns
from controllers.console import api
from controllers.console.app.error import (
AppUnavailableError,
CompletionRequestError,
@@ -28,37 +29,14 @@ from core.helper.trace_id_helper import get_external_trace_id
from core.model_runtime.errors.invoke import InvokeError
from libs import helper
from libs.helper import uuid_value
from libs.login import current_user, login_required
from models import Account
from libs.login import login_required
from models.model import AppMode
from services.app_generate_service import AppGenerateService
from services.errors.llm import InvokeRateLimitError
logger = logging.getLogger(__name__)
# define completion message api for user
@console_ns.route("/apps/<uuid:app_id>/completion-messages")
class CompletionMessageApi(Resource):
@api.doc("create_completion_message")
@api.doc(description="Generate completion message for debugging")
@api.doc(params={"app_id": "Application ID"})
@api.expect(
api.model(
"CompletionMessageRequest",
{
"inputs": fields.Raw(required=True, description="Input variables"),
"query": fields.String(description="Query text", default=""),
"files": fields.List(fields.Raw(), description="Uploaded files"),
"model_config": fields.Raw(required=True, description="Model configuration"),
"response_mode": fields.String(enum=["blocking", "streaming"], description="Response mode"),
"retriever_from": fields.String(default="dev", description="Retriever source"),
},
)
)
@api.response(200, "Completion generated successfully")
@api.response(400, "Invalid request parameters")
@api.response(404, "App not found")
@setup_required
@login_required
@account_initialization_required
@@ -76,11 +54,11 @@ class CompletionMessageApi(Resource):
streaming = args["response_mode"] != "blocking"
args["auto_generate_name"] = False
account = flask_login.current_user
try:
if not isinstance(current_user, Account):
raise ValueError("current_user must be an Account or EndUser instance")
response = AppGenerateService.generate(
app_model=app_model, user=current_user, args=args, invoke_from=InvokeFrom.DEBUGGER, streaming=streaming
app_model=app_model, user=account, args=args, invoke_from=InvokeFrom.DEBUGGER, streaming=streaming
)
return helper.compact_generate_response(response)
@@ -89,7 +67,7 @@ class CompletionMessageApi(Resource):
except services.errors.conversation.ConversationCompletedError:
raise ConversationCompletedError()
except services.errors.app_model_config.AppModelConfigBrokenError:
logger.exception("App model config broken.")
logging.exception("App model config broken.")
raise AppUnavailableError()
except ProviderTokenNotInitError as ex:
raise ProviderNotInitializeError(ex.description)
@@ -102,62 +80,29 @@ class CompletionMessageApi(Resource):
except ValueError as e:
raise e
except Exception as e:
logger.exception("internal server error.")
logging.exception("internal server error.")
raise InternalServerError()
@console_ns.route("/apps/<uuid:app_id>/completion-messages/<string:task_id>/stop")
class CompletionMessageStopApi(Resource):
@api.doc("stop_completion_message")
@api.doc(description="Stop a running completion message generation")
@api.doc(params={"app_id": "Application ID", "task_id": "Task ID to stop"})
@api.response(200, "Task stopped successfully")
@setup_required
@login_required
@account_initialization_required
@get_app_model(mode=AppMode.COMPLETION)
def post(self, app_model, task_id):
if not isinstance(current_user, Account):
raise ValueError("current_user must be an Account instance")
AppQueueManager.set_stop_flag(task_id, InvokeFrom.DEBUGGER, current_user.id)
account = flask_login.current_user
AppQueueManager.set_stop_flag(task_id, InvokeFrom.DEBUGGER, account.id)
return {"result": "success"}, 200
@console_ns.route("/apps/<uuid:app_id>/chat-messages")
class ChatMessageApi(Resource):
@api.doc("create_chat_message")
@api.doc(description="Generate chat message for debugging")
@api.doc(params={"app_id": "Application ID"})
@api.expect(
api.model(
"ChatMessageRequest",
{
"inputs": fields.Raw(required=True, description="Input variables"),
"query": fields.String(required=True, description="User query"),
"files": fields.List(fields.Raw(), description="Uploaded files"),
"model_config": fields.Raw(required=True, description="Model configuration"),
"conversation_id": fields.String(description="Conversation ID"),
"parent_message_id": fields.String(description="Parent message ID"),
"response_mode": fields.String(enum=["blocking", "streaming"], description="Response mode"),
"retriever_from": fields.String(default="dev", description="Retriever source"),
},
)
)
@api.response(200, "Chat message generated successfully")
@api.response(400, "Invalid request parameters")
@api.response(404, "App or conversation not found")
@setup_required
@login_required
@account_initialization_required
@get_app_model(mode=[AppMode.CHAT, AppMode.AGENT_CHAT])
def post(self, app_model):
if not isinstance(current_user, Account):
raise Forbidden()
if not current_user.has_edit_permission:
raise Forbidden()
parser = reqparse.RequestParser()
parser.add_argument("inputs", type=dict, required=True, location="json")
parser.add_argument("query", type=str, required=True, location="json")
@@ -176,11 +121,11 @@ class ChatMessageApi(Resource):
if external_trace_id:
args["external_trace_id"] = external_trace_id
account = flask_login.current_user
try:
if not isinstance(current_user, Account):
raise ValueError("current_user must be an Account or EndUser instance")
response = AppGenerateService.generate(
app_model=app_model, user=current_user, args=args, invoke_from=InvokeFrom.DEBUGGER, streaming=streaming
app_model=app_model, user=account, args=args, invoke_from=InvokeFrom.DEBUGGER, streaming=streaming
)
return helper.compact_generate_response(response)
@@ -189,7 +134,7 @@ class ChatMessageApi(Resource):
except services.errors.conversation.ConversationCompletedError:
raise ConversationCompletedError()
except services.errors.app_model_config.AppModelConfigBrokenError:
logger.exception("App model config broken.")
logging.exception("App model config broken.")
raise AppUnavailableError()
except ProviderTokenNotInitError as ex:
raise ProviderNotInitializeError(ex.description)
@@ -204,23 +149,24 @@ class ChatMessageApi(Resource):
except ValueError as e:
raise e
except Exception as e:
logger.exception("internal server error.")
logging.exception("internal server error.")
raise InternalServerError()
@console_ns.route("/apps/<uuid:app_id>/chat-messages/<string:task_id>/stop")
class ChatMessageStopApi(Resource):
@api.doc("stop_chat_message")
@api.doc(description="Stop a running chat message generation")
@api.doc(params={"app_id": "Application ID", "task_id": "Task ID to stop"})
@api.response(200, "Task stopped successfully")
@setup_required
@login_required
@account_initialization_required
@get_app_model(mode=[AppMode.CHAT, AppMode.AGENT_CHAT, AppMode.ADVANCED_CHAT])
def post(self, app_model, task_id):
if not isinstance(current_user, Account):
raise ValueError("current_user must be an Account instance")
AppQueueManager.set_stop_flag(task_id, InvokeFrom.DEBUGGER, current_user.id)
account = flask_login.current_user
AppQueueManager.set_stop_flag(task_id, InvokeFrom.DEBUGGER, account.id)
return {"result": "success"}, 200
api.add_resource(CompletionMessageApi, "/apps/<uuid:app_id>/completion-messages")
api.add_resource(CompletionMessageStopApi, "/apps/<uuid:app_id>/completion-messages/<string:task_id>/stop")
api.add_resource(ChatMessageApi, "/apps/<uuid:app_id>/chat-messages")
api.add_resource(ChatMessageStopApi, "/apps/<uuid:app_id>/chat-messages/<string:task_id>/stop")
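
These handlers add an isinstance guard before touching current_user, which both validates at runtime and narrows the type for checkers. A self-contained sketch; Account and current_user here are stand-ins, not the real imports:

class Account:
    id = "account-1"


class EndUser:
    id = "end-user-1"


current_user: object = Account()

if not isinstance(current_user, Account):
    raise ValueError("current_user must be an Account instance")

# After the guard, type checkers know current_user is an Account.
print(current_user.id)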

View File

@@ -8,7 +8,7 @@ from sqlalchemy import func, or_
from sqlalchemy.orm import joinedload
from werkzeug.exceptions import Forbidden, NotFound
from controllers.console import api, console_ns
from controllers.console import api
from controllers.console.app.wraps import get_app_model
from controllers.console.wraps import account_initialization_required, setup_required
from core.app.entities.app_invoke_entities import InvokeFrom
@@ -22,35 +22,13 @@ from fields.conversation_fields import (
from libs.datetime_utils import naive_utc_now
from libs.helper import DatetimeString
from libs.login import login_required
from models import Account, Conversation, EndUser, Message, MessageAnnotation
from models import Conversation, EndUser, Message, MessageAnnotation
from models.model import AppMode
from services.conversation_service import ConversationService
from services.errors.conversation import ConversationNotExistsError
@console_ns.route("/apps/<uuid:app_id>/completion-conversations")
class CompletionConversationApi(Resource):
@api.doc("list_completion_conversations")
@api.doc(description="Get completion conversations with pagination and filtering")
@api.doc(params={"app_id": "Application ID"})
@api.expect(
api.parser()
.add_argument("keyword", type=str, location="args", help="Search keyword")
.add_argument("start", type=str, location="args", help="Start date (YYYY-MM-DD HH:MM)")
.add_argument("end", type=str, location="args", help="End date (YYYY-MM-DD HH:MM)")
.add_argument(
"annotation_status",
type=str,
location="args",
choices=["annotated", "not_annotated", "all"],
default="all",
help="Annotation status filter",
)
.add_argument("page", type=int, location="args", default=1, help="Page number")
.add_argument("limit", type=int, location="args", default=20, help="Page size (1-100)")
)
@api.response(200, "Success", conversation_pagination_fields)
@api.response(403, "Insufficient permissions")
@setup_required
@login_required
@account_initialization_required
@@ -123,14 +101,7 @@ class CompletionConversationApi(Resource):
return conversations
@console_ns.route("/apps/<uuid:app_id>/completion-conversations/<uuid:conversation_id>")
class CompletionConversationDetailApi(Resource):
@api.doc("get_completion_conversation")
@api.doc(description="Get completion conversation details with messages")
@api.doc(params={"app_id": "Application ID", "conversation_id": "Conversation ID"})
@api.response(200, "Success", conversation_message_detail_fields)
@api.response(403, "Insufficient permissions")
@api.response(404, "Conversation not found")
@setup_required
@login_required
@account_initialization_required
@@ -143,24 +114,16 @@ class CompletionConversationDetailApi(Resource):
return _get_conversation(app_model, conversation_id)
@api.doc("delete_completion_conversation")
@api.doc(description="Delete a completion conversation")
@api.doc(params={"app_id": "Application ID", "conversation_id": "Conversation ID"})
@api.response(204, "Conversation deleted successfully")
@api.response(403, "Insufficient permissions")
@api.response(404, "Conversation not found")
@setup_required
@login_required
@account_initialization_required
@get_app_model(mode=AppMode.COMPLETION)
@get_app_model(mode=[AppMode.CHAT, AppMode.AGENT_CHAT, AppMode.ADVANCED_CHAT])
def delete(self, app_model, conversation_id):
if not current_user.is_editor:
raise Forbidden()
conversation_id = str(conversation_id)
try:
if not isinstance(current_user, Account):
raise ValueError("current_user must be an Account instance")
ConversationService.delete(app_model, conversation_id, current_user)
except ConversationNotExistsError:
raise NotFound("Conversation Not Exists.")
@@ -168,38 +131,7 @@ class CompletionConversationDetailApi(Resource):
return {"result": "success"}, 204
@console_ns.route("/apps/<uuid:app_id>/chat-conversations")
class ChatConversationApi(Resource):
@api.doc("list_chat_conversations")
@api.doc(description="Get chat conversations with pagination, filtering and summary")
@api.doc(params={"app_id": "Application ID"})
@api.expect(
api.parser()
.add_argument("keyword", type=str, location="args", help="Search keyword")
.add_argument("start", type=str, location="args", help="Start date (YYYY-MM-DD HH:MM)")
.add_argument("end", type=str, location="args", help="End date (YYYY-MM-DD HH:MM)")
.add_argument(
"annotation_status",
type=str,
location="args",
choices=["annotated", "not_annotated", "all"],
default="all",
help="Annotation status filter",
)
.add_argument("message_count_gte", type=int, location="args", help="Minimum message count")
.add_argument("page", type=int, location="args", default=1, help="Page number")
.add_argument("limit", type=int, location="args", default=20, help="Page size (1-100)")
.add_argument(
"sort_by",
type=str,
location="args",
choices=["created_at", "-created_at", "updated_at", "-updated_at"],
default="-updated_at",
help="Sort field and direction",
)
)
@api.response(200, "Success", conversation_with_summary_pagination_fields)
@api.response(403, "Insufficient permissions")
@setup_required
@login_required
@account_initialization_required
@@ -307,7 +239,7 @@ class ChatConversationApi(Resource):
.having(func.count(Message.id) >= args["message_count_gte"])
)
if app_model.mode == AppMode.ADVANCED_CHAT:
if app_model.mode == AppMode.ADVANCED_CHAT.value:
query = query.where(Conversation.invoke_from != InvokeFrom.DEBUGGER.value)
match args["sort_by"]:
@@ -327,14 +259,7 @@ class ChatConversationApi(Resource):
return conversations
@console_ns.route("/apps/<uuid:app_id>/chat-conversations/<uuid:conversation_id>")
class ChatConversationDetailApi(Resource):
@api.doc("get_chat_conversation")
@api.doc(description="Get chat conversation details")
@api.doc(params={"app_id": "Application ID", "conversation_id": "Conversation ID"})
@api.response(200, "Success", conversation_detail_fields)
@api.response(403, "Insufficient permissions")
@api.response(404, "Conversation not found")
@setup_required
@login_required
@account_initialization_required
@@ -347,12 +272,6 @@ class ChatConversationDetailApi(Resource):
return _get_conversation(app_model, conversation_id)
@api.doc("delete_chat_conversation")
@api.doc(description="Delete a chat conversation")
@api.doc(params={"app_id": "Application ID", "conversation_id": "Conversation ID"})
@api.response(204, "Conversation deleted successfully")
@api.response(403, "Insufficient permissions")
@api.response(404, "Conversation not found")
@setup_required
@login_required
@get_app_model(mode=[AppMode.CHAT, AppMode.AGENT_CHAT, AppMode.ADVANCED_CHAT])
@@ -363,8 +282,6 @@ class ChatConversationDetailApi(Resource):
conversation_id = str(conversation_id)
try:
if not isinstance(current_user, Account):
raise ValueError("current_user must be an Account instance")
ConversationService.delete(app_model, conversation_id, current_user)
except ConversationNotExistsError:
raise NotFound("Conversation Not Exists.")
@@ -372,6 +289,12 @@ class ChatConversationDetailApi(Resource):
return {"result": "success"}, 204
api.add_resource(CompletionConversationApi, "/apps/<uuid:app_id>/completion-conversations")
api.add_resource(CompletionConversationDetailApi, "/apps/<uuid:app_id>/completion-conversations/<uuid:conversation_id>")
api.add_resource(ChatConversationApi, "/apps/<uuid:app_id>/chat-conversations")
api.add_resource(ChatConversationDetailApi, "/apps/<uuid:app_id>/chat-conversations/<uuid:conversation_id>")
def _get_conversation(app_model, conversation_id):
conversation = (
db.session.query(Conversation)

Some files were not shown because too many files have changed in this diff