Compare commits

16 Commits

Author SHA1 Message Date
GareArc 7f6d48175a Refactor WorkflowService to handle missing default credentials gracefully 2025-09-18 23:56:04 -07:00
GareArc ac9e9b26d0 Enhance LLM model configuration validation to include active status check 2025-09-15 02:16:59 -07:00
zxhlyh 07ca4071d4 Fix/enterprise model credential (#25703) 2025-09-15 11:51:20 +08:00
zxhlyh 34c05bcd1a fix: favicon 2025-09-15 11:37:27 +08:00
hjlarry 8f7ed7df8c add credential status migration file 2025-09-15 10:35:03 +08:00
autofix-ci[bot] 548884c71b [autofix.ci] apply automated fixes 2025-09-15 02:24:44 +00:00
GareArc aec72ba2d5 Merge remote-tracking branch 'origin/main' into release/e-1.8.1 2025-09-14 19:23:37 -07:00
zxhlyh 941ad3b4a0 Fix/enterprise model credential (#25611) 2025-09-12 18:35:45 +08:00
zxhlyh d97e936590 fix: model modal 2025-09-12 18:34:32 +08:00
GareArc 50fec8fc3c fix: add comments for API contract compatibility in PluginCredentialType enum 2025-09-12 02:57:38 -07:00
zxhlyh 2f3d09c1ca fix: credential item style 2025-09-12 17:11:26 +08:00
zxhlyh 134dfe7c9b fix: model modal 2025-09-12 16:50:23 +08:00
zxhlyh 8a43aedc15 Fix/enterprise model credential (#25591) 2025-09-12 15:42:16 +08:00
zxhlyh f07abfb781 fix: unavailable credential in plugin detail 2025-09-11 17:01:40 +08:00
zxhlyh ba1a20ee59 fix: help url in model modal 2025-09-10 17:47:31 +08:00
zxhlyh 3c463c1e3a fix: credential in tool node & switch credential in load balancing 2025-09-10 15:49:49 +08:00
723 changed files with 15442 additions and 26649 deletions

View File

@@ -1,12 +0,0 @@
version: 2
updates:
- package-ecosystem: "npm"
directory: "/web"
schedule:
interval: "weekly"
open-pull-requests-limit: 2
- package-ecosystem: "uv"
directory: "/api"
schedule:
interval: "weekly"
open-pull-requests-limit: 2

View File

@@ -22,33 +22,13 @@ jobs:
# Fix lint errors
uv run ruff check --fix .
# Format code
uv run ruff format ..
uv run ruff format .
- name: ast-grep
run: |
uvx --from ast-grep-cli sg --pattern 'db.session.query($WHATEVER).filter($HERE)' --rewrite 'db.session.query($WHATEVER).where($HERE)' -l py --update-all
uvx --from ast-grep-cli sg --pattern 'session.query($WHATEVER).filter($HERE)' --rewrite 'session.query($WHATEVER).where($HERE)' -l py --update-all
# Convert Optional[T] to T | None (ignoring quoted types)
cat > /tmp/optional-rule.yml << 'EOF'
id: convert-optional-to-union
language: python
rule:
kind: generic_type
all:
- has:
kind: identifier
pattern: Optional
- has:
kind: type_parameter
has:
kind: type
pattern: $T
fix: $T | None
EOF
uvx --from ast-grep-cli sg scan --inline-rules "$(cat /tmp/optional-rule.yml)" --update-all
# Fix forward references that were incorrectly converted (Python doesn't support "Type" | None syntax)
find . -name "*.py" -type f -exec sed -i.bak -E 's/"([^"]+)" \| None/Optional["\1"]/g; s/'"'"'([^'"'"']+)'"'"' \| None/Optional['"'"'\1'"'"']/g' {} \;
find . -name "*.py.bak" -type f -delete
- name: mdformat
run: |
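The sed step above guards a real runtime pitfall: PEP 604 unions evaluate `|` eagerly, so a quoted forward reference cannot be written as `"Type" | None`. A minimal illustration (the `Widget` class is hypothetical):

```python
from typing import Optional

class Widget:
    pass

# Fine: the forward reference stays wrapped inside Optional[...]
a: Optional["Widget"] = None

# Fine: PEP 604 union of real class objects (Python 3.10+)
b: Widget | None = None

# Broken: module-level annotations are evaluated, and str | None raises TypeError
# c: "Widget" | None = None
```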

View File

@@ -12,6 +12,7 @@ permissions:
statuses: write
contents: read
jobs:
python-style:
name: Python Style
@@ -43,10 +44,6 @@ jobs:
if: steps.changed-files.outputs.any_changed == 'true'
run: uv sync --project api --dev
- name: Run Import Linter
if: steps.changed-files.outputs.any_changed == 'true'
run: uv run --directory api --dev lint-imports
- name: Run Basedpyright Checks
if: steps.changed-files.outputs.any_changed == 'true'
run: dev/basedpyright-check

View File

@@ -1,87 +0,0 @@
# AGENTS.md
## Project Overview
Dify is an open-source platform for developing LLM applications with an intuitive interface combining agentic AI workflows, RAG pipelines, agent capabilities, and model management.
The codebase consists of:
- **Backend API** (`/api`): Python Flask application with Domain-Driven Design architecture
- **Frontend Web** (`/web`): Next.js 15 application with TypeScript and React 19
- **Docker deployment** (`/docker`): Containerized deployment configurations
## Development Commands
### Backend (API)
All Python commands must be prefixed with `uv run --project api`:
```bash
# Start development servers
./dev/start-api # Start API server
./dev/start-worker # Start Celery worker
# Run tests
uv run --project api pytest # Run all tests
uv run --project api pytest tests/unit_tests/ # Unit tests only
uv run --project api pytest tests/integration_tests/ # Integration tests
# Code quality
./dev/reformat # Run all formatters and linters
uv run --project api ruff check --fix ./ # Fix linting issues
uv run --project api ruff format ./ # Format code
uv run --directory api basedpyright # Type checking
```
### Frontend (Web)
```bash
cd web
pnpm lint # Run ESLint
pnpm eslint-fix # Fix ESLint issues
pnpm test # Run Jest tests
```
## Testing Guidelines
### Backend Testing
- Use `pytest` for all backend tests
- Write tests first (TDD approach)
- Test structure: Arrange-Act-Assert
## Code Style Requirements
### Python
- Use type hints for all functions and class attributes
- No `Any` types unless absolutely necessary
- Implement special methods (`__repr__`, `__str__`) appropriately
### TypeScript/JavaScript
- Strict TypeScript configuration
- ESLint with Prettier integration
- Avoid `any` type
## Important Notes
- **Environment Variables**: Always use UV for Python commands: `uv run --project api <command>`
- **Comments**: Only write meaningful comments that explain "why", not "what"
- **File Creation**: Always prefer editing existing files over creating new ones
- **Documentation**: Don't create documentation files unless explicitly requested
- **Code Quality**: Always run `./dev/reformat` before committing backend changes
## Common Development Tasks
### Adding a New API Endpoint
1. Create controller in `/api/controllers/`
1. Add service logic in `/api/services/`
1. Update routes in controller's `__init__.py`
1. Write tests in `/api/tests/`
## Project-Specific Conventions
- All async tasks use Celery with Redis as broker
- **Internationalization**: Frontend supports multiple languages with English (`web/i18n/en-US/`) as the source. All user-facing text must use i18n keys, no hardcoded strings. Edit corresponding module files in `en-US/` directory for translations.

AGENTS.md Symbolic link
View File

@@ -0,0 +1 @@
CLAUDE.md

View File

@@ -1 +0,0 @@
AGENTS.md

CLAUDE.md Normal file
View File

@@ -0,0 +1,89 @@
# CLAUDE.md
This file provides guidance to Claude Code (claude.ai/code) when working with code in this repository.
## Project Overview
Dify is an open-source platform for developing LLM applications with an intuitive interface combining agentic AI workflows, RAG pipelines, agent capabilities, and model management.
The codebase consists of:
- **Backend API** (`/api`): Python Flask application with Domain-Driven Design architecture
- **Frontend Web** (`/web`): Next.js 15 application with TypeScript and React 19
- **Docker deployment** (`/docker`): Containerized deployment configurations
## Development Commands
### Backend (API)
All Python commands must be prefixed with `uv run --project api`:
```bash
# Start development servers
./dev/start-api # Start API server
./dev/start-worker # Start Celery worker
# Run tests
uv run --project api pytest # Run all tests
uv run --project api pytest tests/unit_tests/ # Unit tests only
uv run --project api pytest tests/integration_tests/ # Integration tests
# Code quality
./dev/reformat # Run all formatters and linters
uv run --project api ruff check --fix ./ # Fix linting issues
uv run --project api ruff format ./ # Format code
uv run --directory api basedpyright # Type checking
```
### Frontend (Web)
```bash
cd web
pnpm lint # Run ESLint
pnpm eslint-fix # Fix ESLint issues
pnpm test # Run Jest tests
```
## Testing Guidelines
### Backend Testing
- Use `pytest` for all backend tests
- Write tests first (TDD approach)
- Test structure: Arrange-Act-Assert
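For illustration, a self-contained test in that Arrange-Act-Assert shape (the `slugify` helper is a stand-in, not a Dify function):

```python
def slugify(text: str) -> str:
    return "-".join(text.lower().split())

def test_slugify_lowercases_and_hyphenates():
    # Arrange
    raw = "Hello World"
    # Act
    result = slugify(raw)
    # Assert
    assert result == "hello-world"
```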
## Code Style Requirements
### Python
- Use type hints for all functions and class attributes
- No `Any` types unless absolutely necessary
- Implement special methods (`__repr__`, `__str__`) appropriately
### TypeScript/JavaScript
- Strict TypeScript configuration
- ESLint with Prettier integration
- Avoid `any` type
## Important Notes
- **Environment Variables**: Always use UV for Python commands: `uv run --project api <command>`
- **Comments**: Only write meaningful comments that explain "why", not "what"
- **File Creation**: Always prefer editing existing files over creating new ones
- **Documentation**: Don't create documentation files unless explicitly requested
- **Code Quality**: Always run `./dev/reformat` before committing backend changes
## Common Development Tasks
### Adding a New API Endpoint
1. Create controller in `/api/controllers/`
1. Add service logic in `/api/services/`
1. Update routes in controller's `__init__.py`
1. Write tests in `/api/tests/`
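A rough sketch of that controller/service split, with hypothetical names rather than actual Dify modules (Dify's controllers may register routes differently):

```python
from flask import Blueprint, jsonify

# services/ping_service.py (hypothetical service-layer function)
def get_ping_status() -> dict[str, str]:
    return {"status": "ok"}

# controllers/ping.py (hypothetical controller; wire up via app.register_blueprint(ping_bp))
ping_bp = Blueprint("ping", __name__)

@ping_bp.get("/ping")
def ping():
    return jsonify(get_ping_status())
```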
## Project-Specific Conventions
- All async tasks use Celery with Redis as broker
- **Internationalization**: Frontend supports multiple languages with English (`web/i18n/en-US/`) as the source. All user-facing text must use i18n keys, no hardcoded strings. Edit corresponding module files in `en-US/` directory for translations.
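As a minimal sketch of the Celery-with-Redis convention (broker URL and task name are placeholders, not Dify's wiring):

```python
from celery import Celery

celery_app = Celery("dify", broker="redis://localhost:6379/1")

@celery_app.task
def reindex_dataset(dataset_id: str) -> None:
    # long-running work runs on a worker, outside the request cycle
    ...
```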

View File

@@ -328,7 +328,7 @@ MATRIXONE_DATABASE=dify
LINDORM_URL=http://ld-*******************-proxy-search-pub.lindorm.aliyuncs.com:30070
LINDORM_USERNAME=admin
LINDORM_PASSWORD=admin
LINDORM_USING_UGC=True
USING_UGC_INDEX=False
LINDORM_QUERY_TIMEOUT=1
# OceanBase Vector configuration
@@ -461,16 +461,6 @@ WORKFLOW_CALL_MAX_DEPTH=5
WORKFLOW_PARALLEL_DEPTH_LIMIT=3
MAX_VARIABLE_SIZE=204800
# GraphEngine Worker Pool Configuration
# Minimum number of workers per GraphEngine instance (default: 1)
GRAPH_ENGINE_MIN_WORKERS=1
# Maximum number of workers per GraphEngine instance (default: 10)
GRAPH_ENGINE_MAX_WORKERS=10
# Queue depth threshold that triggers worker scale up (default: 3)
GRAPH_ENGINE_SCALE_UP_THRESHOLD=3
# Seconds of idle time before scaling down workers (default: 5.0)
GRAPH_ENGINE_SCALE_DOWN_IDLE_TIME=5.0
# Workflow storage configuration
# Options: rdbms, hybrid
# rdbms: Use only the relational database (default)
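The removed GraphEngine settings imply a queue-depth/idle-time autoscaling rule roughly like the following (an illustrative reading of the defaults, not Dify's actual implementation):

```python
def desired_worker_count(current: int, queue_depth: int, idle_seconds: float) -> int:
    MIN_WORKERS, MAX_WORKERS = 1, 10   # GRAPH_ENGINE_MIN_WORKERS / _MAX_WORKERS
    SCALE_UP_THRESHOLD = 3             # GRAPH_ENGINE_SCALE_UP_THRESHOLD
    SCALE_DOWN_IDLE_TIME = 5.0         # GRAPH_ENGINE_SCALE_DOWN_IDLE_TIME

    if queue_depth > SCALE_UP_THRESHOLD and current < MAX_WORKERS:
        return current + 1             # queue is backing up: add a worker
    if idle_seconds >= SCALE_DOWN_IDLE_TIME and current > MIN_WORKERS:
        return current - 1             # workers sat idle: shed one
    return current
```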

View File

@@ -1,105 +0,0 @@
[importlinter]
root_packages =
core
configs
controllers
models
tasks
services
[importlinter:contract:workflow]
name = Workflow
type=layers
layers =
graph_engine
graph_events
graph
nodes
node_events
entities
containers =
core.workflow
ignore_imports =
core.workflow.nodes.base.node -> core.workflow.graph_events
core.workflow.nodes.iteration.iteration_node -> core.workflow.graph_events
core.workflow.nodes.loop.loop_node -> core.workflow.graph_events
core.workflow.nodes.node_factory -> core.workflow.graph
core.workflow.nodes.iteration.iteration_node -> core.workflow.graph_engine
core.workflow.nodes.iteration.iteration_node -> core.workflow.graph
core.workflow.nodes.iteration.iteration_node -> core.workflow.graph_engine.command_channels
core.workflow.nodes.loop.loop_node -> core.workflow.graph_engine
core.workflow.nodes.loop.loop_node -> core.workflow.graph
core.workflow.nodes.loop.loop_node -> core.workflow.graph_engine.command_channels
[importlinter:contract:rsc]
name = RSC
type = layers
layers =
graph_engine
response_coordinator
containers =
core.workflow.graph_engine
[importlinter:contract:worker]
name = Worker
type = layers
layers =
graph_engine
worker
containers =
core.workflow.graph_engine
[importlinter:contract:graph-engine-architecture]
name = Graph Engine Architecture
type = layers
layers =
graph_engine
orchestration
command_processing
event_management
error_handler
graph_traversal
graph_state_manager
worker_management
domain
containers =
core.workflow.graph_engine
[importlinter:contract:domain-isolation]
name = Domain Model Isolation
type = forbidden
source_modules =
core.workflow.graph_engine.domain
forbidden_modules =
core.workflow.graph_engine.worker_management
core.workflow.graph_engine.command_channels
core.workflow.graph_engine.layers
core.workflow.graph_engine.protocols
[importlinter:contract:worker-management]
name = Worker Management
type = forbidden
source_modules =
core.workflow.graph_engine.worker_management
forbidden_modules =
core.workflow.graph_engine.orchestration
core.workflow.graph_engine.command_processing
core.workflow.graph_engine.event_management
[importlinter:contract:graph-traversal-components]
name = Graph Traversal Components
type = layers
layers =
edge_processor
skip_propagator
containers =
core.workflow.graph_engine.graph_traversal
[importlinter:contract:command-channels]
name = Command Channels Independence
type = independence
modules =
core.workflow.graph_engine.command_channels.in_memory_channel
core.workflow.graph_engine.command_channels.redis_channel

View File

@@ -5,7 +5,7 @@ line-length = 120
quote-style = "double"
[lint]
preview = true
preview = false
select = [
"B", # flake8-bugbear rules
"C4", # flake8-comprehensions
@@ -65,7 +65,6 @@ ignore = [
"B006", # mutable-argument-default
"B007", # unused-loop-control-variable
"B026", # star-arg-unpacking-after-keyword-arg
"B901", # allow return in yield
"B903", # class-as-data-structure
"B904", # raise-without-from-inside-except
"B905", # zip-without-explicit-strict

View File

@@ -1,9 +1,8 @@
import base64
import json
import logging
import operator
import secrets
from typing import Any
from typing import Any, Optional
import click
import sqlalchemy as sa
@@ -14,6 +13,7 @@ from sqlalchemy.exc import SQLAlchemyError
from configs import dify_config
from constants.languages import languages
from core.plugin.entities.plugin import ToolProviderID
from core.rag.datasource.vdb.vector_factory import Vector
from core.rag.datasource.vdb.vector_type import VectorType
from core.rag.index_processor.constant.built_in_field import BuiltInField
@@ -31,7 +31,6 @@ from models.dataset import Dataset, DatasetCollectionBinding, DatasetMetadata, D
from models.dataset import Document as DatasetDocument
from models.model import Account, App, AppAnnotationSetting, AppMode, Conversation, MessageAnnotation
from models.provider import Provider, ProviderModel
from models.provider_ids import ToolProviderID
from models.tools import ToolOAuthSystemClient
from services.account_service import AccountService, RegisterService, TenantService
from services.clear_free_plan_tenant_expired_logs import ClearFreePlanTenantExpiredLogs
@@ -640,7 +639,7 @@ def old_metadata_migration():
@click.option("--email", prompt=True, help="Tenant account email.")
@click.option("--name", prompt=True, help="Workspace name.")
@click.option("--language", prompt=True, help="Account language, default: en-US.")
def create_tenant(email: str, language: str | None = None, name: str | None = None):
def create_tenant(email: str, language: Optional[str] = None, name: Optional[str] = None):
"""
Create tenant account
"""
@@ -954,7 +953,7 @@ def clear_orphaned_file_records(force: bool):
click.echo(click.style("- Deleting orphaned message_files records", fg="white"))
query = "DELETE FROM message_files WHERE id IN :ids"
with db.engine.begin() as conn:
conn.execute(sa.text(query), {"ids": tuple(record["id"] for record in orphaned_message_files)})
conn.execute(sa.text(query), {"ids": tuple([record["id"] for record in orphaned_message_files])})
click.echo(
click.style(f"Removed {len(orphaned_message_files)} orphaned message_files records.", fg="green")
)
@@ -1308,7 +1307,7 @@ def cleanup_orphaned_draft_variables(
if dry_run:
logger.info("DRY RUN: Would delete the following:")
for app_id, count in sorted(stats["orphaned_by_app"].items(), key=operator.itemgetter(1), reverse=True)[
for app_id, count in sorted(stats["orphaned_by_app"].items(), key=lambda x: x[1], reverse=True)[
:10
]: # Show top 10
logger.info(" App %s: %s variables", app_id, count)
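Both rewrites in this file swap equivalent forms: `operator.itemgetter(1)` for `lambda x: x[1]` as a sort key, and a list comprehension inside `tuple()` for a generator expression. A quick self-check:

```python
import operator

counts = list({"app-a": 3, "app-b": 7}.items())
assert sorted(counts, key=operator.itemgetter(1), reverse=True) == sorted(
    counts, key=lambda x: x[1], reverse=True
)

records = [{"id": 1}, {"id": 2}]
assert tuple(r["id"] for r in records) == tuple([r["id"] for r in records])
```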

View File

@@ -1,3 +1,5 @@
from typing import Optional
from pydantic import Field
from pydantic_settings import BaseSettings
@@ -7,28 +9,28 @@ class NotionConfig(BaseSettings):
Configuration settings for Notion integration
"""
NOTION_CLIENT_ID: str | None = Field(
NOTION_CLIENT_ID: Optional[str] = Field(
description="Client ID for Notion API authentication. Required for OAuth 2.0 flow.",
default=None,
)
NOTION_CLIENT_SECRET: str | None = Field(
NOTION_CLIENT_SECRET: Optional[str] = Field(
description="Client secret for Notion API authentication. Required for OAuth 2.0 flow.",
default=None,
)
NOTION_INTEGRATION_TYPE: str | None = Field(
NOTION_INTEGRATION_TYPE: Optional[str] = Field(
description="Type of Notion integration."
" Set to 'internal' for internal integrations, or None for public integrations.",
default=None,
)
NOTION_INTERNAL_SECRET: str | None = Field(
NOTION_INTERNAL_SECRET: Optional[str] = Field(
description="Secret key for internal Notion integrations. Required when NOTION_INTEGRATION_TYPE is 'internal'.",
default=None,
)
NOTION_INTEGRATION_TOKEN: str | None = Field(
NOTION_INTEGRATION_TOKEN: Optional[str] = Field(
description="Integration token for Notion API access. Used for direct API calls without OAuth flow.",
default=None,
)
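The `Optional[str]` and `str | None` spellings throughout these config diffs are the same type; only the notation changes. A standalone sketch of the pattern (assuming pydantic-settings v2 and a made-up `EXAMPLE_API_KEY` setting):

```python
from typing import Optional

from pydantic import Field
from pydantic_settings import BaseSettings

class ExampleConfig(BaseSettings):
    # Optional[str] here is interchangeable with str | None
    EXAMPLE_API_KEY: Optional[str] = Field(
        description="Hypothetical optional setting, read from the environment.",
        default=None,
    )

config = ExampleConfig()  # falls back to default=None when the env var is unset
```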

View File

@@ -1,3 +1,5 @@
from typing import Optional
from pydantic import Field, NonNegativeFloat
from pydantic_settings import BaseSettings
@@ -7,7 +9,7 @@ class SentryConfig(BaseSettings):
Configuration settings for Sentry error tracking and performance monitoring
"""
SENTRY_DSN: str | None = Field(
SENTRY_DSN: Optional[str] = Field(
description="Sentry Data Source Name (DSN)."
" This is the unique identifier of your Sentry project, used to send events to the correct project.",
default=None,

View File

@@ -1,4 +1,4 @@
from typing import Literal
from typing import Literal, Optional
from pydantic import (
AliasChoices,
@@ -57,7 +57,7 @@ class SecurityConfig(BaseSettings):
default=False,
)
ADMIN_API_KEY: str | None = Field(
ADMIN_API_KEY: Optional[str] = Field(
description="admin api key for authentication",
default=None,
)
@@ -97,17 +97,17 @@ class CodeExecutionSandboxConfig(BaseSettings):
default="dify-sandbox",
)
CODE_EXECUTION_CONNECT_TIMEOUT: float | None = Field(
CODE_EXECUTION_CONNECT_TIMEOUT: Optional[float] = Field(
description="Connection timeout in seconds for code execution requests",
default=10.0,
)
CODE_EXECUTION_READ_TIMEOUT: float | None = Field(
CODE_EXECUTION_READ_TIMEOUT: Optional[float] = Field(
description="Read timeout in seconds for code execution requests",
default=60.0,
)
CODE_EXECUTION_WRITE_TIMEOUT: float | None = Field(
CODE_EXECUTION_WRITE_TIMEOUT: Optional[float] = Field(
description="Write timeout in seconds for code execution request",
default=10.0,
)
@@ -368,17 +368,17 @@ class HttpConfig(BaseSettings):
default=3,
)
SSRF_PROXY_ALL_URL: str | None = Field(
SSRF_PROXY_ALL_URL: Optional[str] = Field(
description="Proxy URL for HTTP or HTTPS requests to prevent Server-Side Request Forgery (SSRF)",
default=None,
)
SSRF_PROXY_HTTP_URL: str | None = Field(
SSRF_PROXY_HTTP_URL: Optional[str] = Field(
description="Proxy URL for HTTP requests to prevent Server-Side Request Forgery (SSRF)",
default=None,
)
SSRF_PROXY_HTTPS_URL: str | None = Field(
SSRF_PROXY_HTTPS_URL: Optional[str] = Field(
description="Proxy URL for HTTPS requests to prevent Server-Side Request Forgery (SSRF)",
default=None,
)
@@ -420,7 +420,7 @@ class InnerAPIConfig(BaseSettings):
default=False,
)
INNER_API_KEY: str | None = Field(
INNER_API_KEY: Optional[str] = Field(
description="API key for accessing the internal API",
default=None,
)
@@ -436,7 +436,7 @@ class LoggingConfig(BaseSettings):
default="INFO",
)
LOG_FILE: str | None = Field(
LOG_FILE: Optional[str] = Field(
description="File path for log output.",
default=None,
)
@@ -456,12 +456,12 @@ class LoggingConfig(BaseSettings):
default="%(asctime)s.%(msecs)03d %(levelname)s [%(threadName)s] [%(filename)s:%(lineno)d] - %(message)s",
)
LOG_DATEFORMAT: str | None = Field(
LOG_DATEFORMAT: Optional[str] = Field(
description="Date format string for log timestamps",
default=None,
)
LOG_TZ: str | None = Field(
LOG_TZ: Optional[str] = Field(
description="Timezone for log timestamps (e.g., 'America/New_York')",
default="UTC",
)
@@ -535,28 +535,6 @@ class WorkflowConfig(BaseSettings):
default=200 * 1024,
)
# GraphEngine Worker Pool Configuration
GRAPH_ENGINE_MIN_WORKERS: PositiveInt = Field(
description="Minimum number of workers per GraphEngine instance",
default=1,
)
GRAPH_ENGINE_MAX_WORKERS: PositiveInt = Field(
description="Maximum number of workers per GraphEngine instance",
default=10,
)
GRAPH_ENGINE_SCALE_UP_THRESHOLD: PositiveInt = Field(
description="Queue depth threshold that triggers worker scale up",
default=3,
)
GRAPH_ENGINE_SCALE_DOWN_IDLE_TIME: float = Field(
description="Seconds of idle time before scaling down workers",
default=5.0,
ge=0.1,
)
class WorkflowNodeExecutionConfig(BaseSettings):
"""
@@ -617,22 +595,22 @@ class AuthConfig(BaseSettings):
default="/console/api/oauth/authorize",
)
GITHUB_CLIENT_ID: str | None = Field(
GITHUB_CLIENT_ID: Optional[str] = Field(
description="GitHub OAuth client ID",
default=None,
)
GITHUB_CLIENT_SECRET: str | None = Field(
GITHUB_CLIENT_SECRET: Optional[str] = Field(
description="GitHub OAuth client secret",
default=None,
)
GOOGLE_CLIENT_ID: str | None = Field(
GOOGLE_CLIENT_ID: Optional[str] = Field(
description="Google OAuth client ID",
default=None,
)
GOOGLE_CLIENT_SECRET: str | None = Field(
GOOGLE_CLIENT_SECRET: Optional[str] = Field(
description="Google OAuth client secret",
default=None,
)
@@ -700,42 +678,42 @@ class MailConfig(BaseSettings):
Configuration for email services
"""
MAIL_TYPE: str | None = Field(
MAIL_TYPE: Optional[str] = Field(
description="Email service provider type ('smtp' or 'resend' or 'sendGrid), default to None.",
default=None,
)
MAIL_DEFAULT_SEND_FROM: str | None = Field(
MAIL_DEFAULT_SEND_FROM: Optional[str] = Field(
description="Default email address to use as the sender",
default=None,
)
RESEND_API_KEY: str | None = Field(
RESEND_API_KEY: Optional[str] = Field(
description="API key for Resend email service",
default=None,
)
RESEND_API_URL: str | None = Field(
RESEND_API_URL: Optional[str] = Field(
description="API URL for Resend email service",
default=None,
)
SMTP_SERVER: str | None = Field(
SMTP_SERVER: Optional[str] = Field(
description="SMTP server hostname",
default=None,
)
SMTP_PORT: int | None = Field(
SMTP_PORT: Optional[int] = Field(
description="SMTP server port number",
default=465,
)
SMTP_USERNAME: str | None = Field(
SMTP_USERNAME: Optional[str] = Field(
description="Username for SMTP authentication",
default=None,
)
SMTP_PASSWORD: str | None = Field(
SMTP_PASSWORD: Optional[str] = Field(
description="Password for SMTP authentication",
default=None,
)
@@ -755,7 +733,7 @@ class MailConfig(BaseSettings):
default=50,
)
SENDGRID_API_KEY: str | None = Field(
SENDGRID_API_KEY: Optional[str] = Field(
description="API key for SendGrid service",
default=None,
)
@@ -778,17 +756,17 @@ class RagEtlConfig(BaseSettings):
default="database",
)
UNSTRUCTURED_API_URL: str | None = Field(
UNSTRUCTURED_API_URL: Optional[str] = Field(
description="API URL for Unstructured.io service",
default=None,
)
UNSTRUCTURED_API_KEY: str | None = Field(
UNSTRUCTURED_API_KEY: Optional[str] = Field(
description="API key for Unstructured.io service",
default="",
)
SCARF_NO_ANALYTICS: str | None = Field(
SCARF_NO_ANALYTICS: Optional[str] = Field(
description="This is about whether to disable Scarf analytics in Unstructured library.",
default="false",
)

View File

@@ -1,3 +1,5 @@
from typing import Optional
from pydantic import Field, NonNegativeInt
from pydantic_settings import BaseSettings
@@ -38,17 +40,17 @@ class HostedOpenAiConfig(BaseSettings):
Configuration for hosted OpenAI service
"""
HOSTED_OPENAI_API_KEY: str | None = Field(
HOSTED_OPENAI_API_KEY: Optional[str] = Field(
description="API key for hosted OpenAI service",
default=None,
)
HOSTED_OPENAI_API_BASE: str | None = Field(
HOSTED_OPENAI_API_BASE: Optional[str] = Field(
description="Base URL for hosted OpenAI API",
default=None,
)
HOSTED_OPENAI_API_ORGANIZATION: str | None = Field(
HOSTED_OPENAI_API_ORGANIZATION: Optional[str] = Field(
description="Organization ID for hosted OpenAI service",
default=None,
)
@@ -108,12 +110,12 @@ class HostedAzureOpenAiConfig(BaseSettings):
default=False,
)
HOSTED_AZURE_OPENAI_API_KEY: str | None = Field(
HOSTED_AZURE_OPENAI_API_KEY: Optional[str] = Field(
description="API key for hosted Azure OpenAI service",
default=None,
)
HOSTED_AZURE_OPENAI_API_BASE: str | None = Field(
HOSTED_AZURE_OPENAI_API_BASE: Optional[str] = Field(
description="Base URL for hosted Azure OpenAI API",
default=None,
)
@@ -129,12 +131,12 @@ class HostedAnthropicConfig(BaseSettings):
Configuration for hosted Anthropic service
"""
HOSTED_ANTHROPIC_API_BASE: str | None = Field(
HOSTED_ANTHROPIC_API_BASE: Optional[str] = Field(
description="Base URL for hosted Anthropic API",
default=None,
)
HOSTED_ANTHROPIC_API_KEY: str | None = Field(
HOSTED_ANTHROPIC_API_KEY: Optional[str] = Field(
description="API key for hosted Anthropic service",
default=None,
)

View File

@@ -1,5 +1,5 @@
import os
from typing import Any, Literal
from typing import Any, Literal, Optional
from urllib.parse import parse_qsl, quote_plus
from pydantic import Field, NonNegativeFloat, NonNegativeInt, PositiveFloat, PositiveInt, computed_field
@@ -78,18 +78,18 @@ class StorageConfig(BaseSettings):
class VectorStoreConfig(BaseSettings):
VECTOR_STORE: str | None = Field(
VECTOR_STORE: Optional[str] = Field(
description="Type of vector store to use for efficient similarity search."
" Set to None if not using a vector store.",
default=None,
)
VECTOR_STORE_WHITELIST_ENABLE: bool | None = Field(
VECTOR_STORE_WHITELIST_ENABLE: Optional[bool] = Field(
description="Enable whitelist for vector store.",
default=False,
)
VECTOR_INDEX_NAME_PREFIX: str | None = Field(
VECTOR_INDEX_NAME_PREFIX: Optional[str] = Field(
description="Prefix used to create collection name in vector database",
default="Vector_index",
)
@@ -225,26 +225,26 @@ class CeleryConfig(DatabaseConfig):
default="redis",
)
CELERY_BROKER_URL: str | None = Field(
CELERY_BROKER_URL: Optional[str] = Field(
description="URL of the message broker for Celery tasks.",
default=None,
)
CELERY_USE_SENTINEL: bool | None = Field(
CELERY_USE_SENTINEL: Optional[bool] = Field(
description="Whether to use Redis Sentinel for high availability.",
default=False,
)
CELERY_SENTINEL_MASTER_NAME: str | None = Field(
CELERY_SENTINEL_MASTER_NAME: Optional[str] = Field(
description="Name of the Redis Sentinel master.",
default=None,
)
CELERY_SENTINEL_PASSWORD: str | None = Field(
CELERY_SENTINEL_PASSWORD: Optional[str] = Field(
description="Password of the Redis Sentinel master.",
default=None,
)
CELERY_SENTINEL_SOCKET_TIMEOUT: PositiveFloat | None = Field(
CELERY_SENTINEL_SOCKET_TIMEOUT: Optional[PositiveFloat] = Field(
description="Timeout for Redis Sentinel socket operations in seconds.",
default=0.1,
)
@@ -268,12 +268,12 @@ class InternalTestConfig(BaseSettings):
Configuration settings for Internal Test
"""
AWS_SECRET_ACCESS_KEY: str | None = Field(
AWS_SECRET_ACCESS_KEY: Optional[str] = Field(
description="Internal test AWS secret access key",
default=None,
)
AWS_ACCESS_KEY_ID: str | None = Field(
AWS_ACCESS_KEY_ID: Optional[str] = Field(
description="Internal test AWS access key ID",
default=None,
)
@@ -284,15 +284,15 @@ class DatasetQueueMonitorConfig(BaseSettings):
Configuration settings for Dataset Queue Monitor
"""
QUEUE_MONITOR_THRESHOLD: NonNegativeInt | None = Field(
QUEUE_MONITOR_THRESHOLD: Optional[NonNegativeInt] = Field(
description="Threshold for dataset queue monitor",
default=200,
)
QUEUE_MONITOR_ALERT_EMAILS: str | None = Field(
QUEUE_MONITOR_ALERT_EMAILS: Optional[str] = Field(
description="Emails for dataset queue monitor alert, separated by commas",
default=None,
)
QUEUE_MONITOR_INTERVAL: NonNegativeFloat | None = Field(
QUEUE_MONITOR_INTERVAL: Optional[NonNegativeFloat] = Field(
description="Interval for dataset queue monitor in minutes",
default=30,
)

View File

@@ -1,3 +1,5 @@
from typing import Optional
from pydantic import Field, NonNegativeInt, PositiveFloat, PositiveInt
from pydantic_settings import BaseSettings
@@ -17,12 +19,12 @@ class RedisConfig(BaseSettings):
default=6379,
)
REDIS_USERNAME: str | None = Field(
REDIS_USERNAME: Optional[str] = Field(
description="Username for Redis authentication (if required)",
default=None,
)
REDIS_PASSWORD: str | None = Field(
REDIS_PASSWORD: Optional[str] = Field(
description="Password for Redis authentication (if required)",
default=None,
)
@@ -42,47 +44,47 @@ class RedisConfig(BaseSettings):
default="CERT_NONE",
)
REDIS_SSL_CA_CERTS: str | None = Field(
REDIS_SSL_CA_CERTS: Optional[str] = Field(
description="Path to the CA certificate file for SSL verification",
default=None,
)
REDIS_SSL_CERTFILE: str | None = Field(
REDIS_SSL_CERTFILE: Optional[str] = Field(
description="Path to the client certificate file for SSL authentication",
default=None,
)
REDIS_SSL_KEYFILE: str | None = Field(
REDIS_SSL_KEYFILE: Optional[str] = Field(
description="Path to the client private key file for SSL authentication",
default=None,
)
REDIS_USE_SENTINEL: bool | None = Field(
REDIS_USE_SENTINEL: Optional[bool] = Field(
description="Enable Redis Sentinel mode for high availability",
default=False,
)
REDIS_SENTINELS: str | None = Field(
REDIS_SENTINELS: Optional[str] = Field(
description="Comma-separated list of Redis Sentinel nodes (host:port)",
default=None,
)
REDIS_SENTINEL_SERVICE_NAME: str | None = Field(
REDIS_SENTINEL_SERVICE_NAME: Optional[str] = Field(
description="Name of the Redis Sentinel service to monitor",
default=None,
)
REDIS_SENTINEL_USERNAME: str | None = Field(
REDIS_SENTINEL_USERNAME: Optional[str] = Field(
description="Username for Redis Sentinel authentication (if required)",
default=None,
)
REDIS_SENTINEL_PASSWORD: str | None = Field(
REDIS_SENTINEL_PASSWORD: Optional[str] = Field(
description="Password for Redis Sentinel authentication (if required)",
default=None,
)
REDIS_SENTINEL_SOCKET_TIMEOUT: PositiveFloat | None = Field(
REDIS_SENTINEL_SOCKET_TIMEOUT: Optional[PositiveFloat] = Field(
description="Socket timeout in seconds for Redis Sentinel connections",
default=0.1,
)
@@ -92,12 +94,12 @@ class RedisConfig(BaseSettings):
default=False,
)
REDIS_CLUSTERS: str | None = Field(
REDIS_CLUSTERS: Optional[str] = Field(
description="Comma-separated list of Redis Clusters nodes (host:port)",
default=None,
)
REDIS_CLUSTERS_PASSWORD: str | None = Field(
REDIS_CLUSTERS_PASSWORD: Optional[str] = Field(
description="Password for Redis Clusters authentication (if required)",
default=None,
)
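For context, sentinel settings like these typically feed redis-py's `Sentinel` client along these lines (illustrative wiring with placeholder hosts, not Dify's actual code):

```python
from redis.sentinel import Sentinel

sentinel = Sentinel(
    [("sentinel-1", 26379), ("sentinel-2", 26379)],  # parsed from REDIS_SENTINELS
    socket_timeout=0.1,                              # REDIS_SENTINEL_SOCKET_TIMEOUT
    sentinel_kwargs={"password": "secret"},          # REDIS_SENTINEL_PASSWORD
)
master = sentinel.master_for("mymaster")             # REDIS_SENTINEL_SERVICE_NAME
```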

View File

@@ -1,3 +1,5 @@
from typing import Optional
from pydantic import Field
from pydantic_settings import BaseSettings
@@ -7,37 +9,37 @@ class AliyunOSSStorageConfig(BaseSettings):
Configuration settings for Aliyun Object Storage Service (OSS)
"""
ALIYUN_OSS_BUCKET_NAME: str | None = Field(
ALIYUN_OSS_BUCKET_NAME: Optional[str] = Field(
description="Name of the Aliyun OSS bucket to store and retrieve objects",
default=None,
)
ALIYUN_OSS_ACCESS_KEY: str | None = Field(
ALIYUN_OSS_ACCESS_KEY: Optional[str] = Field(
description="Access key ID for authenticating with Aliyun OSS",
default=None,
)
ALIYUN_OSS_SECRET_KEY: str | None = Field(
ALIYUN_OSS_SECRET_KEY: Optional[str] = Field(
description="Secret access key for authenticating with Aliyun OSS",
default=None,
)
ALIYUN_OSS_ENDPOINT: str | None = Field(
ALIYUN_OSS_ENDPOINT: Optional[str] = Field(
description="URL of the Aliyun OSS endpoint for your chosen region",
default=None,
)
ALIYUN_OSS_REGION: str | None = Field(
ALIYUN_OSS_REGION: Optional[str] = Field(
description="Aliyun OSS region where your bucket is located (e.g., 'oss-cn-hangzhou')",
default=None,
)
ALIYUN_OSS_AUTH_VERSION: str | None = Field(
ALIYUN_OSS_AUTH_VERSION: Optional[str] = Field(
description="Version of the authentication protocol to use with Aliyun OSS (e.g., 'v4')",
default=None,
)
ALIYUN_OSS_PATH: str | None = Field(
ALIYUN_OSS_PATH: Optional[str] = Field(
description="Base path within the bucket to store objects (e.g., 'my-app-data/')",
default=None,
)

View File

@@ -1,4 +1,4 @@
from typing import Literal
from typing import Literal, Optional
from pydantic import Field
from pydantic_settings import BaseSettings
@@ -9,27 +9,27 @@ class S3StorageConfig(BaseSettings):
Configuration settings for S3-compatible object storage
"""
S3_ENDPOINT: str | None = Field(
S3_ENDPOINT: Optional[str] = Field(
description="URL of the S3-compatible storage endpoint (e.g., 'https://s3.amazonaws.com')",
default=None,
)
S3_REGION: str | None = Field(
S3_REGION: Optional[str] = Field(
description="Region where the S3 bucket is located (e.g., 'us-east-1')",
default=None,
)
S3_BUCKET_NAME: str | None = Field(
S3_BUCKET_NAME: Optional[str] = Field(
description="Name of the S3 bucket to store and retrieve objects",
default=None,
)
S3_ACCESS_KEY: str | None = Field(
S3_ACCESS_KEY: Optional[str] = Field(
description="Access key ID for authenticating with the S3 service",
default=None,
)
S3_SECRET_KEY: str | None = Field(
S3_SECRET_KEY: Optional[str] = Field(
description="Secret access key for authenticating with the S3 service",
default=None,
)

View File

@@ -1,3 +1,5 @@
from typing import Optional
from pydantic import Field
from pydantic_settings import BaseSettings
@@ -7,22 +9,22 @@ class AzureBlobStorageConfig(BaseSettings):
Configuration settings for Azure Blob Storage
"""
AZURE_BLOB_ACCOUNT_NAME: str | None = Field(
AZURE_BLOB_ACCOUNT_NAME: Optional[str] = Field(
description="Name of the Azure Storage account (e.g., 'mystorageaccount')",
default=None,
)
AZURE_BLOB_ACCOUNT_KEY: str | None = Field(
AZURE_BLOB_ACCOUNT_KEY: Optional[str] = Field(
description="Access key for authenticating with the Azure Storage account",
default=None,
)
AZURE_BLOB_CONTAINER_NAME: str | None = Field(
AZURE_BLOB_CONTAINER_NAME: Optional[str] = Field(
description="Name of the Azure Blob container to store and retrieve objects",
default=None,
)
AZURE_BLOB_ACCOUNT_URL: str | None = Field(
AZURE_BLOB_ACCOUNT_URL: Optional[str] = Field(
description="URL of the Azure Blob storage endpoint (e.g., 'https://mystorageaccount.blob.core.windows.net')",
default=None,
)

View File

@@ -1,3 +1,5 @@
from typing import Optional
from pydantic import Field
from pydantic_settings import BaseSettings
@@ -7,22 +9,22 @@ class BaiduOBSStorageConfig(BaseSettings):
Configuration settings for Baidu Object Storage Service (OBS)
"""
BAIDU_OBS_BUCKET_NAME: str | None = Field(
BAIDU_OBS_BUCKET_NAME: Optional[str] = Field(
description="Name of the Baidu OBS bucket to store and retrieve objects (e.g., 'my-obs-bucket')",
default=None,
)
BAIDU_OBS_ACCESS_KEY: str | None = Field(
BAIDU_OBS_ACCESS_KEY: Optional[str] = Field(
description="Access Key ID for authenticating with Baidu OBS",
default=None,
)
BAIDU_OBS_SECRET_KEY: str | None = Field(
BAIDU_OBS_SECRET_KEY: Optional[str] = Field(
description="Secret Access Key for authenticating with Baidu OBS",
default=None,
)
BAIDU_OBS_ENDPOINT: str | None = Field(
BAIDU_OBS_ENDPOINT: Optional[str] = Field(
description="URL of the Baidu OSS endpoint for your chosen region (e.g., 'https://.bj.bcebos.com')",
default=None,
)

View File

@@ -1,5 +1,7 @@
"""ClickZetta Volume Storage Configuration"""
from typing import Optional
from pydantic import Field
from pydantic_settings import BaseSettings
@@ -7,17 +9,17 @@ from pydantic_settings import BaseSettings
class ClickZettaVolumeStorageConfig(BaseSettings):
"""Configuration for ClickZetta Volume storage."""
CLICKZETTA_VOLUME_USERNAME: str | None = Field(
CLICKZETTA_VOLUME_USERNAME: Optional[str] = Field(
description="Username for ClickZetta Volume authentication",
default=None,
)
CLICKZETTA_VOLUME_PASSWORD: str | None = Field(
CLICKZETTA_VOLUME_PASSWORD: Optional[str] = Field(
description="Password for ClickZetta Volume authentication",
default=None,
)
CLICKZETTA_VOLUME_INSTANCE: str | None = Field(
CLICKZETTA_VOLUME_INSTANCE: Optional[str] = Field(
description="ClickZetta instance identifier",
default=None,
)
@@ -47,7 +49,7 @@ class ClickZettaVolumeStorageConfig(BaseSettings):
default="user",
)
CLICKZETTA_VOLUME_NAME: str | None = Field(
CLICKZETTA_VOLUME_NAME: Optional[str] = Field(
description="ClickZetta volume name for external volumes",
default=None,
)

View File

@@ -1,3 +1,5 @@
from typing import Optional
from pydantic import Field
from pydantic_settings import BaseSettings
@@ -7,12 +9,12 @@ class GoogleCloudStorageConfig(BaseSettings):
Configuration settings for Google Cloud Storage
"""
GOOGLE_STORAGE_BUCKET_NAME: str | None = Field(
GOOGLE_STORAGE_BUCKET_NAME: Optional[str] = Field(
description="Name of the Google Cloud Storage bucket to store and retrieve objects (e.g., 'my-gcs-bucket')",
default=None,
)
GOOGLE_STORAGE_SERVICE_ACCOUNT_JSON_BASE64: str | None = Field(
GOOGLE_STORAGE_SERVICE_ACCOUNT_JSON_BASE64: Optional[str] = Field(
description="Base64-encoded JSON key file for Google Cloud service account authentication",
default=None,
)

View File

@@ -1,3 +1,5 @@
from typing import Optional
from pydantic import Field
from pydantic_settings import BaseSettings
@@ -7,22 +9,22 @@ class HuaweiCloudOBSStorageConfig(BaseSettings):
Configuration settings for Huawei Cloud Object Storage Service (OBS)
"""
HUAWEI_OBS_BUCKET_NAME: str | None = Field(
HUAWEI_OBS_BUCKET_NAME: Optional[str] = Field(
description="Name of the Huawei Cloud OBS bucket to store and retrieve objects (e.g., 'my-obs-bucket')",
default=None,
)
HUAWEI_OBS_ACCESS_KEY: str | None = Field(
HUAWEI_OBS_ACCESS_KEY: Optional[str] = Field(
description="Access Key ID for authenticating with Huawei Cloud OBS",
default=None,
)
HUAWEI_OBS_SECRET_KEY: str | None = Field(
HUAWEI_OBS_SECRET_KEY: Optional[str] = Field(
description="Secret Access Key for authenticating with Huawei Cloud OBS",
default=None,
)
HUAWEI_OBS_SERVER: str | None = Field(
HUAWEI_OBS_SERVER: Optional[str] = Field(
description="Endpoint URL for Huawei Cloud OBS (e.g., 'https://obs.cn-north-4.myhuaweicloud.com')",
default=None,
)

View File

@@ -1,3 +1,5 @@
from typing import Optional
from pydantic import Field
from pydantic_settings import BaseSettings
@@ -7,27 +9,27 @@ class OCIStorageConfig(BaseSettings):
Configuration settings for Oracle Cloud Infrastructure (OCI) Object Storage
"""
OCI_ENDPOINT: str | None = Field(
OCI_ENDPOINT: Optional[str] = Field(
description="URL of the OCI Object Storage endpoint (e.g., 'https://objectstorage.us-phoenix-1.oraclecloud.com')",
default=None,
)
OCI_REGION: str | None = Field(
OCI_REGION: Optional[str] = Field(
description="OCI region where the bucket is located (e.g., 'us-phoenix-1')",
default=None,
)
OCI_BUCKET_NAME: str | None = Field(
OCI_BUCKET_NAME: Optional[str] = Field(
description="Name of the OCI Object Storage bucket to store and retrieve objects (e.g., 'my-oci-bucket')",
default=None,
)
OCI_ACCESS_KEY: str | None = Field(
OCI_ACCESS_KEY: Optional[str] = Field(
description="Access key (also known as API key) for authenticating with OCI Object Storage",
default=None,
)
OCI_SECRET_KEY: str | None = Field(
OCI_SECRET_KEY: Optional[str] = Field(
description="Secret key associated with the access key for authenticating with OCI Object Storage",
default=None,
)

View File

@@ -1,3 +1,5 @@
from typing import Optional
from pydantic import Field
from pydantic_settings import BaseSettings
@@ -7,17 +9,17 @@ class SupabaseStorageConfig(BaseSettings):
Configuration settings for Supabase Object Storage Service
"""
SUPABASE_BUCKET_NAME: str | None = Field(
SUPABASE_BUCKET_NAME: Optional[str] = Field(
description="Name of the Supabase bucket to store and retrieve objects (e.g., 'dify-bucket')",
default=None,
)
SUPABASE_API_KEY: str | None = Field(
SUPABASE_API_KEY: Optional[str] = Field(
description="API KEY for authenticating with Supabase",
default=None,
)
SUPABASE_URL: str | None = Field(
SUPABASE_URL: Optional[str] = Field(
description="URL of the Supabase",
default=None,
)

View File

@@ -1,3 +1,5 @@
from typing import Optional
from pydantic import Field
from pydantic_settings import BaseSettings
@@ -7,27 +9,27 @@ class TencentCloudCOSStorageConfig(BaseSettings):
Configuration settings for Tencent Cloud Object Storage (COS)
"""
TENCENT_COS_BUCKET_NAME: str | None = Field(
TENCENT_COS_BUCKET_NAME: Optional[str] = Field(
description="Name of the Tencent Cloud COS bucket to store and retrieve objects",
default=None,
)
TENCENT_COS_REGION: str | None = Field(
TENCENT_COS_REGION: Optional[str] = Field(
description="Tencent Cloud region where the COS bucket is located (e.g., 'ap-guangzhou')",
default=None,
)
TENCENT_COS_SECRET_ID: str | None = Field(
TENCENT_COS_SECRET_ID: Optional[str] = Field(
description="SecretId for authenticating with Tencent Cloud COS (part of API credentials)",
default=None,
)
TENCENT_COS_SECRET_KEY: str | None = Field(
TENCENT_COS_SECRET_KEY: Optional[str] = Field(
description="SecretKey for authenticating with Tencent Cloud COS (part of API credentials)",
default=None,
)
TENCENT_COS_SCHEME: str | None = Field(
TENCENT_COS_SCHEME: Optional[str] = Field(
description="Protocol scheme for COS requests: 'https' (recommended) or 'http'",
default=None,
)

View File

@@ -1,3 +1,5 @@
from typing import Optional
from pydantic import Field
from pydantic_settings import BaseSettings
@@ -7,27 +9,27 @@ class VolcengineTOSStorageConfig(BaseSettings):
Configuration settings for Volcengine Tinder Object Storage (TOS)
"""
VOLCENGINE_TOS_BUCKET_NAME: str | None = Field(
VOLCENGINE_TOS_BUCKET_NAME: Optional[str] = Field(
description="Name of the Volcengine TOS bucket to store and retrieve objects (e.g., 'my-tos-bucket')",
default=None,
)
VOLCENGINE_TOS_ACCESS_KEY: str | None = Field(
VOLCENGINE_TOS_ACCESS_KEY: Optional[str] = Field(
description="Access Key ID for authenticating with Volcengine TOS",
default=None,
)
VOLCENGINE_TOS_SECRET_KEY: str | None = Field(
VOLCENGINE_TOS_SECRET_KEY: Optional[str] = Field(
description="Secret Access Key for authenticating with Volcengine TOS",
default=None,
)
VOLCENGINE_TOS_ENDPOINT: str | None = Field(
VOLCENGINE_TOS_ENDPOINT: Optional[str] = Field(
description="URL of the Volcengine TOS endpoint (e.g., 'https://tos-cn-beijing.volces.com')",
default=None,
)
VOLCENGINE_TOS_REGION: str | None = Field(
VOLCENGINE_TOS_REGION: Optional[str] = Field(
description="Volcengine region where the TOS bucket is located (e.g., 'cn-beijing')",
default=None,
)

View File

@@ -1,3 +1,5 @@
from typing import Optional
from pydantic import Field, PositiveInt
from pydantic_settings import BaseSettings
@@ -9,37 +11,37 @@ class AnalyticdbConfig(BaseSettings):
https://www.alibabacloud.com/help/en/analyticdb-for-postgresql/getting-started/create-an-instance-instances-with-vector-engine-optimization-enabled
"""
ANALYTICDB_KEY_ID: str | None = Field(
ANALYTICDB_KEY_ID: Optional[str] = Field(
default=None, description="The Access Key ID provided by Alibaba Cloud for API authentication."
)
ANALYTICDB_KEY_SECRET: str | None = Field(
ANALYTICDB_KEY_SECRET: Optional[str] = Field(
default=None, description="The Secret Access Key corresponding to the Access Key ID for secure API access."
)
ANALYTICDB_REGION_ID: str | None = Field(
ANALYTICDB_REGION_ID: Optional[str] = Field(
default=None,
description="The region where the AnalyticDB instance is deployed (e.g., 'cn-hangzhou', 'ap-southeast-1').",
)
ANALYTICDB_INSTANCE_ID: str | None = Field(
ANALYTICDB_INSTANCE_ID: Optional[str] = Field(
default=None,
description="The unique identifier of the AnalyticDB instance you want to connect to.",
)
ANALYTICDB_ACCOUNT: str | None = Field(
ANALYTICDB_ACCOUNT: Optional[str] = Field(
default=None,
description="The account name used to log in to the AnalyticDB instance"
" (usually the initial account created with the instance).",
)
ANALYTICDB_PASSWORD: str | None = Field(
ANALYTICDB_PASSWORD: Optional[str] = Field(
default=None, description="The password associated with the AnalyticDB account for database authentication."
)
ANALYTICDB_NAMESPACE: str | None = Field(
ANALYTICDB_NAMESPACE: Optional[str] = Field(
default=None, description="The namespace within AnalyticDB for schema isolation (if using namespace feature)."
)
ANALYTICDB_NAMESPACE_PASSWORD: str | None = Field(
ANALYTICDB_NAMESPACE_PASSWORD: Optional[str] = Field(
default=None,
description="The password for accessing the specified namespace within the AnalyticDB instance"
" (if namespace feature is enabled).",
)
ANALYTICDB_HOST: str | None = Field(
ANALYTICDB_HOST: Optional[str] = Field(
default=None, description="The host of the AnalyticDB instance you want to connect to."
)
ANALYTICDB_PORT: PositiveInt = Field(

View File

@@ -1,3 +1,5 @@
from typing import Optional
from pydantic import Field, NonNegativeInt, PositiveInt
from pydantic_settings import BaseSettings
@@ -7,7 +9,7 @@ class BaiduVectorDBConfig(BaseSettings):
Configuration settings for Baidu Vector Database
"""
BAIDU_VECTOR_DB_ENDPOINT: str | None = Field(
BAIDU_VECTOR_DB_ENDPOINT: Optional[str] = Field(
description="URL of the Baidu Vector Database service (e.g., 'http://vdb.bj.baidubce.com')",
default=None,
)
@@ -17,17 +19,17 @@ class BaiduVectorDBConfig(BaseSettings):
default=30000,
)
BAIDU_VECTOR_DB_ACCOUNT: str | None = Field(
BAIDU_VECTOR_DB_ACCOUNT: Optional[str] = Field(
description="Account for authenticating with the Baidu Vector Database",
default=None,
)
BAIDU_VECTOR_DB_API_KEY: str | None = Field(
BAIDU_VECTOR_DB_API_KEY: Optional[str] = Field(
description="API key for authenticating with the Baidu Vector Database service",
default=None,
)
BAIDU_VECTOR_DB_DATABASE: str | None = Field(
BAIDU_VECTOR_DB_DATABASE: Optional[str] = Field(
description="Name of the specific Baidu Vector Database to connect to",
default=None,
)

View File

@@ -1,3 +1,5 @@
from typing import Optional
from pydantic import Field, PositiveInt
from pydantic_settings import BaseSettings
@@ -7,7 +9,7 @@ class ChromaConfig(BaseSettings):
Configuration settings for Chroma vector database
"""
CHROMA_HOST: str | None = Field(
CHROMA_HOST: Optional[str] = Field(
description="Hostname or IP address of the Chroma server (e.g., 'localhost' or '192.168.1.100')",
default=None,
)
@@ -17,22 +19,22 @@ class ChromaConfig(BaseSettings):
default=8000,
)
CHROMA_TENANT: str | None = Field(
CHROMA_TENANT: Optional[str] = Field(
description="Tenant identifier for multi-tenancy support in Chroma",
default=None,
)
CHROMA_DATABASE: str | None = Field(
CHROMA_DATABASE: Optional[str] = Field(
description="Name of the Chroma database to connect to",
default=None,
)
CHROMA_AUTH_PROVIDER: str | None = Field(
CHROMA_AUTH_PROVIDER: Optional[str] = Field(
description="Authentication provider for Chroma (e.g., 'basic', 'token', or a custom provider)",
default=None,
)
CHROMA_AUTH_CREDENTIALS: str | None = Field(
CHROMA_AUTH_CREDENTIALS: Optional[str] = Field(
description="Authentication credentials for Chroma (format depends on the auth provider)",
default=None,
)

View File

@@ -1,3 +1,5 @@
from typing import Optional
from pydantic import Field
from pydantic_settings import BaseSettings
@@ -7,62 +9,62 @@ class ClickzettaConfig(BaseSettings):
Clickzetta Lakehouse vector database configuration
"""
CLICKZETTA_USERNAME: str | None = Field(
CLICKZETTA_USERNAME: Optional[str] = Field(
description="Username for authenticating with Clickzetta Lakehouse",
default=None,
)
CLICKZETTA_PASSWORD: str | None = Field(
CLICKZETTA_PASSWORD: Optional[str] = Field(
description="Password for authenticating with Clickzetta Lakehouse",
default=None,
)
CLICKZETTA_INSTANCE: str | None = Field(
CLICKZETTA_INSTANCE: Optional[str] = Field(
description="Clickzetta Lakehouse instance ID",
default=None,
)
CLICKZETTA_SERVICE: str | None = Field(
CLICKZETTA_SERVICE: Optional[str] = Field(
description="Clickzetta API service endpoint (e.g., 'api.clickzetta.com')",
default="api.clickzetta.com",
)
CLICKZETTA_WORKSPACE: str | None = Field(
CLICKZETTA_WORKSPACE: Optional[str] = Field(
description="Clickzetta workspace name",
default="default",
)
CLICKZETTA_VCLUSTER: str | None = Field(
CLICKZETTA_VCLUSTER: Optional[str] = Field(
description="Clickzetta virtual cluster name",
default="default_ap",
)
CLICKZETTA_SCHEMA: str | None = Field(
CLICKZETTA_SCHEMA: Optional[str] = Field(
description="Database schema name in Clickzetta",
default="public",
)
CLICKZETTA_BATCH_SIZE: int | None = Field(
CLICKZETTA_BATCH_SIZE: Optional[int] = Field(
description="Batch size for bulk insert operations",
default=100,
)
CLICKZETTA_ENABLE_INVERTED_INDEX: bool | None = Field(
CLICKZETTA_ENABLE_INVERTED_INDEX: Optional[bool] = Field(
description="Enable inverted index for full-text search capabilities",
default=True,
)
CLICKZETTA_ANALYZER_TYPE: str | None = Field(
CLICKZETTA_ANALYZER_TYPE: Optional[str] = Field(
description="Analyzer type for full-text search: keyword, english, chinese, unicode",
default="chinese",
)
CLICKZETTA_ANALYZER_MODE: str | None = Field(
CLICKZETTA_ANALYZER_MODE: Optional[str] = Field(
description="Analyzer mode for tokenization: max_word (fine-grained) or smart (intelligent)",
default="smart",
)
CLICKZETTA_VECTOR_DISTANCE_FUNCTION: str | None = Field(
CLICKZETTA_VECTOR_DISTANCE_FUNCTION: Optional[str] = Field(
description="Distance function for vector similarity: l2_distance or cosine_distance",
default="cosine_distance",
)

View File

@@ -1,3 +1,5 @@
from typing import Optional
from pydantic import Field
from pydantic_settings import BaseSettings
@@ -7,27 +9,27 @@ class CouchbaseConfig(BaseSettings):
Couchbase configs
"""
COUCHBASE_CONNECTION_STRING: str | None = Field(
COUCHBASE_CONNECTION_STRING: Optional[str] = Field(
description="COUCHBASE connection string",
default=None,
)
COUCHBASE_USER: str | None = Field(
COUCHBASE_USER: Optional[str] = Field(
description="COUCHBASE user",
default=None,
)
COUCHBASE_PASSWORD: str | None = Field(
COUCHBASE_PASSWORD: Optional[str] = Field(
description="COUCHBASE password",
default=None,
)
COUCHBASE_BUCKET_NAME: str | None = Field(
COUCHBASE_BUCKET_NAME: Optional[str] = Field(
description="COUCHBASE bucket name",
default=None,
)
COUCHBASE_SCOPE_NAME: str | None = Field(
COUCHBASE_SCOPE_NAME: Optional[str] = Field(
description="COUCHBASE scope name",
default=None,
)

View File

@@ -1,3 +1,5 @@
from typing import Optional
from pydantic import Field, PositiveInt, model_validator
from pydantic_settings import BaseSettings
@@ -8,7 +10,7 @@ class ElasticsearchConfig(BaseSettings):
Can load from environment variables or .env files.
"""
ELASTICSEARCH_HOST: str | None = Field(
ELASTICSEARCH_HOST: Optional[str] = Field(
description="Hostname or IP address of the Elasticsearch server (e.g., 'localhost' or '192.168.1.100')",
default="127.0.0.1",
)
@@ -18,28 +20,30 @@ class ElasticsearchConfig(BaseSettings):
default=9200,
)
ELASTICSEARCH_USERNAME: str | None = Field(
ELASTICSEARCH_USERNAME: Optional[str] = Field(
description="Username for authenticating with Elasticsearch (default is 'elastic')",
default="elastic",
)
ELASTICSEARCH_PASSWORD: str | None = Field(
ELASTICSEARCH_PASSWORD: Optional[str] = Field(
description="Password for authenticating with Elasticsearch (default is 'elastic')",
default="elastic",
)
# Elastic Cloud (optional)
ELASTICSEARCH_USE_CLOUD: bool | None = Field(
ELASTICSEARCH_USE_CLOUD: Optional[bool] = Field(
description="Set to True to use Elastic Cloud instead of self-hosted Elasticsearch", default=False
)
ELASTICSEARCH_CLOUD_URL: str | None = Field(
ELASTICSEARCH_CLOUD_URL: Optional[str] = Field(
description="Full URL for Elastic Cloud deployment (e.g., 'https://example.es.region.aws.found.io:443')",
default=None,
)
ELASTICSEARCH_API_KEY: str | None = Field(description="API key for authenticating with Elastic Cloud", default=None)
ELASTICSEARCH_API_KEY: Optional[str] = Field(
description="API key for authenticating with Elastic Cloud", default=None
)
# Common options
ELASTICSEARCH_CA_CERTS: str | None = Field(
ELASTICSEARCH_CA_CERTS: Optional[str] = Field(
description="Path to CA certificate file for SSL verification", default=None
)
ELASTICSEARCH_VERIFY_CERTS: bool = Field(

View File

@@ -1,3 +1,5 @@
from typing import Optional
from pydantic import Field
from pydantic_settings import BaseSettings
@@ -7,17 +9,17 @@ class HuaweiCloudConfig(BaseSettings):
Configuration settings for Huawei cloud search service
"""
HUAWEI_CLOUD_HOSTS: str | None = Field(
HUAWEI_CLOUD_HOSTS: Optional[str] = Field(
description="Hostname or IP address of the Huawei cloud search service instance",
default=None,
)
HUAWEI_CLOUD_USER: str | None = Field(
HUAWEI_CLOUD_USER: Optional[str] = Field(
description="Username for authenticating with Huawei cloud search service",
default=None,
)
HUAWEI_CLOUD_PASSWORD: str | None = Field(
HUAWEI_CLOUD_PASSWORD: Optional[str] = Field(
description="Password for authenticating with Huawei cloud search service",
default=None,
)

View File

@@ -1,3 +1,5 @@
from typing import Optional
from pydantic import Field
from pydantic_settings import BaseSettings
@@ -7,27 +9,27 @@ class LindormConfig(BaseSettings):
Lindorm configs
"""
LINDORM_URL: str | None = Field(
LINDORM_URL: Optional[str] = Field(
description="Lindorm url",
default=None,
)
LINDORM_USERNAME: str | None = Field(
LINDORM_USERNAME: Optional[str] = Field(
description="Lindorm user",
default=None,
)
LINDORM_PASSWORD: str | None = Field(
LINDORM_PASSWORD: Optional[str] = Field(
description="Lindorm password",
default=None,
)
LINDORM_INDEX_TYPE: str | None = Field(
DEFAULT_INDEX_TYPE: Optional[str] = Field(
description="Lindorm Vector Index Type, hnsw or flat is available in dify",
default="hnsw",
)
LINDORM_DISTANCE_TYPE: str | None = Field(
DEFAULT_DISTANCE_TYPE: Optional[str] = Field(
description="Vector Distance Type, support l2, cosinesimil, innerproduct", default="l2"
)
LINDORM_USING_UGC: bool | None = Field(
description="Using UGC index will store indexes with the same IndexType/Dimension in a single big index.",
default=True,
USING_UGC_INDEX: Optional[bool] = Field(
description="Using UGC index will store the same type of Index in a single index but can retrieve separately.",
default=False,
)
LINDORM_QUERY_TIMEOUT: float | None = Field(description="The lindorm search request timeout (s)", default=2.0)
LINDORM_QUERY_TIMEOUT: Optional[float] = Field(description="The lindorm search request timeout (s)", default=2.0)

View File

@@ -1,3 +1,5 @@
from typing import Optional
from pydantic import Field
from pydantic_settings import BaseSettings
@@ -7,22 +9,22 @@ class MilvusConfig(BaseSettings):
Configuration settings for Milvus vector database
"""
MILVUS_URI: str | None = Field(
MILVUS_URI: Optional[str] = Field(
description="URI for connecting to the Milvus server (e.g., 'http://localhost:19530' or 'https://milvus-instance.example.com:19530')",
default="http://127.0.0.1:19530",
)
MILVUS_TOKEN: str | None = Field(
MILVUS_TOKEN: Optional[str] = Field(
description="Authentication token for Milvus, if token-based authentication is enabled",
default=None,
)
MILVUS_USER: str | None = Field(
MILVUS_USER: Optional[str] = Field(
description="Username for authenticating with Milvus, if username/password authentication is enabled",
default=None,
)
MILVUS_PASSWORD: str | None = Field(
MILVUS_PASSWORD: Optional[str] = Field(
description="Password for authenticating with Milvus, if username/password authentication is enabled",
default=None,
)
@@ -38,7 +40,7 @@ class MilvusConfig(BaseSettings):
default=True,
)
MILVUS_ANALYZER_PARAMS: str | None = Field(
MILVUS_ANALYZER_PARAMS: Optional[str] = Field(
description='Milvus text analyzer parameters, e.g., {"type": "chinese"} for Chinese segmentation support.',
default=None,
)

View File

@@ -1,3 +1,5 @@
from typing import Optional
from pydantic import Field, PositiveInt
from pydantic_settings import BaseSettings
@@ -7,27 +9,27 @@ class OceanBaseVectorConfig(BaseSettings):
Configuration settings for OceanBase Vector database
"""
OCEANBASE_VECTOR_HOST: str | None = Field(
OCEANBASE_VECTOR_HOST: Optional[str] = Field(
description="Hostname or IP address of the OceanBase Vector server (e.g. 'localhost')",
default=None,
)
OCEANBASE_VECTOR_PORT: PositiveInt | None = Field(
OCEANBASE_VECTOR_PORT: Optional[PositiveInt] = Field(
description="Port number on which the OceanBase Vector server is listening (default is 2881)",
default=2881,
)
OCEANBASE_VECTOR_USER: str | None = Field(
OCEANBASE_VECTOR_USER: Optional[str] = Field(
description="Username for authenticating with the OceanBase Vector database",
default=None,
)
OCEANBASE_VECTOR_PASSWORD: str | None = Field(
OCEANBASE_VECTOR_PASSWORD: Optional[str] = Field(
description="Password for authenticating with the OceanBase Vector database",
default=None,
)
OCEANBASE_VECTOR_DATABASE: str | None = Field(
OCEANBASE_VECTOR_DATABASE: Optional[str] = Field(
description="Name of the OceanBase Vector database to connect to",
default=None,
)

View File

@@ -1,3 +1,5 @@
from typing import Optional
from pydantic import Field, PositiveInt
from pydantic_settings import BaseSettings
@@ -7,7 +9,7 @@ class OpenGaussConfig(BaseSettings):
Configuration settings for OpenGauss
"""
OPENGAUSS_HOST: str | None = Field(
OPENGAUSS_HOST: Optional[str] = Field(
description="Hostname or IP address of the OpenGauss server (e.g., 'localhost')",
default=None,
)
@@ -17,17 +19,17 @@ class OpenGaussConfig(BaseSettings):
default=6600,
)
OPENGAUSS_USER: str | None = Field(
OPENGAUSS_USER: Optional[str] = Field(
description="Username for authenticating with the OpenGauss database",
default=None,
)
OPENGAUSS_PASSWORD: str | None = Field(
OPENGAUSS_PASSWORD: Optional[str] = Field(
description="Password for authenticating with the OpenGauss database",
default=None,
)
OPENGAUSS_DATABASE: str | None = Field(
OPENGAUSS_DATABASE: Optional[str] = Field(
description="Name of the OpenGauss database to connect to",
default=None,
)

View File

@@ -1,5 +1,5 @@
from enum import Enum
from typing import Literal
from typing import Literal, Optional
from pydantic import Field, PositiveInt
from pydantic_settings import BaseSettings
@@ -18,7 +18,7 @@ class OpenSearchConfig(BaseSettings):
BASIC = "basic"
AWS_MANAGED_IAM = "aws_managed_iam"
OPENSEARCH_HOST: str | None = Field(
OPENSEARCH_HOST: Optional[str] = Field(
description="Hostname or IP address of the OpenSearch server (e.g., 'localhost' or 'opensearch.example.com')",
default=None,
)
@@ -43,21 +43,21 @@ class OpenSearchConfig(BaseSettings):
default=AuthMethod.BASIC,
)
OPENSEARCH_USER: str | None = Field(
OPENSEARCH_USER: Optional[str] = Field(
description="Username for authenticating with OpenSearch",
default=None,
)
OPENSEARCH_PASSWORD: str | None = Field(
OPENSEARCH_PASSWORD: Optional[str] = Field(
description="Password for authenticating with OpenSearch",
default=None,
)
OPENSEARCH_AWS_REGION: str | None = Field(
OPENSEARCH_AWS_REGION: Optional[str] = Field(
description="AWS region for OpenSearch (e.g. 'us-west-2')",
default=None,
)
OPENSEARCH_AWS_SERVICE: Literal["es", "aoss"] | None = Field(
OPENSEARCH_AWS_SERVICE: Optional[Literal["es", "aoss"]] = Field(
description="AWS service for OpenSearch (e.g. 'aoss' for OpenSearch Serverless)", default=None
)
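
OPENSEARCH_AWS_SERVICE combines Literal with Optional, so only "es", "aoss", or None pass validation; any other value fails when the settings object is built. A minimal sketch of that constraint, with a hypothetical demo class in place of the full OpenSearchConfig:

from typing import Literal, Optional

from pydantic import Field, ValidationError
from pydantic_settings import BaseSettings


class AwsServiceDemo(BaseSettings):
    OPENSEARCH_AWS_SERVICE: Optional[Literal["es", "aoss"]] = Field(default=None)


print(AwsServiceDemo(OPENSEARCH_AWS_SERVICE="aoss").OPENSEARCH_AWS_SERVICE)  # aoss
try:
    AwsServiceDemo(OPENSEARCH_AWS_SERVICE="s3")  # not in the Literal: rejected
except ValidationError:
    print("rejected")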

View File

@@ -1,3 +1,5 @@
from typing import Optional
from pydantic import Field
from pydantic_settings import BaseSettings
@@ -7,33 +9,33 @@ class OracleConfig(BaseSettings):
Configuration settings for Oracle database
"""
ORACLE_USER: str | None = Field(
ORACLE_USER: Optional[str] = Field(
description="Username for authenticating with the Oracle database",
default=None,
)
ORACLE_PASSWORD: str | None = Field(
ORACLE_PASSWORD: Optional[str] = Field(
description="Password for authenticating with the Oracle database",
default=None,
)
ORACLE_DSN: str | None = Field(
ORACLE_DSN: Optional[str] = Field(
description="Oracle database connection string. For traditional database, use format 'host:port/service_name'. "
"For autonomous database, use the service name from tnsnames.ora in the wallet",
default=None,
)
ORACLE_CONFIG_DIR: str | None = Field(
ORACLE_CONFIG_DIR: Optional[str] = Field(
description="Directory containing the tnsnames.ora configuration file. Only used in thin mode connection",
default=None,
)
ORACLE_WALLET_LOCATION: str | None = Field(
ORACLE_WALLET_LOCATION: Optional[str] = Field(
description="Oracle wallet directory path containing the wallet files for secure connection",
default=None,
)
ORACLE_WALLET_PASSWORD: str | None = Field(
ORACLE_WALLET_PASSWORD: Optional[str] = Field(
description="Password to decrypt the Oracle wallet, if it is encrypted",
default=None,
)

View File

@@ -1,3 +1,5 @@
from typing import Optional
from pydantic import Field, PositiveInt
from pydantic_settings import BaseSettings
@@ -7,7 +9,7 @@ class PGVectorConfig(BaseSettings):
Configuration settings for PGVector (PostgreSQL with vector extension)
"""
PGVECTOR_HOST: str | None = Field(
PGVECTOR_HOST: Optional[str] = Field(
description="Hostname or IP address of the PostgreSQL server with PGVector extension (e.g., 'localhost')",
default=None,
)
@@ -17,17 +19,17 @@ class PGVectorConfig(BaseSettings):
default=5433,
)
PGVECTOR_USER: str | None = Field(
PGVECTOR_USER: Optional[str] = Field(
description="Username for authenticating with the PostgreSQL database",
default=None,
)
PGVECTOR_PASSWORD: str | None = Field(
PGVECTOR_PASSWORD: Optional[str] = Field(
description="Password for authenticating with the PostgreSQL database",
default=None,
)
PGVECTOR_DATABASE: str | None = Field(
PGVECTOR_DATABASE: Optional[str] = Field(
description="Name of the PostgreSQL database to connect to",
default=None,
)

View File

@@ -1,3 +1,5 @@
from typing import Optional
from pydantic import Field, PositiveInt
from pydantic_settings import BaseSettings
@@ -7,7 +9,7 @@ class PGVectoRSConfig(BaseSettings):
Configuration settings for PGVecto.RS (Rust-based vector extension for PostgreSQL)
"""
PGVECTO_RS_HOST: str | None = Field(
PGVECTO_RS_HOST: Optional[str] = Field(
description="Hostname or IP address of the PostgreSQL server with PGVecto.RS extension (e.g., 'localhost')",
default=None,
)
@@ -17,17 +19,17 @@ class PGVectoRSConfig(BaseSettings):
default=5431,
)
PGVECTO_RS_USER: str | None = Field(
PGVECTO_RS_USER: Optional[str] = Field(
description="Username for authenticating with the PostgreSQL database using PGVecto.RS",
default=None,
)
PGVECTO_RS_PASSWORD: str | None = Field(
PGVECTO_RS_PASSWORD: Optional[str] = Field(
description="Password for authenticating with the PostgreSQL database using PGVecto.RS",
default=None,
)
PGVECTO_RS_DATABASE: str | None = Field(
PGVECTO_RS_DATABASE: Optional[str] = Field(
description="Name of the PostgreSQL database with PGVecto.RS extension to connect to",
default=None,
)

View File

@@ -1,3 +1,5 @@
from typing import Optional
from pydantic import Field, NonNegativeInt, PositiveInt
from pydantic_settings import BaseSettings
@@ -7,12 +9,12 @@ class QdrantConfig(BaseSettings):
Configuration settings for Qdrant vector database
"""
QDRANT_URL: str | None = Field(
QDRANT_URL: Optional[str] = Field(
description="URL of the Qdrant server (e.g., 'http://localhost:6333' or 'https://qdrant.example.com')",
default=None,
)
QDRANT_API_KEY: str | None = Field(
QDRANT_API_KEY: Optional[str] = Field(
description="API key for authenticating with the Qdrant server",
default=None,
)

View File

@@ -1,3 +1,5 @@
from typing import Optional
from pydantic import Field, PositiveInt
from pydantic_settings import BaseSettings
@@ -7,7 +9,7 @@ class RelytConfig(BaseSettings):
Configuration settings for Relyt database
"""
RELYT_HOST: str | None = Field(
RELYT_HOST: Optional[str] = Field(
description="Hostname or IP address of the Relyt server (e.g., 'localhost' or 'relyt.example.com')",
default=None,
)
@@ -17,17 +19,17 @@ class RelytConfig(BaseSettings):
default=9200,
)
RELYT_USER: str | None = Field(
RELYT_USER: Optional[str] = Field(
description="Username for authenticating with the Relyt database",
default=None,
)
RELYT_PASSWORD: str | None = Field(
RELYT_PASSWORD: Optional[str] = Field(
description="Password for authenticating with the Relyt database",
default=None,
)
RELYT_DATABASE: str | None = Field(
RELYT_DATABASE: Optional[str] = Field(
description="Name of the Relyt database to connect to (default is 'default')",
default="default",
)

View File

@@ -1,3 +1,5 @@
from typing import Optional
from pydantic import Field
from pydantic_settings import BaseSettings
@@ -7,22 +9,22 @@ class TableStoreConfig(BaseSettings):
Configuration settings for TableStore.
"""
TABLESTORE_ENDPOINT: str | None = Field(
TABLESTORE_ENDPOINT: Optional[str] = Field(
description="Endpoint address of the TableStore server (e.g. 'https://instance-name.cn-hangzhou.ots.aliyuncs.com')",
default=None,
)
TABLESTORE_INSTANCE_NAME: str | None = Field(
TABLESTORE_INSTANCE_NAME: Optional[str] = Field(
description="Instance name to access TableStore server (e.g., 'instance-name')",
default=None,
)
TABLESTORE_ACCESS_KEY_ID: str | None = Field(
TABLESTORE_ACCESS_KEY_ID: Optional[str] = Field(
description="AccessKey id for the instance name",
default=None,
)
TABLESTORE_ACCESS_KEY_SECRET: str | None = Field(
TABLESTORE_ACCESS_KEY_SECRET: Optional[str] = Field(
description="AccessKey secret for the instance name",
default=None,
)

View File

@@ -1,3 +1,5 @@
from typing import Optional
from pydantic import Field, NonNegativeInt, PositiveInt
from pydantic_settings import BaseSettings
@@ -7,12 +9,12 @@ class TencentVectorDBConfig(BaseSettings):
Configuration settings for Tencent Vector Database
"""
TENCENT_VECTOR_DB_URL: str | None = Field(
TENCENT_VECTOR_DB_URL: Optional[str] = Field(
description="URL of the Tencent Vector Database service (e.g., 'https://vectordb.tencentcloudapi.com')",
default=None,
)
TENCENT_VECTOR_DB_API_KEY: str | None = Field(
TENCENT_VECTOR_DB_API_KEY: Optional[str] = Field(
description="API key for authenticating with the Tencent Vector Database service",
default=None,
)
@@ -22,12 +24,12 @@ class TencentVectorDBConfig(BaseSettings):
default=30,
)
TENCENT_VECTOR_DB_USERNAME: str | None = Field(
TENCENT_VECTOR_DB_USERNAME: Optional[str] = Field(
description="Username for authenticating with the Tencent Vector Database (if required)",
default=None,
)
TENCENT_VECTOR_DB_PASSWORD: str | None = Field(
TENCENT_VECTOR_DB_PASSWORD: Optional[str] = Field(
description="Password for authenticating with the Tencent Vector Database (if required)",
default=None,
)
@@ -42,7 +44,7 @@ class TencentVectorDBConfig(BaseSettings):
default=2,
)
TENCENT_VECTOR_DB_DATABASE: str | None = Field(
TENCENT_VECTOR_DB_DATABASE: Optional[str] = Field(
description="Name of the specific Tencent Vector Database to connect to",
default=None,
)

View File

@@ -1,3 +1,5 @@
from typing import Optional
from pydantic import Field, NonNegativeInt, PositiveInt
from pydantic_settings import BaseSettings
@@ -7,12 +9,12 @@ class TidbOnQdrantConfig(BaseSettings):
Tidb on Qdrant configs
"""
TIDB_ON_QDRANT_URL: str | None = Field(
TIDB_ON_QDRANT_URL: Optional[str] = Field(
description="Tidb on Qdrant url",
default=None,
)
TIDB_ON_QDRANT_API_KEY: str | None = Field(
TIDB_ON_QDRANT_API_KEY: Optional[str] = Field(
description="Tidb on Qdrant api key",
default=None,
)
@@ -32,37 +34,37 @@ class TidbOnQdrantConfig(BaseSettings):
default=6334,
)
TIDB_PUBLIC_KEY: str | None = Field(
TIDB_PUBLIC_KEY: Optional[str] = Field(
description="Tidb account public key",
default=None,
)
TIDB_PRIVATE_KEY: str | None = Field(
TIDB_PRIVATE_KEY: Optional[str] = Field(
description="Tidb account private key",
default=None,
)
TIDB_API_URL: str | None = Field(
TIDB_API_URL: Optional[str] = Field(
description="Tidb API url",
default=None,
)
TIDB_IAM_API_URL: str | None = Field(
TIDB_IAM_API_URL: Optional[str] = Field(
description="Tidb IAM API url",
default=None,
)
TIDB_REGION: str | None = Field(
TIDB_REGION: Optional[str] = Field(
description="Tidb serverless region",
default="regions/aws-us-east-1",
)
TIDB_PROJECT_ID: str | None = Field(
TIDB_PROJECT_ID: Optional[str] = Field(
description="Tidb project id",
default=None,
)
TIDB_SPEND_LIMIT: int | None = Field(
TIDB_SPEND_LIMIT: Optional[int] = Field(
description="Tidb spend limit",
default=100,
)

View File

@@ -1,3 +1,5 @@
from typing import Optional
from pydantic import Field, PositiveInt
from pydantic_settings import BaseSettings
@@ -7,27 +9,27 @@ class TiDBVectorConfig(BaseSettings):
Configuration settings for TiDB Vector database
"""
TIDB_VECTOR_HOST: str | None = Field(
TIDB_VECTOR_HOST: Optional[str] = Field(
description="Hostname or IP address of the TiDB Vector server (e.g., 'localhost' or 'tidb.example.com')",
default=None,
)
TIDB_VECTOR_PORT: PositiveInt | None = Field(
TIDB_VECTOR_PORT: Optional[PositiveInt] = Field(
description="Port number on which the TiDB Vector server is listening (default is 4000)",
default=4000,
)
TIDB_VECTOR_USER: str | None = Field(
TIDB_VECTOR_USER: Optional[str] = Field(
description="Username for authenticating with the TiDB Vector database",
default=None,
)
TIDB_VECTOR_PASSWORD: str | None = Field(
TIDB_VECTOR_PASSWORD: Optional[str] = Field(
description="Password for authenticating with the TiDB Vector database",
default=None,
)
TIDB_VECTOR_DATABASE: str | None = Field(
TIDB_VECTOR_DATABASE: Optional[str] = Field(
description="Name of the TiDB Vector database to connect to",
default=None,
)

View File

@@ -1,3 +1,5 @@
from typing import Optional
from pydantic import Field
from pydantic_settings import BaseSettings
@@ -7,12 +9,12 @@ class UpstashConfig(BaseSettings):
Configuration settings for Upstash vector database
"""
UPSTASH_VECTOR_URL: str | None = Field(
UPSTASH_VECTOR_URL: Optional[str] = Field(
description="URL of the upstash server (e.g., 'https://vector.upstash.io')",
default=None,
)
UPSTASH_VECTOR_TOKEN: str | None = Field(
UPSTASH_VECTOR_TOKEN: Optional[str] = Field(
description="Token for authenticating with the upstash server",
default=None,
)

View File

@@ -1,3 +1,5 @@
from typing import Optional
from pydantic import Field, PositiveInt
from pydantic_settings import BaseSettings
@@ -7,7 +9,7 @@ class VastbaseVectorConfig(BaseSettings):
Configuration settings for Vastbase Vector (Vastbase with vector extension)
"""
VASTBASE_HOST: str | None = Field(
VASTBASE_HOST: Optional[str] = Field(
description="Hostname or IP address of the Vastbase server with Vector extension (e.g., 'localhost')",
default=None,
)
@@ -17,17 +19,17 @@ class VastbaseVectorConfig(BaseSettings):
default=5432,
)
VASTBASE_USER: str | None = Field(
VASTBASE_USER: Optional[str] = Field(
description="Username for authenticating with the Vastbase database",
default=None,
)
VASTBASE_PASSWORD: str | None = Field(
VASTBASE_PASSWORD: Optional[str] = Field(
description="Password for authenticating with the Vastbase database",
default=None,
)
VASTBASE_DATABASE: str | None = Field(
VASTBASE_DATABASE: Optional[str] = Field(
description="Name of the Vastbase database to connect to",
default=None,
)

View File

@@ -1,3 +1,5 @@
from typing import Optional
from pydantic import Field
from pydantic_settings import BaseSettings
@@ -9,14 +11,14 @@ class VikingDBConfig(BaseSettings):
https://www.volcengine.com/docs/6291/65568
"""
VIKINGDB_ACCESS_KEY: str | None = Field(
VIKINGDB_ACCESS_KEY: Optional[str] = Field(
description="The Access Key provided by Volcengine VikingDB for API authentication. "
"Refer to the following documentation for details on obtaining credentials: "
"https://www.volcengine.com/docs/6291/65568",
default=None,
)
VIKINGDB_SECRET_KEY: str | None = Field(
VIKINGDB_SECRET_KEY: Optional[str] = Field(
description="The Secret Key provided by Volcengine VikingDB for API authentication.",
default=None,
)

View File

@@ -1,3 +1,5 @@
from typing import Optional
from pydantic import Field, PositiveInt
from pydantic_settings import BaseSettings
@@ -7,12 +9,12 @@ class WeaviateConfig(BaseSettings):
Configuration settings for Weaviate vector database
"""
WEAVIATE_ENDPOINT: str | None = Field(
WEAVIATE_ENDPOINT: Optional[str] = Field(
description="URL of the Weaviate server (e.g., 'http://localhost:8080' or 'https://weaviate.example.com')",
default=None,
)
WEAVIATE_API_KEY: str | None = Field(
WEAVIATE_API_KEY: Optional[str] = Field(
description="API key for authenticating with the Weaviate server",
default=None,
)

View File

@@ -1,5 +1,5 @@
from collections.abc import Mapping
from typing import Any
from typing import Any, Optional
from pydantic import Field
from pydantic.fields import FieldInfo
@@ -15,22 +15,22 @@ class ApolloSettingsSourceInfo(BaseSettings):
Packaging build information
"""
APOLLO_APP_ID: str | None = Field(
APOLLO_APP_ID: Optional[str] = Field(
description="apollo app_id",
default=None,
)
APOLLO_CLUSTER: str | None = Field(
APOLLO_CLUSTER: Optional[str] = Field(
description="apollo cluster",
default=None,
)
APOLLO_CONFIG_URL: str | None = Field(
APOLLO_CONFIG_URL: Optional[str] = Field(
description="apollo config url",
default=None,
)
APOLLO_NAMESPACE: str | None = Field(
APOLLO_NAMESPACE: Optional[str] = Field(
description="apollo namespace",
default=None,
)

View File

@@ -29,7 +29,7 @@ def no_key_cache_key(namespace: str, key: str) -> str:
# Returns the obtained value for the key, or None if it does not exist
def get_value_from_dict(namespace_cache: dict[str, Any] | None, key: str) -> Any:
def get_value_from_dict(namespace_cache: dict[str, Any] | None, key: str) -> Any | None:
if namespace_cache:
kv_data = namespace_cache.get(CONFIGURATIONS)
if kv_data is None:

View File

@@ -1,3 +1,5 @@
from typing import Optional
import flask_restx
from flask_login import current_user
from flask_restx import Resource, fields, marshal_with
@@ -48,7 +50,7 @@ class BaseApiKeyListResource(Resource):
method_decorators = [account_initialization_required, login_required, setup_required]
resource_type: str | None = None
resource_model: type | None = None
resource_model: Optional[type] = None
resource_id_field: str | None = None
token_prefix: str | None = None
max_keys = 10
@@ -101,7 +103,7 @@ class BaseApiKeyResource(Resource):
method_decorators = [account_initialization_required, login_required, setup_required]
resource_type: str | None = None
resource_model: type | None = None
resource_model: Optional[type] = None
resource_id_field: str | None = None
def delete(self, resource_id, api_key_id):

View File

@@ -16,10 +16,7 @@ from core.helper.code_executor.javascript.javascript_code_provider import Javasc
from core.helper.code_executor.python3.python3_code_provider import Python3CodeProvider
from core.llm_generator.llm_generator import LLMGenerator
from core.model_runtime.errors.invoke import InvokeError
from extensions.ext_database import db
from libs.login import login_required
from models import App
from services.workflow_service import WorkflowService
@console_ns.route("/rule-generate")
@@ -208,6 +205,9 @@ class InstructionGenerateApi(Resource):
try:
# Generate from nothing for a workflow node
if (args["current"] == code_template or args["current"] == "") and args["node_id"] != "":
from models import App, db
from services.workflow_service import WorkflowService
app = db.session.query(App).where(App.id == args["flow_id"]).first()
if not app:
return {"error": f"app {args['flow_id']} not found"}, 400
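
This hunk removes the module-level imports of db, App, and WorkflowService and re-imports them inside the handler branch that needs them. A function-local import is a standard way to break an import cycle or to defer a heavy module until the code path that uses it actually runs; a minimal runnable sketch of the technique, using a stdlib module as the stand-in for the deferred dependency:

def handler(use_statistics: bool, values: list[float]) -> float:
    if use_statistics:
        # Deferred import: resolved on first call instead of at module import
        # time. The hunk above applies the same move to
        # "from models import App, db" so the controller stays importable
        # without pulling in the database layer up front.
        import statistics

        return statistics.mean(values)
    return sum(values) / len(values)


print(handler(True, [1.0, 2.0, 3.0]))  # 2.0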

View File

@@ -4,13 +4,13 @@ from collections.abc import Sequence
from typing import cast
from flask import abort, request
from flask_restx import Resource, fields, inputs, marshal_with, reqparse
from flask_restx import Resource, inputs, marshal_with, reqparse
from sqlalchemy.orm import Session
from werkzeug.exceptions import Forbidden, InternalServerError, NotFound
import services
from configs import dify_config
from controllers.console import api, console_ns
from controllers.console import api
from controllers.console.app.error import ConversationCompletedError, DraftWorkflowNotExist, DraftWorkflowNotSync
from controllers.console.app.wraps import get_app_model
from controllers.console.wraps import account_initialization_required, setup_required
@@ -20,7 +20,6 @@ from core.app.apps.base_app_queue_manager import AppQueueManager
from core.app.entities.app_invoke_entities import InvokeFrom
from core.file.models import File
from core.helper.trace_id_helper import get_external_trace_id
from core.workflow.graph_engine.manager import GraphEngineManager
from extensions.ext_database import db
from factories import file_factory, variable_factory
from fields.workflow_fields import workflow_fields, workflow_pagination_fields
@@ -58,13 +57,7 @@ def _parse_file(workflow: Workflow, files: list[dict] | None = None) -> Sequence
return file_objs
@console_ns.route("/apps/<uuid:app_id>/workflows/draft")
class DraftWorkflowApi(Resource):
@api.doc("get_draft_workflow")
@api.doc(description="Get draft workflow for an application")
@api.doc(params={"app_id": "Application ID"})
@api.response(200, "Draft workflow retrieved successfully", workflow_fields)
@api.response(404, "Draft workflow not found")
@setup_required
@login_required
@account_initialization_required
@@ -93,23 +86,6 @@ class DraftWorkflowApi(Resource):
@login_required
@account_initialization_required
@get_app_model(mode=[AppMode.ADVANCED_CHAT, AppMode.WORKFLOW])
@api.doc("sync_draft_workflow")
@api.doc(description="Sync draft workflow configuration")
@api.expect(
api.model(
"SyncDraftWorkflowRequest",
{
"graph": fields.Raw(required=True, description="Workflow graph configuration"),
"features": fields.Raw(required=True, description="Workflow features configuration"),
"hash": fields.String(description="Workflow hash for validation"),
"environment_variables": fields.List(fields.Raw, required=True, description="Environment variables"),
"conversation_variables": fields.List(fields.Raw, description="Conversation variables"),
},
)
)
@api.response(200, "Draft workflow synced successfully", workflow_fields)
@api.response(400, "Invalid workflow configuration")
@api.response(403, "Permission denied")
def post(self, app_model: App):
"""
Sync draft workflow
@@ -183,25 +159,7 @@ class DraftWorkflowApi(Resource):
}
@console_ns.route("/apps/<uuid:app_id>/advanced-chat/workflows/draft/run")
class AdvancedChatDraftWorkflowRunApi(Resource):
@api.doc("run_advanced_chat_draft_workflow")
@api.doc(description="Run draft workflow for advanced chat application")
@api.doc(params={"app_id": "Application ID"})
@api.expect(
api.model(
"AdvancedChatWorkflowRunRequest",
{
"query": fields.String(required=True, description="User query"),
"inputs": fields.Raw(description="Input variables"),
"files": fields.List(fields.Raw, description="File uploads"),
"conversation_id": fields.String(description="Conversation ID"),
},
)
)
@api.response(200, "Workflow run started successfully")
@api.response(400, "Invalid request parameters")
@api.response(403, "Permission denied")
@setup_required
@login_required
@account_initialization_required
@@ -250,23 +208,7 @@ class AdvancedChatDraftWorkflowRunApi(Resource):
raise InternalServerError()
@console_ns.route("/apps/<uuid:app_id>/advanced-chat/workflows/draft/iteration/nodes/<string:node_id>/run")
class AdvancedChatDraftRunIterationNodeApi(Resource):
@api.doc("run_advanced_chat_draft_iteration_node")
@api.doc(description="Run draft workflow iteration node for advanced chat")
@api.doc(params={"app_id": "Application ID", "node_id": "Node ID"})
@api.expect(
api.model(
"IterationNodeRunRequest",
{
"task_id": fields.String(required=True, description="Task ID"),
"inputs": fields.Raw(description="Input variables"),
},
)
)
@api.response(200, "Iteration node run started successfully")
@api.response(403, "Permission denied")
@api.response(404, "Node not found")
@setup_required
@login_required
@account_initialization_required
@@ -302,23 +244,7 @@ class AdvancedChatDraftRunIterationNodeApi(Resource):
raise InternalServerError()
@console_ns.route("/apps/<uuid:app_id>/workflows/draft/iteration/nodes/<string:node_id>/run")
class WorkflowDraftRunIterationNodeApi(Resource):
@api.doc("run_workflow_draft_iteration_node")
@api.doc(description="Run draft workflow iteration node")
@api.doc(params={"app_id": "Application ID", "node_id": "Node ID"})
@api.expect(
api.model(
"WorkflowIterationNodeRunRequest",
{
"task_id": fields.String(required=True, description="Task ID"),
"inputs": fields.Raw(description="Input variables"),
},
)
)
@api.response(200, "Workflow iteration node run started successfully")
@api.response(403, "Permission denied")
@api.response(404, "Node not found")
@setup_required
@login_required
@account_initialization_required
@@ -354,23 +280,7 @@ class WorkflowDraftRunIterationNodeApi(Resource):
raise InternalServerError()
@console_ns.route("/apps/<uuid:app_id>/advanced-chat/workflows/draft/loop/nodes/<string:node_id>/run")
class AdvancedChatDraftRunLoopNodeApi(Resource):
@api.doc("run_advanced_chat_draft_loop_node")
@api.doc(description="Run draft workflow loop node for advanced chat")
@api.doc(params={"app_id": "Application ID", "node_id": "Node ID"})
@api.expect(
api.model(
"LoopNodeRunRequest",
{
"task_id": fields.String(required=True, description="Task ID"),
"inputs": fields.Raw(description="Input variables"),
},
)
)
@api.response(200, "Loop node run started successfully")
@api.response(403, "Permission denied")
@api.response(404, "Node not found")
@setup_required
@login_required
@account_initialization_required
@@ -407,23 +317,7 @@ class AdvancedChatDraftRunLoopNodeApi(Resource):
raise InternalServerError()
@console_ns.route("/apps/<uuid:app_id>/workflows/draft/loop/nodes/<string:node_id>/run")
class WorkflowDraftRunLoopNodeApi(Resource):
@api.doc("run_workflow_draft_loop_node")
@api.doc(description="Run draft workflow loop node")
@api.doc(params={"app_id": "Application ID", "node_id": "Node ID"})
@api.expect(
api.model(
"WorkflowLoopNodeRunRequest",
{
"task_id": fields.String(required=True, description="Task ID"),
"inputs": fields.Raw(description="Input variables"),
},
)
)
@api.response(200, "Workflow loop node run started successfully")
@api.response(403, "Permission denied")
@api.response(404, "Node not found")
@setup_required
@login_required
@account_initialization_required
@@ -460,22 +354,7 @@ class WorkflowDraftRunLoopNodeApi(Resource):
raise InternalServerError()
@console_ns.route("/apps/<uuid:app_id>/workflows/draft/run")
class DraftWorkflowRunApi(Resource):
@api.doc("run_draft_workflow")
@api.doc(description="Run draft workflow")
@api.doc(params={"app_id": "Application ID"})
@api.expect(
api.model(
"DraftWorkflowRunRequest",
{
"inputs": fields.Raw(required=True, description="Input variables"),
"files": fields.List(fields.Raw, description="File uploads"),
},
)
)
@api.response(200, "Draft workflow run started successfully")
@api.response(403, "Permission denied")
@setup_required
@login_required
@account_initialization_required
@@ -514,14 +393,7 @@ class DraftWorkflowRunApi(Resource):
raise InvokeRateLimitHttpError(ex.description)
@console_ns.route("/apps/<uuid:app_id>/workflow-runs/tasks/<string:task_id>/stop")
class WorkflowTaskStopApi(Resource):
@api.doc("stop_workflow_task")
@api.doc(description="Stop running workflow task")
@api.doc(params={"app_id": "Application ID", "task_id": "Task ID"})
@api.response(200, "Task stopped successfully")
@api.response(404, "Task not found")
@api.response(403, "Permission denied")
@setup_required
@login_required
@account_initialization_required
@@ -537,32 +409,12 @@ class WorkflowTaskStopApi(Resource):
if not current_user.has_edit_permission:
raise Forbidden()
# Stop using both mechanisms for backward compatibility
# Legacy stop flag mechanism (without user check)
AppQueueManager.set_stop_flag_no_user_check(task_id)
# New graph engine command channel mechanism
GraphEngineManager.send_stop_command(task_id)
AppQueueManager.set_stop_flag(task_id, InvokeFrom.DEBUGGER, current_user.id)
return {"result": "success"}
@console_ns.route("/apps/<uuid:app_id>/workflows/draft/nodes/<string:node_id>/run")
class DraftWorkflowNodeRunApi(Resource):
@api.doc("run_draft_workflow_node")
@api.doc(description="Run draft workflow node")
@api.doc(params={"app_id": "Application ID", "node_id": "Node ID"})
@api.expect(
api.model(
"DraftWorkflowNodeRunRequest",
{
"inputs": fields.Raw(description="Input variables"),
},
)
)
@api.response(200, "Node run started successfully", workflow_run_node_execution_fields)
@api.response(403, "Permission denied")
@api.response(404, "Node not found")
@setup_required
@login_required
@account_initialization_required
@@ -610,13 +462,7 @@ class DraftWorkflowNodeRunApi(Resource):
return workflow_node_execution
@console_ns.route("/apps/<uuid:app_id>/workflows/publish")
class PublishedWorkflowApi(Resource):
@api.doc("get_published_workflow")
@api.doc(description="Get published workflow for an application")
@api.doc(params={"app_id": "Application ID"})
@api.response(200, "Published workflow retrieved successfully", workflow_fields)
@api.response(404, "Published workflow not found")
@setup_required
@login_required
@account_initialization_required
@@ -688,12 +534,7 @@ class PublishedWorkflowApi(Resource):
}
@console_ns.route("/apps/<uuid:app_id>/workflows/default-workflow-block-configs")
class DefaultBlockConfigsApi(Resource):
@api.doc("get_default_block_configs")
@api.doc(description="Get default block configurations for workflow")
@api.doc(params={"app_id": "Application ID"})
@api.response(200, "Default block configurations retrieved successfully")
@setup_required
@login_required
@account_initialization_required
@@ -714,13 +555,7 @@ class DefaultBlockConfigsApi(Resource):
return workflow_service.get_default_block_configs()
@console_ns.route("/apps/<uuid:app_id>/workflows/default-workflow-block-configs/<string:block_type>")
class DefaultBlockConfigApi(Resource):
@api.doc("get_default_block_config")
@api.doc(description="Get default block configuration by type")
@api.doc(params={"app_id": "Application ID", "block_type": "Block type"})
@api.response(200, "Default block configuration retrieved successfully")
@api.response(404, "Block type not found")
@setup_required
@login_required
@account_initialization_required
@@ -753,14 +588,7 @@ class DefaultBlockConfigApi(Resource):
return workflow_service.get_default_block_config(node_type=block_type, filters=filters)
@console_ns.route("/apps/<uuid:app_id>/convert-to-workflow")
class ConvertToWorkflowApi(Resource):
@api.doc("convert_to_workflow")
@api.doc(description="Convert application to workflow mode")
@api.doc(params={"app_id": "Application ID"})
@api.response(200, "Application converted to workflow successfully")
@api.response(400, "Application cannot be converted")
@api.response(403, "Permission denied")
@setup_required
@login_required
@account_initialization_required
@@ -797,14 +625,9 @@ class ConvertToWorkflowApi(Resource):
}
@console_ns.route("/apps/<uuid:app_id>/workflows/draft/config")
class WorkflowConfigApi(Resource):
"""Resource for workflow configuration."""
@api.doc("get_workflow_config")
@api.doc(description="Get workflow configuration")
@api.doc(params={"app_id": "Application ID"})
@api.response(200, "Workflow configuration retrieved successfully")
@setup_required
@login_required
@account_initialization_required
@@ -815,12 +638,7 @@ class WorkflowConfigApi(Resource):
}
@console_ns.route("/apps/<uuid:app_id>/workflows")
class PublishedAllWorkflowApi(Resource):
@api.doc("get_all_published_workflows")
@api.doc(description="Get all published workflows for an application")
@api.doc(params={"app_id": "Application ID"})
@api.response(200, "Published workflows retrieved successfully", workflow_pagination_fields)
@setup_required
@login_required
@account_initialization_required
@@ -871,23 +689,7 @@ class PublishedAllWorkflowApi(Resource):
}
@console_ns.route("/apps/<uuid:app_id>/workflows/<string:workflow_id>")
class WorkflowByIdApi(Resource):
@api.doc("update_workflow_by_id")
@api.doc(description="Update workflow by ID")
@api.doc(params={"app_id": "Application ID", "workflow_id": "Workflow ID"})
@api.expect(
api.model(
"UpdateWorkflowRequest",
{
"environment_variables": fields.List(fields.Raw, description="Environment variables"),
"conversation_variables": fields.List(fields.Raw, description="Conversation variables"),
},
)
)
@api.response(200, "Workflow updated successfully", workflow_fields)
@api.response(404, "Workflow not found")
@api.response(403, "Permission denied")
@setup_required
@login_required
@account_initialization_required
@@ -978,14 +780,7 @@ class WorkflowByIdApi(Resource):
return None, 204
@console_ns.route("/apps/<uuid:app_id>/workflows/draft/nodes/<string:node_id>/last-run")
class DraftWorkflowNodeLastRunApi(Resource):
@api.doc("get_draft_workflow_node_last_run")
@api.doc(description="Get last run result for draft workflow node")
@api.doc(params={"app_id": "Application ID", "node_id": "Node ID"})
@api.response(200, "Node last run retrieved successfully", workflow_run_node_execution_fields)
@api.response(404, "Node last run not found")
@api.response(403, "Permission denied")
@setup_required
@login_required
@account_initialization_required
@@ -1004,3 +799,73 @@ class DraftWorkflowNodeLastRunApi(Resource):
if node_exec is None:
raise NotFound("last run not found")
return node_exec
api.add_resource(
DraftWorkflowApi,
"/apps/<uuid:app_id>/workflows/draft",
)
api.add_resource(
WorkflowConfigApi,
"/apps/<uuid:app_id>/workflows/draft/config",
)
api.add_resource(
AdvancedChatDraftWorkflowRunApi,
"/apps/<uuid:app_id>/advanced-chat/workflows/draft/run",
)
api.add_resource(
DraftWorkflowRunApi,
"/apps/<uuid:app_id>/workflows/draft/run",
)
api.add_resource(
WorkflowTaskStopApi,
"/apps/<uuid:app_id>/workflow-runs/tasks/<string:task_id>/stop",
)
api.add_resource(
DraftWorkflowNodeRunApi,
"/apps/<uuid:app_id>/workflows/draft/nodes/<string:node_id>/run",
)
api.add_resource(
AdvancedChatDraftRunIterationNodeApi,
"/apps/<uuid:app_id>/advanced-chat/workflows/draft/iteration/nodes/<string:node_id>/run",
)
api.add_resource(
WorkflowDraftRunIterationNodeApi,
"/apps/<uuid:app_id>/workflows/draft/iteration/nodes/<string:node_id>/run",
)
api.add_resource(
AdvancedChatDraftRunLoopNodeApi,
"/apps/<uuid:app_id>/advanced-chat/workflows/draft/loop/nodes/<string:node_id>/run",
)
api.add_resource(
WorkflowDraftRunLoopNodeApi,
"/apps/<uuid:app_id>/workflows/draft/loop/nodes/<string:node_id>/run",
)
api.add_resource(
PublishedWorkflowApi,
"/apps/<uuid:app_id>/workflows/publish",
)
api.add_resource(
PublishedAllWorkflowApi,
"/apps/<uuid:app_id>/workflows",
)
api.add_resource(
DefaultBlockConfigsApi,
"/apps/<uuid:app_id>/workflows/default-workflow-block-configs",
)
api.add_resource(
DefaultBlockConfigApi,
"/apps/<uuid:app_id>/workflows/default-workflow-block-configs/<string:block_type>",
)
api.add_resource(
ConvertToWorkflowApi,
"/apps/<uuid:app_id>/convert-to-workflow",
)
api.add_resource(
WorkflowByIdApi,
"/apps/<uuid:app_id>/workflows/<string:workflow_id>",
)
api.add_resource(
DraftWorkflowNodeLastRunApi,
"/apps/<uuid:app_id>/workflows/draft/nodes/<string:node_id>/last-run",
)
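
Across this file the @api.doc/@api.expect OpenAPI metadata is removed from the console_ns-routed classes, and the endpoints are re-registered with explicit api.add_resource calls at the bottom. flask-restx supports both styles; a minimal sketch contrasting them, with a hypothetical Ping resource on a throwaway app:

from flask import Flask
from flask_restx import Api, Namespace, Resource

app = Flask(__name__)
api = Api(app)
console_ns = Namespace("console", path="/console")
api.add_namespace(console_ns)


@console_ns.route("/ping")  # decorator style: the route lives on the class
class PingDecorated(Resource):
    def get(self):
        return {"result": "pong"}


class PingPlain(Resource):  # plain class: routing is attached externally
    def get(self):
        return {"result": "pong"}


api.add_resource(PingPlain, "/ping")  # registration style used at the end of this file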

View File

@@ -3,10 +3,10 @@ from flask_restx import Resource, marshal_with, reqparse
from flask_restx.inputs import int_range
from sqlalchemy.orm import Session
from controllers.console import api, console_ns
from controllers.console import api
from controllers.console.app.wraps import get_app_model
from controllers.console.wraps import account_initialization_required, setup_required
from core.workflow.enums import WorkflowExecutionStatus
from core.workflow.entities.workflow_execution import WorkflowExecutionStatus
from extensions.ext_database import db
from fields.workflow_app_log_fields import workflow_app_log_pagination_fields
from libs.login import login_required
@@ -15,24 +15,7 @@ from models.model import AppMode
from services.workflow_app_service import WorkflowAppService
@console_ns.route("/apps/<uuid:app_id>/workflow-app-logs")
class WorkflowAppLogApi(Resource):
@api.doc("get_workflow_app_logs")
@api.doc(description="Get workflow application execution logs")
@api.doc(params={"app_id": "Application ID"})
@api.doc(
params={
"keyword": "Search keyword for filtering logs",
"status": "Filter by execution status (succeeded, failed, stopped, partial-succeeded)",
"created_at__before": "Filter logs created before this timestamp",
"created_at__after": "Filter logs created after this timestamp",
"created_by_end_user_session_id": "Filter by end user session ID",
"created_by_account": "Filter by account",
"page": "Page number (1-99999)",
"limit": "Number of items per page (1-100)",
}
)
@api.response(200, "Workflow app logs retrieved successfully", workflow_app_log_pagination_fields)
@setup_required
@login_required
@account_initialization_required
@@ -95,3 +78,6 @@ class WorkflowAppLogApi(Resource):
)
return workflow_app_log_pagination
api.add_resource(WorkflowAppLogApi, "/apps/<uuid:app_id>/workflow-app-logs")

View File

@@ -6,7 +6,7 @@ from flask_restx import Resource, fields, inputs, marshal, marshal_with, reqpars
from sqlalchemy.orm import Session
from werkzeug.exceptions import Forbidden
from controllers.console import api, console_ns
from controllers.console import api
from controllers.console.app.error import (
DraftWorkflowNotExist,
)
@@ -17,11 +17,10 @@ from core.variables.segment_group import SegmentGroup
from core.variables.segments import ArrayFileSegment, FileSegment, Segment
from core.variables.types import SegmentType
from core.workflow.constants import CONVERSATION_VARIABLE_NODE_ID, SYSTEM_VARIABLE_NODE_ID
from extensions.ext_database import db
from factories.file_factory import build_from_mapping, build_from_mappings
from factories.variable_factory import build_segment_with_type
from libs.login import current_user, login_required
from models import App, AppMode
from models import App, AppMode, db
from models.account import Account
from models.workflow import WorkflowDraftVariable
from services.workflow_draft_variable_service import WorkflowDraftVariableList, WorkflowDraftVariableService
@@ -145,13 +144,7 @@ def _api_prerequisite(f):
return wrapper
@console_ns.route("/apps/<uuid:app_id>/workflows/draft/variables")
class WorkflowVariableCollectionApi(Resource):
@api.doc("get_workflow_variables")
@api.doc(description="Get draft workflow variables")
@api.doc(params={"app_id": "Application ID"})
@api.doc(params={"page": "Page number (1-100000)", "limit": "Number of items per page (1-100)"})
@api.response(200, "Workflow variables retrieved successfully", _WORKFLOW_DRAFT_VARIABLE_LIST_WITHOUT_VALUE_FIELDS)
@_api_prerequisite
@marshal_with(_WORKFLOW_DRAFT_VARIABLE_LIST_WITHOUT_VALUE_FIELDS)
def get(self, app_model: App):
@@ -180,9 +173,6 @@ class WorkflowVariableCollectionApi(Resource):
return workflow_vars
@api.doc("delete_workflow_variables")
@api.doc(description="Delete all draft workflow variables")
@api.response(204, "Workflow variables deleted successfully")
@_api_prerequisite
def delete(self, app_model: App):
draft_var_srv = WorkflowDraftVariableService(
@@ -211,12 +201,7 @@ def validate_node_id(node_id: str) -> NoReturn | None:
return None
@console_ns.route("/apps/<uuid:app_id>/workflows/draft/nodes/<string:node_id>/variables")
class NodeVariableCollectionApi(Resource):
@api.doc("get_node_variables")
@api.doc(description="Get variables for a specific node")
@api.doc(params={"app_id": "Application ID", "node_id": "Node ID"})
@api.response(200, "Node variables retrieved successfully", _WORKFLOW_DRAFT_VARIABLE_LIST_FIELDS)
@_api_prerequisite
@marshal_with(_WORKFLOW_DRAFT_VARIABLE_LIST_FIELDS)
def get(self, app_model: App, node_id: str):
@@ -229,9 +214,6 @@ class NodeVariableCollectionApi(Resource):
return node_vars
@api.doc("delete_node_variables")
@api.doc(description="Delete all variables for a specific node")
@api.response(204, "Node variables deleted successfully")
@_api_prerequisite
def delete(self, app_model: App, node_id: str):
validate_node_id(node_id)
@@ -241,16 +223,10 @@ class NodeVariableCollectionApi(Resource):
return Response("", 204)
@console_ns.route("/apps/<uuid:app_id>/workflows/draft/variables/<uuid:variable_id>")
class VariableApi(Resource):
_PATCH_NAME_FIELD = "name"
_PATCH_VALUE_FIELD = "value"
@api.doc("get_variable")
@api.doc(description="Get a specific workflow variable")
@api.doc(params={"app_id": "Application ID", "variable_id": "Variable ID"})
@api.response(200, "Variable retrieved successfully", _WORKFLOW_DRAFT_VARIABLE_FIELDS)
@api.response(404, "Variable not found")
@_api_prerequisite
@marshal_with(_WORKFLOW_DRAFT_VARIABLE_FIELDS)
def get(self, app_model: App, variable_id: str):
@@ -264,19 +240,6 @@ class VariableApi(Resource):
raise NotFoundError(description=f"variable not found, id={variable_id}")
return variable
@api.doc("update_variable")
@api.doc(description="Update a workflow variable")
@api.expect(
api.model(
"UpdateVariableRequest",
{
"name": fields.String(description="Variable name"),
"value": fields.Raw(description="Variable value"),
},
)
)
@api.response(200, "Variable updated successfully", _WORKFLOW_DRAFT_VARIABLE_FIELDS)
@api.response(404, "Variable not found")
@_api_prerequisite
@marshal_with(_WORKFLOW_DRAFT_VARIABLE_FIELDS)
def patch(self, app_model: App, variable_id: str):
@@ -339,10 +302,6 @@ class VariableApi(Resource):
db.session.commit()
return variable
@api.doc("delete_variable")
@api.doc(description="Delete a workflow variable")
@api.response(204, "Variable deleted successfully")
@api.response(404, "Variable not found")
@_api_prerequisite
def delete(self, app_model: App, variable_id: str):
draft_var_srv = WorkflowDraftVariableService(
@@ -358,14 +317,7 @@ class VariableApi(Resource):
return Response("", 204)
@console_ns.route("/apps/<uuid:app_id>/workflows/draft/variables/<uuid:variable_id>/reset")
class VariableResetApi(Resource):
@api.doc("reset_variable")
@api.doc(description="Reset a workflow variable to its default value")
@api.doc(params={"app_id": "Application ID", "variable_id": "Variable ID"})
@api.response(200, "Variable reset successfully", _WORKFLOW_DRAFT_VARIABLE_FIELDS)
@api.response(204, "Variable reset (no content)")
@api.response(404, "Variable not found")
@_api_prerequisite
def put(self, app_model: App, variable_id: str):
draft_var_srv = WorkflowDraftVariableService(
@@ -406,13 +358,7 @@ def _get_variable_list(app_model: App, node_id) -> WorkflowDraftVariableList:
return draft_vars
@console_ns.route("/apps/<uuid:app_id>/workflows/draft/conversation-variables")
class ConversationVariableCollectionApi(Resource):
@api.doc("get_conversation_variables")
@api.doc(description="Get conversation variables for workflow")
@api.doc(params={"app_id": "Application ID"})
@api.response(200, "Conversation variables retrieved successfully", _WORKFLOW_DRAFT_VARIABLE_LIST_FIELDS)
@api.response(404, "Draft workflow not found")
@_api_prerequisite
@marshal_with(_WORKFLOW_DRAFT_VARIABLE_LIST_FIELDS)
def get(self, app_model: App):
@@ -428,25 +374,14 @@ class ConversationVariableCollectionApi(Resource):
return _get_variable_list(app_model, CONVERSATION_VARIABLE_NODE_ID)
@console_ns.route("/apps/<uuid:app_id>/workflows/draft/system-variables")
class SystemVariableCollectionApi(Resource):
@api.doc("get_system_variables")
@api.doc(description="Get system variables for workflow")
@api.doc(params={"app_id": "Application ID"})
@api.response(200, "System variables retrieved successfully", _WORKFLOW_DRAFT_VARIABLE_LIST_FIELDS)
@_api_prerequisite
@marshal_with(_WORKFLOW_DRAFT_VARIABLE_LIST_FIELDS)
def get(self, app_model: App):
return _get_variable_list(app_model, SYSTEM_VARIABLE_NODE_ID)
@console_ns.route("/apps/<uuid:app_id>/workflows/draft/environment-variables")
class EnvironmentVariableCollectionApi(Resource):
@api.doc("get_environment_variables")
@api.doc(description="Get environment variables for workflow")
@api.doc(params={"app_id": "Application ID"})
@api.response(200, "Environment variables retrieved successfully")
@api.response(404, "Draft workflow not found")
@_api_prerequisite
def get(self, app_model: App):
"""
@@ -478,3 +413,16 @@ class EnvironmentVariableCollectionApi(Resource):
)
return {"items": env_vars_list}
api.add_resource(
WorkflowVariableCollectionApi,
"/apps/<uuid:app_id>/workflows/draft/variables",
)
api.add_resource(NodeVariableCollectionApi, "/apps/<uuid:app_id>/workflows/draft/nodes/<string:node_id>/variables")
api.add_resource(VariableApi, "/apps/<uuid:app_id>/workflows/draft/variables/<uuid:variable_id>")
api.add_resource(VariableResetApi, "/apps/<uuid:app_id>/workflows/draft/variables/<uuid:variable_id>/reset")
api.add_resource(ConversationVariableCollectionApi, "/apps/<uuid:app_id>/workflows/draft/conversation-variables")
api.add_resource(SystemVariableCollectionApi, "/apps/<uuid:app_id>/workflows/draft/system-variables")
api.add_resource(EnvironmentVariableCollectionApi, "/apps/<uuid:app_id>/workflows/draft/environment-variables")

View File

@@ -4,7 +4,7 @@ from flask_login import current_user
from flask_restx import Resource, marshal_with, reqparse
from flask_restx.inputs import int_range
from controllers.console import api, console_ns
from controllers.console import api
from controllers.console.app.wraps import get_app_model
from controllers.console.wraps import account_initialization_required, setup_required
from fields.workflow_run_fields import (
@@ -19,13 +19,7 @@ from models import Account, App, AppMode, EndUser
from services.workflow_run_service import WorkflowRunService
@console_ns.route("/apps/<uuid:app_id>/advanced-chat/workflow-runs")
class AdvancedChatAppWorkflowRunListApi(Resource):
@api.doc("get_advanced_chat_workflow_runs")
@api.doc(description="Get advanced chat workflow run list")
@api.doc(params={"app_id": "Application ID"})
@api.doc(params={"last_id": "Last run ID for pagination", "limit": "Number of items per page (1-100)"})
@api.response(200, "Workflow runs retrieved successfully", advanced_chat_workflow_run_pagination_fields)
@setup_required
@login_required
@account_initialization_required
@@ -46,13 +40,7 @@ class AdvancedChatAppWorkflowRunListApi(Resource):
return result
@console_ns.route("/apps/<uuid:app_id>/workflow-runs")
class WorkflowRunListApi(Resource):
@api.doc("get_workflow_runs")
@api.doc(description="Get workflow run list")
@api.doc(params={"app_id": "Application ID"})
@api.doc(params={"last_id": "Last run ID for pagination", "limit": "Number of items per page (1-100)"})
@api.response(200, "Workflow runs retrieved successfully", workflow_run_pagination_fields)
@setup_required
@login_required
@account_initialization_required
@@ -73,13 +61,7 @@ class WorkflowRunListApi(Resource):
return result
@console_ns.route("/apps/<uuid:app_id>/workflow-runs/<uuid:run_id>")
class WorkflowRunDetailApi(Resource):
@api.doc("get_workflow_run_detail")
@api.doc(description="Get workflow run detail")
@api.doc(params={"app_id": "Application ID", "run_id": "Workflow run ID"})
@api.response(200, "Workflow run detail retrieved successfully", workflow_run_detail_fields)
@api.response(404, "Workflow run not found")
@setup_required
@login_required
@account_initialization_required
@@ -97,13 +79,7 @@ class WorkflowRunDetailApi(Resource):
return workflow_run
@console_ns.route("/apps/<uuid:app_id>/workflow-runs/<uuid:run_id>/node-executions")
class WorkflowRunNodeExecutionListApi(Resource):
@api.doc("get_workflow_run_node_executions")
@api.doc(description="Get workflow run node execution list")
@api.doc(params={"app_id": "Application ID", "run_id": "Workflow run ID"})
@api.response(200, "Node executions retrieved successfully", workflow_run_node_execution_list_fields)
@api.response(404, "Workflow run not found")
@setup_required
@login_required
@account_initialization_required
@@ -124,3 +100,9 @@ class WorkflowRunNodeExecutionListApi(Resource):
)
return {"data": node_executions}
api.add_resource(AdvancedChatAppWorkflowRunListApi, "/apps/<uuid:app_id>/advanced-chat/workflow-runs")
api.add_resource(WorkflowRunListApi, "/apps/<uuid:app_id>/workflow-runs")
api.add_resource(WorkflowRunDetailApi, "/apps/<uuid:app_id>/workflow-runs/<uuid:run_id>")
api.add_resource(WorkflowRunNodeExecutionListApi, "/apps/<uuid:app_id>/workflow-runs/<uuid:run_id>/node-executions")

View File

@@ -7,7 +7,7 @@ from flask import jsonify
from flask_login import current_user
from flask_restx import Resource, reqparse
from controllers.console import api, console_ns
from controllers.console import api
from controllers.console.app.wraps import get_app_model
from controllers.console.wraps import account_initialization_required, setup_required
from extensions.ext_database import db
@@ -17,13 +17,7 @@ from models.enums import WorkflowRunTriggeredFrom
from models.model import AppMode
@console_ns.route("/apps/<uuid:app_id>/workflow/statistics/daily-conversations")
class WorkflowDailyRunsStatistic(Resource):
@api.doc("get_workflow_daily_runs_statistic")
@api.doc(description="Get workflow daily runs statistics")
@api.doc(params={"app_id": "Application ID"})
@api.doc(params={"start": "Start date and time (YYYY-MM-DD HH:MM)", "end": "End date and time (YYYY-MM-DD HH:MM)"})
@api.response(200, "Daily runs statistics retrieved successfully")
@get_app_model
@setup_required
@login_required
@@ -85,13 +79,7 @@ WHERE
return jsonify({"data": response_data})
@console_ns.route("/apps/<uuid:app_id>/workflow/statistics/daily-terminals")
class WorkflowDailyTerminalsStatistic(Resource):
@api.doc("get_workflow_daily_terminals_statistic")
@api.doc(description="Get workflow daily terminals statistics")
@api.doc(params={"app_id": "Application ID"})
@api.doc(params={"start": "Start date and time (YYYY-MM-DD HH:MM)", "end": "End date and time (YYYY-MM-DD HH:MM)"})
@api.response(200, "Daily terminals statistics retrieved successfully")
@get_app_model
@setup_required
@login_required
@@ -153,13 +141,7 @@ WHERE
return jsonify({"data": response_data})
@console_ns.route("/apps/<uuid:app_id>/workflow/statistics/token-costs")
class WorkflowDailyTokenCostStatistic(Resource):
@api.doc("get_workflow_daily_token_cost_statistic")
@api.doc(description="Get workflow daily token cost statistics")
@api.doc(params={"app_id": "Application ID"})
@api.doc(params={"start": "Start date and time (YYYY-MM-DD HH:MM)", "end": "End date and time (YYYY-MM-DD HH:MM)"})
@api.response(200, "Daily token cost statistics retrieved successfully")
@get_app_model
@setup_required
@login_required
@@ -226,13 +208,7 @@ WHERE
return jsonify({"data": response_data})
@console_ns.route("/apps/<uuid:app_id>/workflow/statistics/average-app-interactions")
class WorkflowAverageAppInteractionStatistic(Resource):
@api.doc("get_workflow_average_app_interaction_statistic")
@api.doc(description="Get workflow average app interaction statistics")
@api.doc(params={"app_id": "Application ID"})
@api.doc(params={"start": "Start date and time (YYYY-MM-DD HH:MM)", "end": "End date and time (YYYY-MM-DD HH:MM)"})
@api.response(200, "Average app interaction statistics retrieved successfully")
@setup_required
@login_required
@account_initialization_required
@@ -309,3 +285,11 @@ GROUP BY
)
return jsonify({"data": response_data})
api.add_resource(WorkflowDailyRunsStatistic, "/apps/<uuid:app_id>/workflow/statistics/daily-conversations")
api.add_resource(WorkflowDailyTerminalsStatistic, "/apps/<uuid:app_id>/workflow/statistics/daily-terminals")
api.add_resource(WorkflowDailyTokenCostStatistic, "/apps/<uuid:app_id>/workflow/statistics/token-costs")
api.add_resource(
WorkflowAverageAppInteractionStatistic, "/apps/<uuid:app_id>/workflow/statistics/average-app-interactions"
)

View File

@@ -1,6 +1,6 @@
from collections.abc import Callable
from functools import wraps
from typing import ParamSpec, TypeVar, Union
from typing import Optional, ParamSpec, TypeVar, Union
from controllers.console.app.error import AppNotFoundError
from extensions.ext_database import db
@@ -12,7 +12,7 @@ P = ParamSpec("P")
R = TypeVar("R")
def _load_app_model(app_id: str) -> App | None:
def _load_app_model(app_id: str) -> Optional[App]:
assert isinstance(current_user, Account)
app_model = (
db.session.query(App)
@@ -22,7 +22,7 @@ def _load_app_model(app_id: str) -> App | None:
return app_model
def get_app_model(view: Callable[P, R] | None = None, *, mode: Union[AppMode, list[AppMode], None] = None):
def get_app_model(view: Optional[Callable[P, R]] = None, *, mode: Union[AppMode, list[AppMode], None] = None):
def decorator(view_func: Callable[P, R]):
@wraps(view_func)
def decorated_view(*args: P.args, **kwargs: P.kwargs):
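
get_app_model keeps the dual-use decorator signature: applied bare as @get_app_model, the view function arrives in the positional view parameter; applied as @get_app_model(mode=...), the call returns the decorator to apply next. A minimal self-contained sketch of that dispatch (mode is narrowed to a plain string here instead of AppMode):

from collections.abc import Callable
from functools import wraps
from typing import Optional, ParamSpec, TypeVar

P = ParamSpec("P")
R = TypeVar("R")


def get_app_model(view: Optional[Callable[P, R]] = None, *, mode: Optional[str] = None):
    def decorator(view_func: Callable[P, R]) -> Callable[P, R]:
        @wraps(view_func)
        def decorated_view(*args: P.args, **kwargs: P.kwargs) -> R:
            # the real decorator loads the App model and validates its mode here
            return view_func(*args, **kwargs)

        return decorated_view

    if view is not None:
        return decorator(view)  # bare usage: @get_app_model
    return decorator  # parameterized usage: @get_app_model(mode="workflow")


@get_app_model
def show(app_id: str) -> str:
    return app_id


print(show("demo"))  # demo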

View File

@@ -1,4 +1,5 @@
import logging
from typing import Optional
import requests
from flask import current_app, redirect, request
@@ -156,8 +157,8 @@ class OAuthCallback(Resource):
)
def _get_account_by_openid_or_email(provider: str, user_info: OAuthUserInfo) -> Account | None:
account: Account | None = Account.get_by_openid(provider, user_info.id)
def _get_account_by_openid_or_email(provider: str, user_info: OAuthUserInfo) -> Optional[Account]:
account: Optional[Account] = Account.get_by_openid(provider, user_info.id)
if not account:
with Session(db.engine) as session:
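
The annotated helper resolves an account by the provider-scoped openid first and falls back to an email lookup only when that misses. A minimal self-contained sketch of the control flow, with an in-memory stand-in for the real Account model:

from typing import Optional


class Account:
    # in-memory stand-in for the real model
    _by_openid: dict[tuple[str, str], "Account"] = {}
    _by_email: dict[str, "Account"] = {}

    def __init__(self, email: str):
        self.email = email
        Account._by_email[email] = self

    @classmethod
    def get_by_openid(cls, provider: str, openid: str) -> Optional["Account"]:
        return cls._by_openid.get((provider, openid))


def get_account_by_openid_or_email(provider: str, openid: str, email: str) -> Optional[Account]:
    account: Optional[Account] = Account.get_by_openid(provider, openid)  # primary lookup
    if not account:
        account = Account._by_email.get(email)  # fallback: match by email
    return account


alice = Account("alice@example.com")
print(get_account_by_openid_or_email("github", "no-id", "alice@example.com") is alice)  # True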

View File

@@ -1,13 +1,13 @@
import flask_restx
from flask import request
from flask_login import current_user
from flask_restx import Resource, fields, marshal, marshal_with, reqparse
from flask_restx import Resource, marshal, marshal_with, reqparse
from sqlalchemy import select
from werkzeug.exceptions import Forbidden, NotFound
import services
from configs import dify_config
from controllers.console import api, console_ns
from controllers.console import api
from controllers.console.apikey import api_key_fields, api_key_list
from controllers.console.app.error import ProviderNotInitializeError
from controllers.console.datasets.error import DatasetInUseError, DatasetNameDuplicateError, IndexingEstimateError
@@ -20,6 +20,7 @@ from controllers.console.wraps import (
from core.errors.error import LLMBadRequestError, ProviderTokenNotInitError
from core.indexing_runner import IndexingRunner
from core.model_runtime.entities.model_entities import ModelType
from core.plugin.entities.plugin import ModelProviderID
from core.provider_manager import ProviderManager
from core.rag.datasource.vdb.vector_type import VectorType
from core.rag.extractor.entity.datasource_type import DatasourceType
@@ -32,7 +33,6 @@ from fields.document_fields import document_status_fields
from libs.login import login_required
from models import ApiToken, Dataset, Document, DocumentSegment, UploadFile
from models.dataset import DatasetPermissionEnum
from models.provider_ids import ModelProviderID
from services.dataset_service import DatasetPermissionService, DatasetService, DocumentService
@@ -48,21 +48,7 @@ def _validate_description_length(description):
return description
@console_ns.route("/datasets")
class DatasetListApi(Resource):
@api.doc("get_datasets")
@api.doc(description="Get list of datasets")
@api.doc(
params={
"page": "Page number (default: 1)",
"limit": "Number of items per page (default: 20)",
"ids": "Filter by dataset IDs (list)",
"keyword": "Search keyword",
"tag_ids": "Filter by tag IDs (list)",
"include_all": "Include all datasets (default: false)",
}
)
@api.response(200, "Datasets retrieved successfully")
@setup_required
@login_required
@account_initialization_required
@@ -114,24 +100,6 @@ class DatasetListApi(Resource):
response = {"data": data, "has_more": len(datasets) == limit, "limit": limit, "total": total, "page": page}
return response, 200
@api.doc("create_dataset")
@api.doc(description="Create a new dataset")
@api.expect(
api.model(
"CreateDatasetRequest",
{
"name": fields.String(required=True, description="Dataset name (1-40 characters)"),
"description": fields.String(description="Dataset description (max 400 characters)"),
"indexing_technique": fields.String(description="Indexing technique"),
"permission": fields.String(description="Dataset permission"),
"provider": fields.String(description="Provider"),
"external_knowledge_api_id": fields.String(description="External knowledge API ID"),
"external_knowledge_id": fields.String(description="External knowledge ID"),
},
)
)
@api.response(201, "Dataset created successfully")
@api.response(400, "Invalid request parameters")
@setup_required
@login_required
@account_initialization_required
@@ -204,14 +172,7 @@ class DatasetListApi(Resource):
return marshal(dataset, dataset_detail_fields), 201
@console_ns.route("/datasets/<uuid:dataset_id>")
class DatasetApi(Resource):
@api.doc("get_dataset")
@api.doc(description="Get dataset details")
@api.doc(params={"dataset_id": "Dataset ID"})
@api.response(200, "Dataset retrieved successfully", dataset_detail_fields)
@api.response(404, "Dataset not found")
@api.response(403, "Permission denied")
@setup_required
@login_required
@account_initialization_required
@@ -254,23 +215,6 @@ class DatasetApi(Resource):
return data, 200
@api.doc("update_dataset")
@api.doc(description="Update dataset details")
@api.expect(
api.model(
"UpdateDatasetRequest",
{
"name": fields.String(description="Dataset name"),
"description": fields.String(description="Dataset description"),
"permission": fields.String(description="Dataset permission"),
"indexing_technique": fields.String(description="Indexing technique"),
"external_retrieval_model": fields.Raw(description="External retrieval model settings"),
},
)
)
@api.response(200, "Dataset updated successfully", dataset_detail_fields)
@api.response(404, "Dataset not found")
@api.response(403, "Permission denied")
@setup_required
@login_required
@account_initialization_required
@@ -400,12 +344,7 @@ class DatasetApi(Resource):
raise DatasetInUseError()
@console_ns.route("/datasets/<uuid:dataset_id>/use-check")
class DatasetUseCheckApi(Resource):
@api.doc("check_dataset_use")
@api.doc(description="Check if dataset is in use")
@api.doc(params={"dataset_id": "Dataset ID"})
@api.response(200, "Dataset use status retrieved successfully")
@setup_required
@login_required
@account_initialization_required
@@ -416,12 +355,7 @@ class DatasetUseCheckApi(Resource):
return {"is_using": dataset_is_using}, 200
@console_ns.route("/datasets/<uuid:dataset_id>/queries")
class DatasetQueryApi(Resource):
@api.doc("get_dataset_queries")
@api.doc(description="Get dataset query history")
@api.doc(params={"dataset_id": "Dataset ID"})
@api.response(200, "Query history retrieved successfully", dataset_query_detail_fields)
@setup_required
@login_required
@account_initialization_required
@@ -451,11 +385,7 @@ class DatasetQueryApi(Resource):
return response, 200
@console_ns.route("/datasets/indexing-estimate")
class DatasetIndexingEstimateApi(Resource):
@api.doc("estimate_dataset_indexing")
@api.doc(description="Estimate dataset indexing cost")
@api.response(200, "Indexing estimate calculated successfully")
@setup_required
@login_required
@account_initialization_required
@@ -556,12 +486,7 @@ class DatasetIndexingEstimateApi(Resource):
return response.model_dump(), 200
@console_ns.route("/datasets/<uuid:dataset_id>/related-apps")
class DatasetRelatedAppListApi(Resource):
@api.doc("get_dataset_related_apps")
@api.doc(description="Get applications related to dataset")
@api.doc(params={"dataset_id": "Dataset ID"})
@api.response(200, "Related apps retrieved successfully", related_app_list)
@setup_required
@login_required
@account_initialization_required
@@ -588,12 +513,7 @@ class DatasetRelatedAppListApi(Resource):
return {"data": related_apps, "total": len(related_apps)}, 200
@console_ns.route("/datasets/<uuid:dataset_id>/indexing-status")
class DatasetIndexingStatusApi(Resource):
@api.doc("get_dataset_indexing_status")
@api.doc(description="Get dataset indexing status")
@api.doc(params={"dataset_id": "Dataset ID"})
@api.response(200, "Indexing status retrieved successfully")
@setup_required
@login_required
@account_initialization_required
@@ -640,15 +560,11 @@ class DatasetIndexingStatusApi(Resource):
return data, 200
@console_ns.route("/datasets/api-keys")
class DatasetApiKeyApi(Resource):
max_keys = 10
token_prefix = "dataset-"
resource_type = "dataset"
@api.doc("get_dataset_api_keys")
@api.doc(description="Get dataset API keys")
@api.response(200, "API keys retrieved successfully", api_key_list)
@setup_required
@login_required
@account_initialization_required
@@ -693,14 +609,9 @@ class DatasetApiKeyApi(Resource):
return api_token, 200
@console_ns.route("/datasets/api-keys/<uuid:api_key_id>")
class DatasetApiDeleteApi(Resource):
resource_type = "dataset"
@api.doc("delete_dataset_api_key")
@api.doc(description="Delete dataset API key")
@api.doc(params={"api_key_id": "API key ID"})
@api.response(204, "API key deleted successfully")
@setup_required
@login_required
@account_initialization_required
@@ -730,11 +641,7 @@ class DatasetApiDeleteApi(Resource):
return {"result": "success"}, 204
@console_ns.route("/datasets/api-base-info")
class DatasetApiBaseUrlApi(Resource):
@api.doc("get_dataset_api_base_info")
@api.doc(description="Get dataset API base information")
@api.response(200, "API base info retrieved successfully")
@setup_required
@login_required
@account_initialization_required
@@ -742,11 +649,7 @@ class DatasetApiBaseUrlApi(Resource):
return {"api_base_url": (dify_config.SERVICE_API_URL or request.host_url.rstrip("/")) + "/v1"}
@console_ns.route("/datasets/retrieval-setting")
class DatasetRetrievalSettingApi(Resource):
@api.doc("get_dataset_retrieval_setting")
@api.doc(description="Get dataset retrieval settings")
@api.response(200, "Retrieval settings retrieved successfully")
@setup_required
@login_required
@account_initialization_required
@@ -797,12 +700,7 @@ class DatasetRetrievalSettingApi(Resource):
raise ValueError(f"Unsupported vector db type {vector_type}.")
@console_ns.route("/datasets/retrieval-setting/<string:vector_type>")
class DatasetRetrievalSettingMockApi(Resource):
@api.doc("get_dataset_retrieval_setting_mock")
@api.doc(description="Get mock dataset retrieval settings by vector type")
@api.doc(params={"vector_type": "Vector store type"})
@api.response(200, "Mock retrieval settings retrieved successfully")
@setup_required
@login_required
@account_initialization_required
@@ -851,13 +749,7 @@ class DatasetRetrievalSettingMockApi(Resource):
raise ValueError(f"Unsupported vector db type {vector_type}.")
@console_ns.route("/datasets/<uuid:dataset_id>/error-docs")
class DatasetErrorDocs(Resource):
@api.doc("get_dataset_error_docs")
@api.doc(description="Get dataset error documents")
@api.doc(params={"dataset_id": "Dataset ID"})
@api.response(200, "Error documents retrieved successfully")
@api.response(404, "Dataset not found")
@setup_required
@login_required
@account_initialization_required
@@ -871,14 +763,7 @@ class DatasetErrorDocs(Resource):
return {"data": [marshal(item, document_status_fields) for item in results], "total": len(results)}, 200
@console_ns.route("/datasets/<uuid:dataset_id>/permission-part-users")
class DatasetPermissionUserListApi(Resource):
@api.doc("get_dataset_permission_users")
@api.doc(description="Get dataset permission user list")
@api.doc(params={"dataset_id": "Dataset ID"})
@api.response(200, "Permission users retrieved successfully")
@api.response(404, "Dataset not found")
@api.response(403, "Permission denied")
@setup_required
@login_required
@account_initialization_required
@@ -899,13 +784,7 @@ class DatasetPermissionUserListApi(Resource):
}, 200
@console_ns.route("/datasets/<uuid:dataset_id>/auto-disable-logs")
class DatasetAutoDisableLogApi(Resource):
@api.doc("get_dataset_auto_disable_logs")
@api.doc(description="Get dataset auto disable logs")
@api.doc(params={"dataset_id": "Dataset ID"})
@api.response(200, "Auto disable logs retrieved successfully")
@api.response(404, "Dataset not found")
@setup_required
@login_required
@account_initialization_required
@@ -915,3 +794,20 @@ class DatasetAutoDisableLogApi(Resource):
if dataset is None:
raise NotFound("Dataset not found.")
return DatasetService.get_dataset_auto_disable_logs(dataset_id_str), 200
api.add_resource(DatasetListApi, "/datasets")
api.add_resource(DatasetApi, "/datasets/<uuid:dataset_id>")
api.add_resource(DatasetUseCheckApi, "/datasets/<uuid:dataset_id>/use-check")
api.add_resource(DatasetQueryApi, "/datasets/<uuid:dataset_id>/queries")
api.add_resource(DatasetErrorDocs, "/datasets/<uuid:dataset_id>/error-docs")
api.add_resource(DatasetIndexingEstimateApi, "/datasets/indexing-estimate")
api.add_resource(DatasetRelatedAppListApi, "/datasets/<uuid:dataset_id>/related-apps")
api.add_resource(DatasetIndexingStatusApi, "/datasets/<uuid:dataset_id>/indexing-status")
api.add_resource(DatasetApiKeyApi, "/datasets/api-keys")
api.add_resource(DatasetApiDeleteApi, "/datasets/api-keys/<uuid:api_key_id>")
api.add_resource(DatasetApiBaseUrlApi, "/datasets/api-base-info")
api.add_resource(DatasetRetrievalSettingApi, "/datasets/retrieval-setting")
api.add_resource(DatasetRetrievalSettingMockApi, "/datasets/retrieval-setting/<string:vector_type>")
api.add_resource(DatasetPermissionUserListApi, "/datasets/<uuid:dataset_id>/permission-part-users")
api.add_resource(DatasetAutoDisableLogApi, "/datasets/<uuid:dataset_id>/auto-disable-logs")
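The hunks above remove the `@console_ns.route(...)` and `@api.doc(...)` decorators and restore explicit `api.add_resource(...)` registration. Both styles register flask-restx resources; a minimal self-contained sketch with stand-in names (not Dify's actual objects):

from flask import Flask
from flask_restx import Api, Namespace, Resource

app = Flask(__name__)
api = Api(app)
ns = Namespace("datasets", path="/")
api.add_namespace(ns)

# Style 1: decorator-based registration on a namespace (removed by this diff).
@ns.route("/examples/<uuid:example_id>")
class ExampleApi(Resource):
    def get(self, example_id):
        return {"id": str(example_id)}

# Style 2: explicit registration (restored by this diff).
class ExampleListApi(Resource):
    def get(self):
        return {"data": []}

api.add_resource(ExampleListApi, "/examples")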

View File

@@ -5,12 +5,12 @@ from typing import Literal, cast
from flask import request
from flask_login import current_user
from flask_restx import Resource, fields, marshal, marshal_with, reqparse
from flask_restx import Resource, marshal, marshal_with, reqparse
from sqlalchemy import asc, desc, select
from werkzeug.exceptions import Forbidden, NotFound
import services
from controllers.console import api, console_ns
from controllers.console import api
from controllers.console.app.error import (
ProviderModelCurrentlyNotSupportError,
ProviderNotInitializeError,
@@ -98,12 +98,7 @@ class DocumentResource(Resource):
return documents
@console_ns.route("/datasets/process-rule")
class GetProcessRuleApi(Resource):
@api.doc("get_process_rule")
@api.doc(description="Get dataset document processing rules")
@api.doc(params={"document_id": "Document ID (optional)"})
@api.response(200, "Process rules retrieved successfully")
@setup_required
@login_required
@account_initialization_required
@@ -145,21 +140,7 @@ class GetProcessRuleApi(Resource):
return {"mode": mode, "rules": rules, "limits": limits}
@console_ns.route("/datasets/<uuid:dataset_id>/documents")
class DatasetDocumentListApi(Resource):
@api.doc("get_dataset_documents")
@api.doc(description="Get documents in a dataset")
@api.doc(
params={
"dataset_id": "Dataset ID",
"page": "Page number (default: 1)",
"limit": "Number of items per page (default: 20)",
"keyword": "Search keyword",
"sort": "Sort order (default: -created_at)",
"fetch": "Fetch full details (default: false)",
}
)
@api.response(200, "Documents retrieved successfully")
@setup_required
@login_required
@account_initialization_required
@@ -343,23 +324,7 @@ class DatasetDocumentListApi(Resource):
return {"result": "success"}, 204
@console_ns.route("/datasets/init")
class DatasetInitApi(Resource):
@api.doc("init_dataset")
@api.doc(description="Initialize dataset with documents")
@api.expect(
api.model(
"DatasetInitRequest",
{
"upload_file_id": fields.String(required=True, description="Upload file ID"),
"indexing_technique": fields.String(description="Indexing technique"),
"process_rule": fields.Raw(description="Processing rules"),
"data_source": fields.Raw(description="Data source configuration"),
},
)
)
@api.response(201, "Dataset initialized successfully", dataset_and_document_fields)
@api.response(400, "Invalid request parameters")
@setup_required
@login_required
@account_initialization_required
@@ -429,14 +394,7 @@ class DatasetInitApi(Resource):
return response
@console_ns.route("/datasets/<uuid:dataset_id>/documents/<uuid:document_id>/indexing-estimate")
class DocumentIndexingEstimateApi(DocumentResource):
@api.doc("estimate_document_indexing")
@api.doc(description="Estimate document indexing cost")
@api.doc(params={"dataset_id": "Dataset ID", "document_id": "Document ID"})
@api.response(200, "Indexing estimate calculated successfully")
@api.response(404, "Document not found")
@api.response(400, "Document already finished")
@setup_required
@login_required
@account_initialization_required
@@ -499,7 +457,6 @@ class DocumentIndexingEstimateApi(DocumentResource):
return response, 200
@console_ns.route("/datasets/<uuid:dataset_id>/batch/<string:batch>/indexing-estimate")
class DocumentBatchIndexingEstimateApi(DocumentResource):
@setup_required
@login_required
@@ -592,7 +549,6 @@ class DocumentBatchIndexingEstimateApi(DocumentResource):
raise IndexingEstimateError(str(e))
@console_ns.route("/datasets/<uuid:dataset_id>/batch/<string:batch>/indexing-status")
class DocumentBatchIndexingStatusApi(DocumentResource):
@setup_required
@login_required
@@ -637,13 +593,7 @@ class DocumentBatchIndexingStatusApi(DocumentResource):
return data
@console_ns.route("/datasets/<uuid:dataset_id>/documents/<uuid:document_id>/indexing-status")
class DocumentIndexingStatusApi(DocumentResource):
@api.doc("get_document_indexing_status")
@api.doc(description="Get document indexing status")
@api.doc(params={"dataset_id": "Dataset ID", "document_id": "Document ID"})
@api.response(200, "Indexing status retrieved successfully")
@api.response(404, "Document not found")
@setup_required
@login_required
@account_initialization_required
@@ -685,21 +635,9 @@ class DocumentIndexingStatusApi(DocumentResource):
return marshal(document_dict, document_status_fields)
@console_ns.route("/datasets/<uuid:dataset_id>/documents/<uuid:document_id>")
class DocumentApi(DocumentResource):
METADATA_CHOICES = {"all", "only", "without"}
@api.doc("get_document")
@api.doc(description="Get document details")
@api.doc(
params={
"dataset_id": "Dataset ID",
"document_id": "Document ID",
"metadata": "Metadata inclusion (all/only/without)",
}
)
@api.response(200, "Document retrieved successfully")
@api.response(404, "Document not found")
@setup_required
@login_required
@account_initialization_required
@@ -808,16 +746,7 @@ class DocumentApi(DocumentResource):
return {"result": "success"}, 204
@console_ns.route("/datasets/<uuid:dataset_id>/documents/<uuid:document_id>/processing/<string:action>")
class DocumentProcessingApi(DocumentResource):
@api.doc("update_document_processing")
@api.doc(description="Update document processing status (pause/resume)")
@api.doc(
params={"dataset_id": "Dataset ID", "document_id": "Document ID", "action": "Action to perform (pause/resume)"}
)
@api.response(200, "Processing status updated successfully")
@api.response(404, "Document not found")
@api.response(400, "Invalid action")
@setup_required
@login_required
@account_initialization_required
@@ -852,23 +781,7 @@ class DocumentProcessingApi(DocumentResource):
return {"result": "success"}, 200
@console_ns.route("/datasets/<uuid:dataset_id>/documents/<uuid:document_id>/metadata")
class DocumentMetadataApi(DocumentResource):
@api.doc("update_document_metadata")
@api.doc(description="Update document metadata")
@api.doc(params={"dataset_id": "Dataset ID", "document_id": "Document ID"})
@api.expect(
api.model(
"UpdateDocumentMetadataRequest",
{
"doc_type": fields.String(description="Document type"),
"doc_metadata": fields.Raw(description="Document metadata"),
},
)
)
@api.response(200, "Document metadata updated successfully")
@api.response(404, "Document not found")
@api.response(403, "Permission denied")
@setup_required
@login_required
@account_initialization_required
@@ -912,7 +825,6 @@ class DocumentMetadataApi(DocumentResource):
return {"result": "success", "message": "Document metadata updated."}, 200
@console_ns.route("/datasets/<uuid:dataset_id>/documents/status/<string:action>/batch")
class DocumentStatusApi(DocumentResource):
@setup_required
@login_required
@@ -949,7 +861,6 @@ class DocumentStatusApi(DocumentResource):
return {"result": "success"}, 200
@console_ns.route("/datasets/<uuid:dataset_id>/documents/<uuid:document_id>/processing/pause")
class DocumentPauseApi(DocumentResource):
@setup_required
@login_required
@@ -983,7 +894,6 @@ class DocumentPauseApi(DocumentResource):
return {"result": "success"}, 204
@console_ns.route("/datasets/<uuid:dataset_id>/documents/<uuid:document_id>/processing/resume")
class DocumentRecoverApi(DocumentResource):
@setup_required
@login_required
@@ -1014,7 +924,6 @@ class DocumentRecoverApi(DocumentResource):
return {"result": "success"}, 204
@console_ns.route("/datasets/<uuid:dataset_id>/retry")
class DocumentRetryApi(DocumentResource):
@setup_required
@login_required
@@ -1058,7 +967,6 @@ class DocumentRetryApi(DocumentResource):
return {"result": "success"}, 204
@console_ns.route("/datasets/<uuid:dataset_id>/documents/<uuid:document_id>/rename")
class DocumentRenameApi(DocumentResource):
@setup_required
@login_required
@@ -1082,7 +990,6 @@ class DocumentRenameApi(DocumentResource):
return document
@console_ns.route("/datasets/<uuid:dataset_id>/documents/<uuid:document_id>/website-sync")
class WebsiteDocumentSyncApi(DocumentResource):
@setup_required
@login_required
@@ -1108,3 +1015,26 @@ class WebsiteDocumentSyncApi(DocumentResource):
DocumentService.sync_website_document(dataset_id, document)
return {"result": "success"}, 200
api.add_resource(GetProcessRuleApi, "/datasets/process-rule")
api.add_resource(DatasetDocumentListApi, "/datasets/<uuid:dataset_id>/documents")
api.add_resource(DatasetInitApi, "/datasets/init")
api.add_resource(
DocumentIndexingEstimateApi, "/datasets/<uuid:dataset_id>/documents/<uuid:document_id>/indexing-estimate"
)
api.add_resource(DocumentBatchIndexingEstimateApi, "/datasets/<uuid:dataset_id>/batch/<string:batch>/indexing-estimate")
api.add_resource(DocumentBatchIndexingStatusApi, "/datasets/<uuid:dataset_id>/batch/<string:batch>/indexing-status")
api.add_resource(DocumentIndexingStatusApi, "/datasets/<uuid:dataset_id>/documents/<uuid:document_id>/indexing-status")
api.add_resource(DocumentApi, "/datasets/<uuid:dataset_id>/documents/<uuid:document_id>")
api.add_resource(
DocumentProcessingApi, "/datasets/<uuid:dataset_id>/documents/<uuid:document_id>/processing/<string:action>"
)
api.add_resource(DocumentMetadataApi, "/datasets/<uuid:dataset_id>/documents/<uuid:document_id>/metadata")
api.add_resource(DocumentStatusApi, "/datasets/<uuid:dataset_id>/documents/status/<string:action>/batch")
api.add_resource(DocumentPauseApi, "/datasets/<uuid:dataset_id>/documents/<uuid:document_id>/processing/pause")
api.add_resource(DocumentRecoverApi, "/datasets/<uuid:dataset_id>/documents/<uuid:document_id>/processing/resume")
api.add_resource(DocumentRetryApi, "/datasets/<uuid:dataset_id>/retry")
api.add_resource(DocumentRenameApi, "/datasets/<uuid:dataset_id>/documents/<uuid:document_id>/rename")
api.add_resource(WebsiteDocumentSyncApi, "/datasets/<uuid:dataset_id>/documents/<uuid:document_id>/website-sync")

View File

@@ -1,10 +1,10 @@
from flask import request
from flask_login import current_user
from flask_restx import Resource, fields, marshal, reqparse
from flask_restx import Resource, marshal, reqparse
from werkzeug.exceptions import Forbidden, InternalServerError, NotFound
import services
from controllers.console import api, console_ns
from controllers.console import api
from controllers.console.datasets.error import DatasetNameDuplicateError
from controllers.console.wraps import account_initialization_required, setup_required
from fields.dataset_fields import dataset_detail_fields
@@ -21,18 +21,7 @@ def _validate_name(name):
return name
@console_ns.route("/datasets/external-knowledge-api")
class ExternalApiTemplateListApi(Resource):
@api.doc("get_external_api_templates")
@api.doc(description="Get external knowledge API templates")
@api.doc(
params={
"page": "Page number (default: 1)",
"limit": "Number of items per page (default: 20)",
"keyword": "Search keyword",
}
)
@api.response(200, "External API templates retrieved successfully")
@setup_required
@login_required
@account_initialization_required
@@ -90,13 +79,7 @@ class ExternalApiTemplateListApi(Resource):
return external_knowledge_api.to_dict(), 201
@console_ns.route("/datasets/external-knowledge-api/<uuid:external_knowledge_api_id>")
class ExternalApiTemplateApi(Resource):
@api.doc("get_external_api_template")
@api.doc(description="Get external knowledge API template details")
@api.doc(params={"external_knowledge_api_id": "External knowledge API ID"})
@api.response(200, "External API template retrieved successfully")
@api.response(404, "Template not found")
@setup_required
@login_required
@account_initialization_required
@@ -155,12 +138,7 @@ class ExternalApiTemplateApi(Resource):
return {"result": "success"}, 204
@console_ns.route("/datasets/external-knowledge-api/<uuid:external_knowledge_api_id>/use-check")
class ExternalApiUseCheckApi(Resource):
@api.doc("check_external_api_usage")
@api.doc(description="Check if external knowledge API is being used")
@api.doc(params={"external_knowledge_api_id": "External knowledge API ID"})
@api.response(200, "Usage check completed successfully")
@setup_required
@login_required
@account_initialization_required
@@ -173,24 +151,7 @@ class ExternalApiUseCheckApi(Resource):
return {"is_using": external_knowledge_api_is_using, "count": count}, 200
@console_ns.route("/datasets/external")
class ExternalDatasetCreateApi(Resource):
@api.doc("create_external_dataset")
@api.doc(description="Create external knowledge dataset")
@api.expect(
api.model(
"CreateExternalDatasetRequest",
{
"external_knowledge_api_id": fields.String(required=True, description="External knowledge API ID"),
"external_knowledge_id": fields.String(required=True, description="External knowledge ID"),
"name": fields.String(required=True, description="Dataset name"),
"description": fields.String(description="Dataset description"),
},
)
)
@api.response(201, "External dataset created successfully", dataset_detail_fields)
@api.response(400, "Invalid parameters")
@api.response(403, "Permission denied")
@setup_required
@login_required
@account_initialization_required
@@ -230,24 +191,7 @@ class ExternalDatasetCreateApi(Resource):
return marshal(dataset, dataset_detail_fields), 201
@console_ns.route("/datasets/<uuid:dataset_id>/external-hit-testing")
class ExternalKnowledgeHitTestingApi(Resource):
@api.doc("test_external_knowledge_retrieval")
@api.doc(description="Test external knowledge retrieval for dataset")
@api.doc(params={"dataset_id": "Dataset ID"})
@api.expect(
api.model(
"ExternalHitTestingRequest",
{
"query": fields.String(required=True, description="Query text for testing"),
"retrieval_model": fields.Raw(description="Retrieval model configuration"),
"external_retrieval_model": fields.Raw(description="External retrieval model configuration"),
},
)
)
@api.response(200, "External hit testing completed successfully")
@api.response(404, "Dataset not found")
@api.response(400, "Invalid parameters")
@setup_required
@login_required
@account_initialization_required
@@ -284,22 +228,8 @@ class ExternalKnowledgeHitTestingApi(Resource):
raise InternalServerError(str(e))
@console_ns.route("/test/retrieval")
class BedrockRetrievalApi(Resource):
# this api is only for internal testing
@api.doc("bedrock_retrieval_test")
@api.doc(description="Bedrock retrieval test (internal use only)")
@api.expect(
api.model(
"BedrockRetrievalTestRequest",
{
"retrieval_setting": fields.Raw(required=True, description="Retrieval settings"),
"query": fields.String(required=True, description="Query text"),
"knowledge_id": fields.String(required=True, description="Knowledge ID"),
},
)
)
@api.response(200, "Bedrock retrieval test completed")
def post(self):
parser = reqparse.RequestParser()
parser.add_argument("retrieval_setting", nullable=False, required=True, type=dict, location="json")
@@ -317,3 +247,12 @@ class BedrockRetrievalApi(Resource):
args["retrieval_setting"], args["query"], args["knowledge_id"]
)
return result, 200
api.add_resource(ExternalKnowledgeHitTestingApi, "/datasets/<uuid:dataset_id>/external-hit-testing")
api.add_resource(ExternalDatasetCreateApi, "/datasets/external")
api.add_resource(ExternalApiTemplateListApi, "/datasets/external-knowledge-api")
api.add_resource(ExternalApiTemplateApi, "/datasets/external-knowledge-api/<uuid:external_knowledge_api_id>")
api.add_resource(ExternalApiUseCheckApi, "/datasets/external-knowledge-api/<uuid:external_knowledge_api_id>/use-check")
# this api is only for internal testing
api.add_resource(BedrockRetrievalApi, "/test/retrieval")
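For orientation, the `BedrockRetrievalApi` handler above parses its body with required, non-nullable JSON arguments. A condensed sketch of that reqparse pattern:

from flask_restx import reqparse

# Mirrors the parser in BedrockRetrievalApi: three required JSON body fields.
parser = reqparse.RequestParser()
parser.add_argument("retrieval_setting", type=dict, required=True, nullable=False, location="json")
parser.add_argument("query", type=str, required=True, nullable=False, location="json")
parser.add_argument("knowledge_id", type=str, required=True, nullable=False, location="json")
# Inside a request context: args = parser.parse_args()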

View File

@@ -1,6 +1,6 @@
from flask_restx import Resource, fields
from flask_restx import Resource
from controllers.console import api, console_ns
from controllers.console import api
from controllers.console.datasets.hit_testing_base import DatasetsHitTestingBase
from controllers.console.wraps import (
account_initialization_required,
@@ -10,25 +10,7 @@ from controllers.console.wraps import (
from libs.login import login_required
@console_ns.route("/datasets/<uuid:dataset_id>/hit-testing")
class HitTestingApi(Resource, DatasetsHitTestingBase):
@api.doc("test_dataset_retrieval")
@api.doc(description="Test dataset knowledge retrieval")
@api.doc(params={"dataset_id": "Dataset ID"})
@api.expect(
api.model(
"HitTestingRequest",
{
"query": fields.String(required=True, description="Query text for testing"),
"retrieval_model": fields.Raw(description="Retrieval model configuration"),
"top_k": fields.Integer(description="Number of top results to return"),
"score_threshold": fields.Float(description="Score threshold for filtering results"),
},
)
)
@api.response(200, "Hit testing completed successfully")
@api.response(404, "Dataset not found")
@api.response(400, "Invalid parameters")
@setup_required
@login_required
@account_initialization_required
@@ -41,3 +23,6 @@ class HitTestingApi(Resource, DatasetsHitTestingBase):
self.hit_testing_args_check(args)
return self.perform_hit_testing(dataset, args)
api.add_resource(HitTestingApi, "/datasets/<uuid:dataset_id>/hit-testing")

View File

@@ -1,32 +1,13 @@
from flask_restx import Resource, fields, reqparse
from flask_restx import Resource, reqparse
from controllers.console import api, console_ns
from controllers.console import api
from controllers.console.datasets.error import WebsiteCrawlError
from controllers.console.wraps import account_initialization_required, setup_required
from libs.login import login_required
from services.website_service import WebsiteCrawlApiRequest, WebsiteCrawlStatusApiRequest, WebsiteService
@console_ns.route("/website/crawl")
class WebsiteCrawlApi(Resource):
@api.doc("crawl_website")
@api.doc(description="Crawl website content")
@api.expect(
api.model(
"WebsiteCrawlRequest",
{
"provider": fields.String(
required=True,
description="Crawl provider (firecrawl/watercrawl/jinareader)",
enum=["firecrawl", "watercrawl", "jinareader"],
),
"url": fields.String(required=True, description="URL to crawl"),
"options": fields.Raw(required=True, description="Crawl options"),
},
)
)
@api.response(200, "Website crawl initiated successfully")
@api.response(400, "Invalid crawl parameters")
@setup_required
@login_required
@account_initialization_required
@@ -58,14 +39,7 @@ class WebsiteCrawlApi(Resource):
return result, 200
@console_ns.route("/website/crawl/status/<string:job_id>")
class WebsiteCrawlStatusApi(Resource):
@api.doc("get_crawl_status")
@api.doc(description="Get website crawl status")
@api.doc(params={"job_id": "Crawl job ID", "provider": "Crawl provider (firecrawl/watercrawl/jinareader)"})
@api.response(200, "Crawl status retrieved successfully")
@api.response(404, "Crawl job not found")
@api.response(400, "Invalid provider")
@setup_required
@login_required
@account_initialization_required
@@ -88,3 +62,7 @@ class WebsiteCrawlStatusApi(Resource):
except Exception as e:
raise WebsiteCrawlError(str(e))
return result, 200
api.add_resource(WebsiteCrawlApi, "/website/crawl")
api.add_resource(WebsiteCrawlStatusApi, "/website/crawl/status/<string:job_id>")

View File

@@ -20,7 +20,6 @@ from core.errors.error import (
QuotaExceededError,
)
from core.model_runtime.errors.invoke import InvokeError
from core.workflow.graph_engine.manager import GraphEngineManager
from libs import helper
from libs.login import current_user
from models.model import AppMode, InstalledApp
@@ -83,11 +82,6 @@ class InstalledAppWorkflowTaskStopApi(InstalledAppResource):
raise NotWorkflowAppError()
assert current_user is not None
# Stop using both mechanisms for backward compatibility
# Legacy stop flag mechanism (without user check)
AppQueueManager.set_stop_flag_no_user_check(task_id)
# New graph engine command channel mechanism
GraphEngineManager.send_stop_command(task_id)
AppQueueManager.set_stop_flag(task_id, InvokeFrom.EXPLORE, current_user.id)
return {"result": "success"}

View File

@@ -1,6 +1,6 @@
from collections.abc import Callable
from functools import wraps
from typing import Concatenate, ParamSpec, TypeVar
from typing import Concatenate, Optional, ParamSpec, TypeVar
from flask_login import current_user
from flask_restx import Resource
@@ -20,7 +20,7 @@ R = TypeVar("R")
T = TypeVar("T")
def installed_app_required(view: Callable[Concatenate[InstalledApp, P], R] | None = None):
def installed_app_required(view: Optional[Callable[Concatenate[InstalledApp, P], R]] = None):
def decorator(view: Callable[Concatenate[InstalledApp, P], R]):
@wraps(view)
def decorated(installed_app_id: str, *args: P.args, **kwargs: P.kwargs):
@@ -50,7 +50,7 @@ def installed_app_required(view: Callable[Concatenate[InstalledApp, P], R] | Non
return decorator
def user_allowed_to_access_app(view: Callable[Concatenate[InstalledApp, P], R] | None = None):
def user_allowed_to_access_app(view: Optional[Callable[Concatenate[InstalledApp, P], R]] = None):
def decorator(view: Callable[Concatenate[InstalledApp, P], R]):
@wraps(view)
def decorated(installed_app: InstalledApp, *args: P.args, **kwargs: P.kwargs):
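`installed_app_required` and `user_allowed_to_access_app` both use the "decorator that works with or without parentheses" idiom, which is what the `Optional[Callable[...]] = None` signature enables. A generic, self-contained sketch of the pattern:

from collections.abc import Callable
from functools import wraps
from typing import Optional, ParamSpec, TypeVar

P = ParamSpec("P")
R = TypeVar("R")

def logged(view: Optional[Callable[P, R]] = None):
    # Usable both as @logged and as @logged(): when applied without
    # parentheses, `view` is the function itself and is decorated directly.
    def decorator(func: Callable[P, R]) -> Callable[P, R]:
        @wraps(func)
        def wrapper(*args: P.args, **kwargs: P.kwargs) -> R:
            print(f"calling {func.__name__}")
            return func(*args, **kwargs)
        return wrapper
    return decorator(view) if view else decorator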

View File

@@ -21,11 +21,11 @@ from core.mcp.auth.auth_provider import OAuthClientProvider
from core.mcp.error import MCPAuthError, MCPError
from core.mcp.mcp_client import MCPClient
from core.model_runtime.utils.encoders import jsonable_encoder
from core.plugin.entities.plugin import ToolProviderID
from core.plugin.impl.oauth import OAuthHandler
from core.tools.entities.tool_entities import CredentialType
from libs.helper import StrLen, alphanumeric, uuid_value
from libs.login import login_required
from models.provider_ids import ToolProviderID
from services.plugin.oauth_service import OAuthProxyService
from services.tools.api_tools_manage_service import ApiToolManageService
from services.tools.builtin_tools_manage_service import BuiltinToolManageService

View File

@@ -8,7 +8,7 @@ from controllers.common.errors import UnsupportedFileTypeError
from controllers.files import files_ns
from core.tools.signature import verify_tool_file_signature
from core.tools.tool_file_manager import ToolFileManager
from extensions.ext_database import db as global_db
from models import db as global_db
@files_ns.route("/tools/<uuid:file_id>.<string:extension>")

View File

@@ -1,4 +1,5 @@
from mimetypes import guess_extension
from typing import Optional
from flask_restx import Resource, reqparse
from flask_restx.api import HTTPStatus
@@ -72,11 +73,11 @@ class PluginUploadFileApi(Resource):
nonce: str = args["nonce"]
sign: str = args["sign"]
tenant_id: str = args["tenant_id"]
user_id: str | None = args.get("user_id")
user_id: Optional[str] = args.get("user_id")
user = get_user(tenant_id, user_id)
filename: str | None = file.filename
mimetype: str | None = file.mimetype
filename: Optional[str] = file.filename
mimetype: Optional[str] = file.mimetype
if not filename or not mimetype:
raise Forbidden("Invalid request.")

View File

@@ -1,6 +1,6 @@
from collections.abc import Callable
from functools import wraps
from typing import ParamSpec, TypeVar, cast
from typing import Optional, ParamSpec, TypeVar, cast
from flask import current_app, request
from flask_login import user_logged_in
@@ -54,7 +54,7 @@ def get_user(tenant_id: str, user_id: str | None) -> EndUser:
return user_model
def get_user_tenant(view: Callable[P, R] | None = None):
def get_user_tenant(view: Optional[Callable[P, R]] = None):
def decorator(view_func: Callable[P, R]):
@wraps(view_func)
def decorated_view(*args: P.args, **kwargs: P.kwargs):
@@ -106,7 +106,7 @@ def get_user_tenant(view: Callable[P, R] | None = None):
return decorator(view)
def plugin_data(view: Callable[P, R] | None = None, *, payload_type: type[BaseModel]):
def plugin_data(view: Optional[Callable[P, R]] = None, *, payload_type: type[BaseModel]):
def decorator(view_func: Callable[P, R]):
def decorated_view(*args: P.args, **kwargs: P.kwargs):
try:

View File

@@ -1,4 +1,4 @@
from typing import Union
from typing import Optional, Union
from flask import Response
from flask_restx import Resource, reqparse
@@ -73,7 +73,7 @@ class MCPAppApi(Resource):
ValidationError: Invalid request format or parameters
"""
args = mcp_request_parser.parse_args()
request_id: Union[int, str] | None = args.get("id")
request_id: Optional[Union[int, str]] = args.get("id")
mcp_request = self._parse_mcp_request(args)
with Session(db.engine, expire_on_commit=False) as session:
@@ -107,7 +107,7 @@ class MCPAppApi(Resource):
def _process_mcp_message(
self,
mcp_request: mcp_types.ClientRequest | mcp_types.ClientNotification,
request_id: Union[int, str] | None,
request_id: Optional[Union[int, str]],
app: App,
mcp_server: AppMCPServer,
user_input_form: list[VariableEntity],
@@ -130,7 +130,7 @@ class MCPAppApi(Resource):
def _handle_request(
self,
mcp_request: mcp_types.ClientRequest,
request_id: Union[int, str] | None,
request_id: Optional[Union[int, str]],
app: App,
mcp_server: AppMCPServer,
user_input_form: list[VariableEntity],
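Typing the request id as `Optional[Union[int, str]]` follows JSON-RPC 2.0, where `id` may be a string, a number, or absent entirely (in which case the message is a notification). A small illustration:

from typing import Optional, Union

RequestId = Optional[Union[int, str]]  # JSON-RPC 2.0: string, number, or absent

def is_notification(request_id: RequestId) -> bool:
    # A request without an id is a notification; no response is sent for it.
    return request_id is None

assert is_notification(None)
assert not is_notification("req-1")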

View File

@@ -26,8 +26,7 @@ from core.errors.error import (
)
from core.helper.trace_id_helper import get_external_trace_id
from core.model_runtime.errors.invoke import InvokeError
from core.workflow.enums import WorkflowExecutionStatus
from core.workflow.graph_engine.manager import GraphEngineManager
from core.workflow.entities.workflow_execution import WorkflowExecutionStatus
from extensions.ext_database import db
from fields.workflow_app_log_fields import build_workflow_app_log_pagination_model
from libs import helper
@@ -263,12 +262,7 @@ class WorkflowTaskStopApi(Resource):
if app_mode != AppMode.WORKFLOW:
raise NotWorkflowAppError()
# Stop using both mechanisms for backward compatibility
# Legacy stop flag mechanism (without user check)
AppQueueManager.set_stop_flag_no_user_check(task_id)
# New graph engine command channel mechanism
GraphEngineManager.send_stop_command(task_id)
AppQueueManager.set_stop_flag(task_id, InvokeFrom.SERVICE_API, end_user.id)
return {"result": "success"}

View File

@@ -13,13 +13,13 @@ from controllers.service_api.wraps import (
validate_dataset_token,
)
from core.model_runtime.entities.model_entities import ModelType
from core.plugin.entities.plugin import ModelProviderID
from core.provider_manager import ProviderManager
from fields.dataset_fields import dataset_detail_fields
from fields.tag_fields import build_dataset_tag_fields
from libs.login import current_user
from models.account import Account
from models.dataset import Dataset, DatasetPermissionEnum
from models.provider_ids import ModelProviderID
from services.dataset_service import DatasetPermissionService, DatasetService, DocumentService
from services.entities.knowledge_entities.knowledge_entities import RetrievalModel
from services.tag_service import TagService

View File

@@ -3,7 +3,7 @@ from collections.abc import Callable
from datetime import timedelta
from enum import StrEnum, auto
from functools import wraps
from typing import Concatenate, ParamSpec, TypeVar
from typing import Concatenate, Optional, ParamSpec, TypeVar
from flask import current_app, request
from flask_login import user_logged_in
@@ -42,7 +42,7 @@ class FetchUserArg(BaseModel):
required: bool = False
def validate_app_token(view: Callable[P, R] | None = None, *, fetch_user_arg: FetchUserArg | None = None):
def validate_app_token(view: Optional[Callable[P, R]] = None, *, fetch_user_arg: Optional[FetchUserArg] = None):
def decorator(view_func: Callable[P, R]):
@wraps(view_func)
def decorated_view(*args: P.args, **kwargs: P.kwargs):
@@ -189,7 +189,7 @@ def cloud_edition_billing_rate_limit_check(resource: str, api_token_type: str):
return interceptor
def validate_dataset_token(view: Callable[Concatenate[T, P], R] | None = None):
def validate_dataset_token(view: Optional[Callable[Concatenate[T, P], R]] = None):
def decorator(view: Callable[Concatenate[T, P], R]):
@wraps(view)
def decorated(*args: P.args, **kwargs: P.kwargs):
@@ -267,7 +267,7 @@ def validate_and_get_api_token(scope: str | None = None):
return api_token
def create_or_update_end_user_for_user_id(app_model: App, user_id: str | None = None) -> EndUser:
def create_or_update_end_user_for_user_id(app_model: App, user_id: Optional[str] = None) -> EndUser:
"""
Create or update session terminal based on user ID.
"""

View File

@@ -21,7 +21,6 @@ from core.errors.error import (
QuotaExceededError,
)
from core.model_runtime.errors.invoke import InvokeError
from core.workflow.graph_engine.manager import GraphEngineManager
from libs import helper
from models.model import App, AppMode, EndUser
from services.app_generate_service import AppGenerateService
@@ -113,11 +112,6 @@ class WorkflowTaskStopApi(WebApiResource):
if app_mode != AppMode.WORKFLOW:
raise NotWorkflowAppError()
# Stop using both mechanisms for backward compatibility
# Legacy stop flag mechanism (without user check)
AppQueueManager.set_stop_flag_no_user_check(task_id)
# New graph engine command channel mechanism
GraphEngineManager.send_stop_command(task_id)
AppQueueManager.set_stop_flag(task_id, InvokeFrom.WEB_APP, end_user.id)
return {"result": "success"}

View File

@@ -1,7 +1,7 @@
from collections.abc import Callable
from datetime import UTC, datetime
from functools import wraps
from typing import Concatenate, ParamSpec, TypeVar
from typing import Concatenate, Optional, ParamSpec, TypeVar
from flask import request
from flask_restx import Resource
@@ -21,7 +21,7 @@ P = ParamSpec("P")
R = TypeVar("R")
def validate_jwt_token(view: Callable[Concatenate[App, EndUser, P], R] | None = None):
def validate_jwt_token(view: Optional[Callable[Concatenate[App, EndUser, P], R]] = None):
def decorator(view: Callable[Concatenate[App, EndUser, P], R]):
@wraps(view)
def decorated(*args: P.args, **kwargs: P.kwargs):
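`validate_jwt_token` relies on `Concatenate[App, EndUser, P]`: the wrapper supplies the leading `App` and `EndUser` arguments itself while preserving the rest of the view's signature. A self-contained sketch of that typing pattern, with stand-in classes:

from collections.abc import Callable
from functools import wraps
from typing import Concatenate, ParamSpec, TypeVar

P = ParamSpec("P")
R = TypeVar("R")

class App: ...
class EndUser: ...

def inject_context(view: Callable[Concatenate[App, EndUser, P], R]) -> Callable[P, R]:
    # The decorated function no longer takes `app` and `end_user` from the
    # caller; the wrapper resolves and injects them (stubbed here).
    @wraps(view)
    def decorated(*args: P.args, **kwargs: P.kwargs) -> R:
        return view(App(), EndUser(), *args, **kwargs)
    return decorated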

View File

@@ -1,7 +1,7 @@
import json
import logging
import uuid
from typing import Union, cast
from typing import Optional, Union, cast
from sqlalchemy import select
@@ -60,8 +60,8 @@ class BaseAgentRunner(AppRunner):
message: Message,
user_id: str,
model_instance: ModelInstance,
memory: TokenBufferMemory | None = None,
prompt_messages: list[PromptMessage] | None = None,
memory: Optional[TokenBufferMemory] = None,
prompt_messages: Optional[list[PromptMessage]] = None,
):
self.tenant_id = tenant_id
self.application_generate_entity = application_generate_entity
@@ -112,7 +112,7 @@ class BaseAgentRunner(AppRunner):
features = model_schema.features if model_schema and model_schema.features else []
self.stream_tool_call = ModelFeature.STREAM_TOOL_CALL in features
self.files = application_generate_entity.files if ModelFeature.VISION in features else []
self.query: str | None = ""
self.query: Optional[str] = ""
self._current_thoughts: list[PromptMessage] = []
def _repack_app_generate_entity(

View File

@@ -1,7 +1,7 @@
import json
from abc import ABC, abstractmethod
from collections.abc import Generator, Mapping, Sequence
from typing import Any
from typing import Any, Optional
from core.agent.base_agent_runner import BaseAgentRunner
from core.agent.entities import AgentScratchpadUnit
@@ -70,12 +70,12 @@ class CotAgentRunner(BaseAgentRunner, ABC):
self._prompt_messages_tools = prompt_messages_tools
function_call_state = True
llm_usage: dict[str, LLMUsage | None] = {"usage": None}
llm_usage: dict[str, Optional[LLMUsage]] = {"usage": None}
final_answer = ""
prompt_messages: list = [] # Initialize prompt_messages
agent_thought_id = "" # Initialize agent_thought_id
def increase_usage(final_llm_usage_dict: dict[str, LLMUsage | None], usage: LLMUsage):
def increase_usage(final_llm_usage_dict: dict[str, Optional[LLMUsage]], usage: LLMUsage):
if not final_llm_usage_dict["usage"]:
final_llm_usage_dict["usage"] = usage
else:
@@ -122,7 +122,7 @@ class CotAgentRunner(BaseAgentRunner, ABC):
callbacks=[],
)
usage_dict: dict[str, LLMUsage | None] = {}
usage_dict: dict[str, Optional[LLMUsage]] = {}
react_chunks = CotAgentOutputParser.handle_react_stream_output(chunks, usage_dict)
scratchpad = AgentScratchpadUnit(
agent_response="",
@@ -274,7 +274,7 @@ class CotAgentRunner(BaseAgentRunner, ABC):
action: AgentScratchpadUnit.Action,
tool_instances: Mapping[str, Tool],
message_file_ids: list[str],
trace_manager: TraceQueueManager | None = None,
trace_manager: Optional[TraceQueueManager] = None,
) -> tuple[str, ToolInvokeMeta]:
"""
handle invoke action
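The `dict[str, Optional[LLMUsage]]` accumulator above starts as `{"usage": None}` and is filled in by `increase_usage` as chunks stream back. A runnable sketch of the same pattern with a stand-in usage type:

from dataclasses import dataclass
from typing import Optional

@dataclass
class Usage:  # stand-in for LLMUsage
    prompt_tokens: int = 0
    completion_tokens: int = 0

def increase_usage(total: dict[str, Optional[Usage]], delta: Usage) -> None:
    # The first chunk initializes the accumulator; later chunks add into it.
    if total["usage"] is None:
        total["usage"] = delta
    else:
        total["usage"].prompt_tokens += delta.prompt_tokens
        total["usage"].completion_tokens += delta.completion_tokens

llm_usage: dict[str, Optional[Usage]] = {"usage": None}
increase_usage(llm_usage, Usage(prompt_tokens=10, completion_tokens=5))
increase_usage(llm_usage, Usage(prompt_tokens=3, completion_tokens=2))
assert llm_usage["usage"] is not None and llm_usage["usage"].prompt_tokens == 13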

View File

@@ -1,4 +1,5 @@
import json
from typing import Optional
from core.agent.cot_agent_runner import CotAgentRunner
from core.model_runtime.entities.message_entities import (
@@ -30,7 +31,7 @@ class CotCompletionAgentRunner(CotAgentRunner):
return system_prompt
def _organize_historic_prompt(self, current_session_messages: list[PromptMessage] | None = None) -> str:
def _organize_historic_prompt(self, current_session_messages: Optional[list[PromptMessage]] = None) -> str:
"""
Organize historic prompt
"""

View File

@@ -1,5 +1,5 @@
from enum import StrEnum
from typing import Any, Union
from typing import Any, Optional, Union
from pydantic import BaseModel, Field
@@ -50,11 +50,11 @@ class AgentScratchpadUnit(BaseModel):
"action_input": self.action_input,
}
agent_response: str | None = None
thought: str | None = None
action_str: str | None = None
observation: str | None = None
action: Action | None = None
agent_response: Optional[str] = None
thought: Optional[str] = None
action_str: Optional[str] = None
observation: Optional[str] = None
action: Optional[Action] = None
def is_final(self) -> bool:
"""
@@ -81,8 +81,8 @@ class AgentEntity(BaseModel):
provider: str
model: str
strategy: Strategy
prompt: AgentPromptEntity | None = None
tools: list[AgentToolEntity] | None = None
prompt: Optional[AgentPromptEntity] = None
tools: Optional[list[AgentToolEntity]] = None
max_iteration: int = 10
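The scratchpad entity above is a pydantic model whose fields all default to None, so "is the run finished" style checks reduce to None tests. A condensed sketch, simplified from the real AgentScratchpadUnit:

from typing import Optional, Union

from pydantic import BaseModel

class Action(BaseModel):
    action_name: str
    action_input: Union[dict, str]

class ScratchpadUnit(BaseModel):
    agent_response: Optional[str] = None
    thought: Optional[str] = None
    observation: Optional[str] = None
    action: Optional[Action] = None

    def is_final(self) -> bool:
        # No pending action means the agent has produced its final answer.
        return self.action is None

assert ScratchpadUnit(thought="done").is_final()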

View File

@@ -2,7 +2,7 @@ import json
import logging
from collections.abc import Generator
from copy import deepcopy
from typing import Any, Union
from typing import Any, Optional, Union
from core.agent.base_agent_runner import BaseAgentRunner
from core.app.apps.base_app_queue_manager import PublishFrom
@@ -52,14 +52,14 @@ class FunctionCallAgentRunner(BaseAgentRunner):
# continue to run until there are no more tool calls
function_call_state = True
llm_usage: dict[str, LLMUsage | None] = {"usage": None}
llm_usage: dict[str, Optional[LLMUsage]] = {"usage": None}
final_answer = ""
prompt_messages: list = [] # Initialize prompt_messages
# get tracing instance
trace_manager = app_generate_entity.trace_manager
def increase_usage(final_llm_usage_dict: dict[str, LLMUsage | None], usage: LLMUsage):
def increase_usage(final_llm_usage_dict: dict[str, Optional[LLMUsage]], usage: LLMUsage):
if not final_llm_usage_dict["usage"]:
final_llm_usage_dict["usage"] = usage
else:

View File

@@ -1,5 +1,5 @@
from enum import StrEnum
from typing import Any
from typing import Any, Optional
from pydantic import BaseModel, ConfigDict, Field, ValidationInfo, field_validator
@@ -53,7 +53,7 @@ class AgentStrategyParameter(PluginParameter):
return cast_parameter_value(self, value)
type: AgentStrategyParameterType = Field(..., description="The type of the parameter")
help: I18nObject | None = None
help: Optional[I18nObject] = None
def init_frontend_parameter(self, value: Any):
return init_frontend_parameter(self, self.type, value)
@@ -61,7 +61,7 @@ class AgentStrategyParameter(PluginParameter):
class AgentStrategyProviderEntity(BaseModel):
identity: AgentStrategyProviderIdentity
plugin_id: str | None = Field(None, description="The id of the plugin")
plugin_id: Optional[str] = Field(None, description="The id of the plugin")
class AgentStrategyIdentity(ToolIdentity):
@@ -84,9 +84,9 @@ class AgentStrategyEntity(BaseModel):
identity: AgentStrategyIdentity
parameters: list[AgentStrategyParameter] = Field(default_factory=list)
description: I18nObject = Field(..., description="The description of the agent strategy")
output_schema: dict | None = None
features: list[AgentFeature] | None = None
meta_version: str | None = None
output_schema: Optional[dict] = None
features: Optional[list[AgentFeature]] = None
meta_version: Optional[str] = None
# pydantic configs
model_config = ConfigDict(protected_namespaces=())

View File

@@ -1,6 +1,6 @@
from abc import ABC, abstractmethod
from collections.abc import Generator, Sequence
from typing import Any
from typing import Any, Optional
from core.agent.entities import AgentInvokeMessage
from core.agent.plugin_entities import AgentStrategyParameter
@@ -16,10 +16,10 @@ class BaseAgentStrategy(ABC):
self,
params: dict[str, Any],
user_id: str,
conversation_id: str | None = None,
app_id: str | None = None,
message_id: str | None = None,
credentials: InvokeCredentials | None = None,
conversation_id: Optional[str] = None,
app_id: Optional[str] = None,
message_id: Optional[str] = None,
credentials: Optional[InvokeCredentials] = None,
) -> Generator[AgentInvokeMessage, None, None]:
"""
Invoke the agent strategy.
@@ -37,9 +37,9 @@ class BaseAgentStrategy(ABC):
self,
params: dict[str, Any],
user_id: str,
conversation_id: str | None = None,
app_id: str | None = None,
message_id: str | None = None,
credentials: InvokeCredentials | None = None,
conversation_id: Optional[str] = None,
app_id: Optional[str] = None,
message_id: Optional[str] = None,
credentials: Optional[InvokeCredentials] = None,
) -> Generator[AgentInvokeMessage, None, None]:
pass

View File

@@ -1,5 +1,5 @@
from collections.abc import Generator, Sequence
from typing import Any
from typing import Any, Optional
from core.agent.entities import AgentInvokeMessage
from core.agent.plugin_entities import AgentStrategyEntity, AgentStrategyParameter
@@ -38,10 +38,10 @@ class PluginAgentStrategy(BaseAgentStrategy):
self,
params: dict[str, Any],
user_id: str,
conversation_id: str | None = None,
app_id: str | None = None,
message_id: str | None = None,
credentials: InvokeCredentials | None = None,
conversation_id: Optional[str] = None,
app_id: Optional[str] = None,
message_id: Optional[str] = None,
credentials: Optional[InvokeCredentials] = None,
) -> Generator[AgentInvokeMessage, None, None]:
"""
Invoke the agent strategy.

View File

@@ -1,10 +1,12 @@
from typing import Optional
from core.app.app_config.entities import SensitiveWordAvoidanceEntity
from core.moderation.factory import ModerationFactory
class SensitiveWordAvoidanceConfigManager:
@classmethod
def convert(cls, config: dict) -> SensitiveWordAvoidanceEntity | None:
def convert(cls, config: dict) -> Optional[SensitiveWordAvoidanceEntity]:
sensitive_word_avoidance_dict = config.get("sensitive_word_avoidance")
if not sensitive_word_avoidance_dict:
return None

View File

@@ -1,10 +1,12 @@
from typing import Optional
from core.agent.entities import AgentEntity, AgentPromptEntity, AgentToolEntity
from core.agent.prompt.template import REACT_PROMPT_TEMPLATES
class AgentConfigManager:
@classmethod
def convert(cls, config: dict) -> AgentEntity | None:
def convert(cls, config: dict) -> Optional[AgentEntity]:
"""
Convert app model config dict to AgentEntity

View File

@@ -1,4 +1,5 @@
import uuid
from typing import Optional
from core.app.app_config.entities import (
DatasetEntity,
@@ -13,7 +14,7 @@ from services.dataset_service import DatasetService
class DatasetConfigManager:
@classmethod
def convert(cls, config: dict) -> DatasetEntity | None:
def convert(cls, config: dict) -> Optional[DatasetEntity]:
"""
Convert app model config dict to DatasetEntity

View File

@@ -4,8 +4,8 @@ from typing import Any
from core.app.app_config.entities import ModelConfigEntity
from core.model_runtime.entities.model_entities import ModelPropertyKey, ModelType
from core.model_runtime.model_providers.model_provider_factory import ModelProviderFactory
from core.plugin.entities.plugin import ModelProviderID
from core.provider_manager import ProviderManager
from models.provider_ids import ModelProviderID
class ModelConfigManager:

View File

@@ -1,6 +1,6 @@
from collections.abc import Sequence
from enum import StrEnum, auto
from typing import Any, Literal
from typing import Any, Literal, Optional
from pydantic import BaseModel, Field, field_validator
@@ -17,7 +17,7 @@ class ModelConfigEntity(BaseModel):
provider: str
model: str
mode: str | None = None
mode: Optional[str] = None
parameters: dict[str, Any] = Field(default_factory=dict)
stop: list[str] = Field(default_factory=list)
@@ -53,7 +53,7 @@ class AdvancedCompletionPromptTemplateEntity(BaseModel):
assistant: str
prompt: str
role_prefix: RolePrefixEntity | None = None
role_prefix: Optional[RolePrefixEntity] = None
class PromptTemplateEntity(BaseModel):
@@ -84,9 +84,9 @@ class PromptTemplateEntity(BaseModel):
raise ValueError(f"invalid prompt type value {value}")
prompt_type: PromptType
simple_prompt_template: str | None = None
advanced_chat_prompt_template: AdvancedChatPromptTemplateEntity | None = None
advanced_completion_prompt_template: AdvancedCompletionPromptTemplateEntity | None = None
simple_prompt_template: Optional[str] = None
advanced_chat_prompt_template: Optional[AdvancedChatPromptTemplateEntity] = None
advanced_completion_prompt_template: Optional[AdvancedCompletionPromptTemplateEntity] = None
class VariableEntityType(StrEnum):
@@ -112,7 +112,7 @@ class VariableEntity(BaseModel):
type: VariableEntityType
required: bool = False
hide: bool = False
max_length: int | None = None
max_length: Optional[int] = None
options: Sequence[str] = Field(default_factory=list)
allowed_file_types: Sequence[FileType] = Field(default_factory=list)
allowed_file_extensions: Sequence[str] = Field(default_factory=list)
@@ -173,7 +173,7 @@ class ModelConfig(BaseModel):
class Condition(BaseModel):
"""
Condition detail
Conditon detail
"""
name: str
@@ -186,8 +186,8 @@ class MetadataFilteringCondition(BaseModel):
Metadata Filtering Condition.
"""
logical_operator: Literal["and", "or"] | None = "and"
conditions: list[Condition] | None = Field(default=None, deprecated=True)
logical_operator: Optional[Literal["and", "or"]] = "and"
conditions: Optional[list[Condition]] = Field(default=None, deprecated=True)
class DatasetRetrieveConfigEntity(BaseModel):
@@ -217,18 +217,18 @@ class DatasetRetrieveConfigEntity(BaseModel):
return mode
raise ValueError(f"invalid retrieve strategy value {value}")
query_variable: str | None = None # Only when app mode is completion
query_variable: Optional[str] = None # Only when app mode is completion
retrieve_strategy: RetrieveStrategy
top_k: int | None = None
score_threshold: float | None = 0.0
rerank_mode: str | None = "reranking_model"
reranking_model: dict | None = None
weights: dict | None = None
reranking_enabled: bool | None = True
metadata_filtering_mode: Literal["disabled", "automatic", "manual"] | None = "disabled"
metadata_model_config: ModelConfig | None = None
metadata_filtering_conditions: MetadataFilteringCondition | None = None
top_k: Optional[int] = None
score_threshold: Optional[float] = 0.0
rerank_mode: Optional[str] = "reranking_model"
reranking_model: Optional[dict] = None
weights: Optional[dict] = None
reranking_enabled: Optional[bool] = True
metadata_filtering_mode: Optional[Literal["disabled", "automatic", "manual"]] = "disabled"
metadata_model_config: Optional[ModelConfig] = None
metadata_filtering_conditions: Optional[MetadataFilteringCondition] = None
class DatasetEntity(BaseModel):
@@ -255,8 +255,8 @@ class TextToSpeechEntity(BaseModel):
"""
enabled: bool
voice: str | None = None
language: str | None = None
voice: Optional[str] = None
language: Optional[str] = None
class TracingConfigEntity(BaseModel):
@@ -269,15 +269,15 @@ class TracingConfigEntity(BaseModel):
class AppAdditionalFeatures(BaseModel):
file_upload: FileUploadConfig | None = None
opening_statement: str | None = None
file_upload: Optional[FileUploadConfig] = None
opening_statement: Optional[str] = None
suggested_questions: list[str] = []
suggested_questions_after_answer: bool = False
show_retrieve_source: bool = False
more_like_this: bool = False
speech_to_text: bool = False
text_to_speech: TextToSpeechEntity | None = None
trace_config: TracingConfigEntity | None = None
text_to_speech: Optional[TextToSpeechEntity] = None
trace_config: Optional[TracingConfigEntity] = None
class AppConfig(BaseModel):
@@ -290,7 +290,7 @@ class AppConfig(BaseModel):
app_mode: AppMode
additional_features: AppAdditionalFeatures
variables: list[VariableEntity] = []
sensitive_word_avoidance: SensitiveWordAvoidanceEntity | None = None
sensitive_word_avoidance: Optional[SensitiveWordAvoidanceEntity] = None
class EasyUIBasedAppModelConfigFrom(StrEnum):
@@ -313,7 +313,7 @@ class EasyUIBasedAppConfig(AppConfig):
app_model_config_dict: dict
model: ModelConfigEntity
prompt_template: PromptTemplateEntity
dataset: DatasetEntity | None = None
dataset: Optional[DatasetEntity] = None
external_data_variables: list[ExternalDataVariableEntity] = []
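Note the bare `= []` defaults on `suggested_questions`, `variables`, and `external_data_variables`: pydantic copies mutable defaults per model instance, so, unlike plain function defaults, this is safe. A quick demonstration:

from pydantic import BaseModel

class Features(BaseModel):
    # pydantic deep-copies this default for each instance, so mutating
    # one model's list does not leak into another's.
    suggested_questions: list[str] = []

a, b = Features(), Features()
a.suggested_questions.append("What can you do?")
assert b.suggested_questions == []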

View File

@@ -3,7 +3,7 @@ import logging
import threading
import uuid
from collections.abc import Generator, Mapping
from typing import Any, Literal, Union, overload
from typing import Any, Literal, Optional, Union, overload
from flask import Flask, current_app
from pydantic import ValidationError
@@ -390,7 +390,7 @@ class AdvancedChatAppGenerator(MessageBasedAppGenerator):
application_generate_entity: AdvancedChatAppGenerateEntity,
workflow_execution_repository: WorkflowExecutionRepository,
workflow_node_execution_repository: WorkflowNodeExecutionRepository,
conversation: Conversation | None = None,
conversation: Optional[Conversation] = None,
stream: bool = True,
variable_loader: VariableLoader = DUMMY_VARIABLE_LOADER,
) -> Mapping[str, Any] | Generator[str | Mapping[str, Any], Any, None]:

View File

@@ -1,11 +1,11 @@
import logging
import time
from collections.abc import Mapping
from typing import Any, cast
from typing import Any, Optional, cast
from sqlalchemy import select
from sqlalchemy.orm import Session
from configs import dify_config
from core.app.apps.advanced_chat.app_config_manager import AdvancedChatAppConfig
from core.app.apps.base_app_queue_manager import AppQueueManager
from core.app.apps.workflow_app_runner import WorkflowBasedAppRunner
@@ -23,17 +23,16 @@ from core.app.features.annotation_reply.annotation_reply import AnnotationReplyF
from core.moderation.base import ModerationError
from core.moderation.input_moderation import InputModeration
from core.variables.variables import VariableUnion
from core.workflow.entities import GraphRuntimeState, VariablePool
from core.workflow.graph_engine.command_channels.redis_channel import RedisChannel
from core.workflow.callbacks import WorkflowCallback, WorkflowLoggingCallback
from core.workflow.entities.variable_pool import VariablePool
from core.workflow.system_variable import SystemVariable
from core.workflow.variable_loader import VariableLoader
from core.workflow.workflow_entry import WorkflowEntry
from extensions.ext_database import db
from extensions.ext_redis import redis_client
from models import Workflow
from models.enums import UserFrom
from models.model import App, Conversation, Message, MessageAnnotation
from models.workflow import ConversationVariable
from models.workflow import ConversationVariable, WorkflowType
logger = logging.getLogger(__name__)
@@ -79,29 +78,23 @@ class AdvancedChatAppRunner(WorkflowBasedAppRunner):
if not app_record:
raise ValueError("App not found")
workflow_callbacks: list[WorkflowCallback] = []
if dify_config.DEBUG:
workflow_callbacks.append(WorkflowLoggingCallback())
if self.application_generate_entity.single_iteration_run:
# if only single iteration run is requested
graph_runtime_state = GraphRuntimeState(
variable_pool=VariablePool.empty(),
start_at=time.time(),
)
graph, variable_pool = self._get_graph_and_variable_pool_of_single_iteration(
workflow=self._workflow,
node_id=self.application_generate_entity.single_iteration_run.node_id,
user_inputs=dict(self.application_generate_entity.single_iteration_run.inputs),
graph_runtime_state=graph_runtime_state,
)
elif self.application_generate_entity.single_loop_run:
# if only single loop run is requested
graph_runtime_state = GraphRuntimeState(
variable_pool=VariablePool.empty(),
start_at=time.time(),
)
graph, variable_pool = self._get_graph_and_variable_pool_of_single_loop(
workflow=self._workflow,
node_id=self.application_generate_entity.single_loop_run.node_id,
user_inputs=dict(self.application_generate_entity.single_loop_run.inputs),
-graph_runtime_state=graph_runtime_state,
)
else:
inputs = self.application_generate_entity.inputs
@@ -153,27 +146,16 @@ class AdvancedChatAppRunner(WorkflowBasedAppRunner):
)
# init graph
-graph_runtime_state = GraphRuntimeState(variable_pool=variable_pool, start_at=time.time())
-graph = self._init_graph(
-graph_config=self._workflow.graph_dict,
-graph_runtime_state=graph_runtime_state,
-workflow_id=self._workflow.id,
-tenant_id=self._workflow.tenant_id,
-user_id=self.application_generate_entity.user_id,
-)
+graph = self._init_graph(graph_config=self._workflow.graph_dict)
db.session.close()
# RUN WORKFLOW
-# Create Redis command channel for this workflow execution
-task_id = self.application_generate_entity.task_id
-channel_key = f"workflow:{task_id}:commands"
-command_channel = RedisChannel(redis_client, channel_key)
workflow_entry = WorkflowEntry(
tenant_id=self._workflow.tenant_id,
app_id=self._workflow.app_id,
workflow_id=self._workflow.id,
+workflow_type=WorkflowType.value_of(self._workflow.type),
graph=graph,
graph_config=self._workflow.graph_dict,
user_id=self.application_generate_entity.user_id,
@@ -185,11 +167,11 @@ class AdvancedChatAppRunner(WorkflowBasedAppRunner):
invoke_from=self.application_generate_entity.invoke_from,
call_depth=self.application_generate_entity.call_depth,
variable_pool=variable_pool,
-graph_runtime_state=graph_runtime_state,
-command_channel=command_channel,
)
-generator = workflow_entry.run()
+generator = workflow_entry.run(
+callbacks=workflow_callbacks,
+)
for event in generator:
self._handle_event(workflow_entry, event)
@@ -249,7 +231,7 @@ class AdvancedChatAppRunner(WorkflowBasedAppRunner):
def query_app_annotations_to_reply(
self, app_record: App, message: Message, query: str, user_id: str, invoke_from: InvokeFrom
-) -> MessageAnnotation | None:
+) -> Optional[MessageAnnotation]:
"""
Query app annotations to reply
:param app_record: app record
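Taken together, this file's hunks drop the Redis command channel and the explicit GraphRuntimeState wiring and return to passing WorkflowCallback objects into WorkflowEntry.run, with logging attached only under dify_config.DEBUG. A rough sketch of that callback pattern, using a hypothetical one-method protocol (the real interface in core.workflow.callbacks defines richer per-event hooks):

import logging
from typing import Protocol

logger = logging.getLogger(__name__)


class WorkflowCallback(Protocol):
    # hypothetical single hook, for illustration only
    def on_event(self, event: object) -> None: ...


class WorkflowLoggingCallback:
    def on_event(self, event: object) -> None:
        logger.debug("workflow event: %r", event)


DEBUG = True  # stands in for dify_config.DEBUG

callbacks: list[WorkflowCallback] = []
if DEBUG:
    callbacks.append(WorkflowLoggingCallback())

for event in ("node_started", "node_succeeded"):  # stand-in event stream
    for callback in callbacks:
        callback.on_event(event)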

View File

@@ -4,7 +4,7 @@ import time
from collections.abc import Callable, Generator, Mapping
from contextlib import contextmanager
from threading import Thread
-from typing import Any, Union
+from typing import Any, Optional, Union
from sqlalchemy import select
from sqlalchemy.orm import Session
@@ -31,9 +31,14 @@ from core.app.entities.queue_entities import (
QueueMessageReplaceEvent,
QueueNodeExceptionEvent,
QueueNodeFailedEvent,
+QueueNodeInIterationFailedEvent,
+QueueNodeInLoopFailedEvent,
QueueNodeRetryEvent,
QueueNodeStartedEvent,
QueueNodeSucceededEvent,
+QueueParallelBranchRunFailedEvent,
+QueueParallelBranchRunStartedEvent,
+QueueParallelBranchRunSucceededEvent,
QueuePingEvent,
QueueRetrieverResourcesEvent,
QueueStopEvent,
@@ -60,8 +65,8 @@ from core.app.task_pipeline.message_cycle_manager import MessageCycleManager
from core.base.tts import AppGeneratorTTSPublisher, AudioTrunk
from core.model_runtime.entities.llm_entities import LLMUsage
from core.ops.ops_trace_manager import TraceQueueManager
-from core.workflow.entities import GraphRuntimeState
-from core.workflow.enums import WorkflowExecutionStatus, WorkflowType
+from core.workflow.entities.workflow_execution import WorkflowExecutionStatus, WorkflowType
+from core.workflow.graph_engine.entities.graph_runtime_state import GraphRuntimeState
from core.workflow.nodes import NodeType
from core.workflow.repositories.draft_variable_repository import DraftVariableSaverFactory
from core.workflow.repositories.workflow_execution_repository import WorkflowExecutionRepository
@@ -228,7 +233,7 @@ class AdvancedChatAppGenerateTaskPipeline:
return None
def _wrapper_process_stream_response(
-self, trace_manager: TraceQueueManager | None = None
+self, trace_manager: Optional[TraceQueueManager] = None
) -> Generator[StreamResponse, None, None]:
tts_publisher = None
task_id = self._application_generate_entity.task_id
@@ -289,7 +294,7 @@ class AdvancedChatAppGenerateTaskPipeline:
if not self._workflow_run_id:
raise ValueError("workflow run not initialized.")
-def _ensure_graph_runtime_initialized(self, graph_runtime_state: GraphRuntimeState | None) -> GraphRuntimeState:
+def _ensure_graph_runtime_initialized(self, graph_runtime_state: Optional[GraphRuntimeState]) -> GraphRuntimeState:
"""Fluent validation for graph runtime state."""
if not graph_runtime_state:
raise ValueError("graph runtime state not initialized.")
@@ -382,7 +387,9 @@ class AdvancedChatAppGenerateTaskPipeline:
def _handle_node_failed_events(
self,
-event: Union[QueueNodeFailedEvent, QueueNodeExceptionEvent],
+event: Union[
+QueueNodeFailedEvent, QueueNodeInIterationFailedEvent, QueueNodeInLoopFailedEvent, QueueNodeExceptionEvent
+],
**kwargs,
) -> Generator[StreamResponse, None, None]:
"""Handle various node failure events."""
@@ -404,8 +411,8 @@ class AdvancedChatAppGenerateTaskPipeline:
self,
event: QueueTextChunkEvent,
*,
-tts_publisher: AppGeneratorTTSPublisher | None = None,
-queue_message: Union[WorkflowQueueMessage, MessageQueueMessage] | None = None,
+tts_publisher: Optional[AppGeneratorTTSPublisher] = None,
+queue_message: Optional[Union[WorkflowQueueMessage, MessageQueueMessage]] = None,
**kwargs,
) -> Generator[StreamResponse, None, None]:
"""Handle text chunk events."""
@@ -427,6 +434,32 @@ class AdvancedChatAppGenerateTaskPipeline:
answer=delta_text, message_id=self._message_id, from_variable_selector=event.from_variable_selector
)
+def _handle_parallel_branch_started_event(
+self, event: QueueParallelBranchRunStartedEvent, **kwargs
+) -> Generator[StreamResponse, None, None]:
+"""Handle parallel branch started events."""
+self._ensure_workflow_initialized()
+parallel_start_resp = self._workflow_response_converter.workflow_parallel_branch_start_to_stream_response(
+task_id=self._application_generate_entity.task_id,
+workflow_execution_id=self._workflow_run_id,
+event=event,
+)
+yield parallel_start_resp
+def _handle_parallel_branch_finished_events(
+self, event: Union[QueueParallelBranchRunSucceededEvent, QueueParallelBranchRunFailedEvent], **kwargs
+) -> Generator[StreamResponse, None, None]:
+"""Handle parallel branch finished events."""
+self._ensure_workflow_initialized()
+parallel_finish_resp = self._workflow_response_converter.workflow_parallel_branch_finished_to_stream_response(
+task_id=self._application_generate_entity.task_id,
+workflow_execution_id=self._workflow_run_id,
+event=event,
+)
+yield parallel_finish_resp
def _handle_iteration_start_event(
self, event: QueueIterationStartEvent, **kwargs
) -> Generator[StreamResponse, None, None]:
@@ -505,8 +538,8 @@ class AdvancedChatAppGenerateTaskPipeline:
self,
event: QueueWorkflowSucceededEvent,
*,
-graph_runtime_state: GraphRuntimeState | None = None,
-trace_manager: TraceQueueManager | None = None,
+graph_runtime_state: Optional[GraphRuntimeState] = None,
+trace_manager: Optional[TraceQueueManager] = None,
**kwargs,
) -> Generator[StreamResponse, None, None]:
"""Handle workflow succeeded events."""
@@ -536,8 +569,8 @@ class AdvancedChatAppGenerateTaskPipeline:
self,
event: QueueWorkflowPartialSuccessEvent,
*,
-graph_runtime_state: GraphRuntimeState | None = None,
-trace_manager: TraceQueueManager | None = None,
+graph_runtime_state: Optional[GraphRuntimeState] = None,
+trace_manager: Optional[TraceQueueManager] = None,
**kwargs,
) -> Generator[StreamResponse, None, None]:
"""Handle workflow partial success events."""
@@ -568,8 +601,8 @@ class AdvancedChatAppGenerateTaskPipeline:
self,
event: QueueWorkflowFailedEvent,
*,
-graph_runtime_state: GraphRuntimeState | None = None,
-trace_manager: TraceQueueManager | None = None,
+graph_runtime_state: Optional[GraphRuntimeState] = None,
+trace_manager: Optional[TraceQueueManager] = None,
**kwargs,
) -> Generator[StreamResponse, None, None]:
"""Handle workflow failed events."""
@@ -603,8 +636,8 @@ class AdvancedChatAppGenerateTaskPipeline:
self,
event: QueueStopEvent,
*,
-graph_runtime_state: GraphRuntimeState | None = None,
-trace_manager: TraceQueueManager | None = None,
+graph_runtime_state: Optional[GraphRuntimeState] = None,
+trace_manager: Optional[TraceQueueManager] = None,
**kwargs,
) -> Generator[StreamResponse, None, None]:
"""Handle stop events."""
@@ -644,7 +677,7 @@ class AdvancedChatAppGenerateTaskPipeline:
self,
event: QueueAdvancedChatMessageEndEvent,
*,
-graph_runtime_state: GraphRuntimeState | None = None,
+graph_runtime_state: Optional[GraphRuntimeState] = None,
**kwargs,
) -> Generator[StreamResponse, None, None]:
"""Handle advanced chat message end events."""
@@ -718,6 +751,8 @@ class AdvancedChatAppGenerateTaskPipeline:
QueueNodeRetryEvent: self._handle_node_retry_event,
QueueNodeStartedEvent: self._handle_node_started_event,
QueueNodeSucceededEvent: self._handle_node_succeeded_event,
+# Parallel branch events
+QueueParallelBranchRunStartedEvent: self._handle_parallel_branch_started_event,
# Iteration events
QueueIterationStartEvent: self._handle_iteration_start_event,
QueueIterationNextEvent: self._handle_iteration_next_event,
@@ -740,10 +775,10 @@ class AdvancedChatAppGenerateTaskPipeline:
self,
event: Any,
*,
-graph_runtime_state: GraphRuntimeState | None = None,
-tts_publisher: AppGeneratorTTSPublisher | None = None,
-trace_manager: TraceQueueManager | None = None,
-queue_message: Union[WorkflowQueueMessage, MessageQueueMessage] | None = None,
+graph_runtime_state: Optional[GraphRuntimeState] = None,
+tts_publisher: Optional[AppGeneratorTTSPublisher] = None,
+trace_manager: Optional[TraceQueueManager] = None,
+queue_message: Optional[Union[WorkflowQueueMessage, MessageQueueMessage]] = None,
) -> Generator[StreamResponse, None, None]:
"""Dispatch events using elegant pattern matching."""
handlers = self._get_event_handlers()
@@ -765,6 +800,8 @@ class AdvancedChatAppGenerateTaskPipeline:
event,
(
QueueNodeFailedEvent,
+QueueNodeInIterationFailedEvent,
+QueueNodeInLoopFailedEvent,
QueueNodeExceptionEvent,
),
):
@@ -777,20 +814,31 @@ class AdvancedChatAppGenerateTaskPipeline:
)
return
+# Handle parallel branch finished events with isinstance check
+if isinstance(event, (QueueParallelBranchRunSucceededEvent, QueueParallelBranchRunFailedEvent)):
+yield from self._handle_parallel_branch_finished_events(
+event,
+graph_runtime_state=graph_runtime_state,
+tts_publisher=tts_publisher,
+trace_manager=trace_manager,
+queue_message=queue_message,
+)
+return
# For unhandled events, we continue (original behavior)
return
def _process_stream_response(
self,
-tts_publisher: AppGeneratorTTSPublisher | None = None,
-trace_manager: TraceQueueManager | None = None,
+tts_publisher: Optional[AppGeneratorTTSPublisher] = None,
+trace_manager: Optional[TraceQueueManager] = None,
) -> Generator[StreamResponse, None, None]:
"""
Process stream response using elegant Fluent Python patterns.
Maintains exactly the same functionality as the original 57-if-statement version.
"""
# Initialize graph runtime state
-graph_runtime_state: GraphRuntimeState | None = None
+graph_runtime_state: Optional[GraphRuntimeState] = None
for queue_message in self._base_task_pipeline.queue_manager.listen():
event = queue_message.event
@@ -800,6 +848,11 @@ class AdvancedChatAppGenerateTaskPipeline:
graph_runtime_state = event.graph_runtime_state
yield from self._handle_workflow_started_event(event)
case QueueTextChunkEvent():
+yield from self._handle_text_chunk_event(
+event, tts_publisher=tts_publisher, queue_message=queue_message
+)
case QueueErrorEvent():
yield from self._handle_error_event(event)
break
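The loop above dispatches queue messages by matching on the event's class. A cut-down sketch of that shape, with invented event types standing in for the Queue* entities:

from dataclasses import dataclass


@dataclass
class WorkflowStartedEvent:
    run_id: str


@dataclass
class TextChunkEvent:
    text: str


class ErrorEvent:
    pass


def process(events):
    for event in events:
        match event:  # requires Python 3.10+
            case WorkflowStartedEvent():
                yield f"started {event.run_id}"
            case TextChunkEvent():
                yield event.text
            case ErrorEvent():
                yield "error"
                break  # the pipeline likewise stops streaming on error


print(list(process([WorkflowStartedEvent("r1"), TextChunkEvent("hi")])))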
@@ -835,7 +888,7 @@ class AdvancedChatAppGenerateTaskPipeline:
if self._conversation_name_generate_thread:
self._conversation_name_generate_thread.join()
-def _save_message(self, *, session: Session, graph_runtime_state: GraphRuntimeState | None = None):
+def _save_message(self, *, session: Session, graph_runtime_state: Optional[GraphRuntimeState] = None):
message = self._get_message(session=session)
# If there are assistant files, remove markdown image links from answer
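Overall, this pipeline restores handlers for in-iteration/in-loop node failures and parallel-branch events, dispatching first through an exact-type handler table and then through isinstance checks for event families. A compact sketch of that two-tier dispatch, with invented event classes:

from collections.abc import Callable, Iterator


class NodeStarted: ...
class NodeFailed: ...
class NodeFailedInIteration(NodeFailed): ...
class BranchSucceeded: ...
class BranchFailed: ...


def on_started(event: object) -> Iterator[str]:
    yield "node started"


def on_failure(event: object) -> Iterator[str]:
    yield f"failed: {type(event).__name__}"


def on_branch_done(event: object) -> Iterator[str]:
    yield f"branch finished: {type(event).__name__}"


HANDLERS: dict[type, Callable[[object], Iterator[str]]] = {NodeStarted: on_started}


def dispatch(event: object) -> Iterator[str]:
    if handler := HANDLERS.get(type(event)):  # exact-type fast path
        yield from handler(event)
        return
    if isinstance(event, NodeFailed):  # family fallback, like the node failures
        yield from on_failure(event)
        return
    if isinstance(event, (BranchSucceeded, BranchFailed)):
        yield from on_branch_done(event)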

View File

@@ -1,6 +1,6 @@
import uuid
from collections.abc import Mapping
-from typing import Any, cast
+from typing import Any, Optional, cast
from core.agent.entities import AgentEntity
from core.app.app_config.base_app_config_manager import BaseAppConfigManager
@@ -30,7 +30,7 @@ class AgentChatAppConfig(EasyUIBasedAppConfig):
Agent Chatbot App Config Entity.
"""
-agent: AgentEntity | None = None
+agent: Optional[AgentEntity] = None
class AgentChatAppConfigManager(BaseAppConfigManager):
@@ -39,8 +39,8 @@ class AgentChatAppConfigManager(BaseAppConfigManager):
cls,
app_model: App,
app_model_config: AppModelConfig,
-conversation: Conversation | None = None,
-override_config_dict: dict | None = None,
+conversation: Optional[Conversation] = None,
+override_config_dict: Optional[dict] = None,
) -> AgentChatAppConfig:
"""
Convert app model config to agent chat app config
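config_convert takes an optional conversation and an optional override dict; when an override is supplied (e.g. from a debug/preview call) it wins over the stored model config. A minimal sketch of that layering, assuming plain dicts for illustration:

from typing import Any, Optional


def convert_config(
    stored: dict[str, Any],
    override_config_dict: Optional[dict[str, Any]] = None,
) -> dict[str, Any]:
    merged = dict(stored)  # start from the persisted config
    if override_config_dict:
        merged.update(override_config_dict)  # overrides take precedence
    return merged


print(convert_config({"model": "gpt-4o", "temperature": 0.7}, {"temperature": 0.2}))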

View File

@@ -1,12 +1,12 @@
from collections.abc import Generator, Mapping, Sequence
-from typing import TYPE_CHECKING, Any, Union, final
+from typing import TYPE_CHECKING, Any, Optional, Union, final
from sqlalchemy.orm import Session
from core.app.app_config.entities import VariableEntityType
from core.app.entities.app_invoke_entities import InvokeFrom
from core.file import File, FileUploadConfig
-from core.workflow.enums import NodeType
+from core.workflow.nodes.enums import NodeType
from core.workflow.repositories.draft_variable_repository import (
DraftVariableSaver,
DraftVariableSaverFactory,
@@ -24,7 +24,7 @@ class BaseAppGenerator:
def _prepare_user_inputs(
self,
*,
-user_inputs: Mapping[str, Any] | None,
+user_inputs: Optional[Mapping[str, Any]],
variables: Sequence["VariableEntity"],
tenant_id: str,
strict_type_validation: bool = False,
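_prepare_user_inputs now types its payload as Optional[Mapping], so a None value must be normalized before the declared variables are checked. A sketch of that guard, with a made-up required-name check standing in for the real VariableEntity validation:

from collections.abc import Mapping
from typing import Any, Optional


def prepare_user_inputs(
    user_inputs: Optional[Mapping[str, Any]],
    required_variables: set[str],
) -> dict[str, Any]:
    inputs = dict(user_inputs or {})  # treat None as an empty mapping
    missing = required_variables - inputs.keys()
    if missing:
        raise ValueError(f"missing required inputs: {sorted(missing)}")
    return inputs


print(prepare_user_inputs({"query": "hi"}, {"query"}))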

View File

@@ -2,7 +2,7 @@ import queue
import time
from abc import abstractmethod
from enum import IntEnum, auto
-from typing import Any
+from typing import Any, Optional
from sqlalchemy.orm import DeclarativeMeta
@@ -116,7 +116,7 @@ class AppQueueManager:
Set task stop flag
:return:
"""
-result: Any | None = redis_client.get(cls._generate_task_belong_cache_key(task_id))
+result: Optional[Any] = redis_client.get(cls._generate_task_belong_cache_key(task_id))
if result is None:
return
@@ -127,21 +127,6 @@ class AppQueueManager:
stopped_cache_key = cls._generate_stopped_cache_key(task_id)
redis_client.setex(stopped_cache_key, 600, 1)
-@classmethod
-def set_stop_flag_no_user_check(cls, task_id: str) -> None:
-"""
-Set task stop flag without user permission check.
-This method allows stopping workflows without user context.
-:param task_id: The task ID to stop
-:return:
-"""
-if not task_id:
-return
-stopped_cache_key = cls._generate_stopped_cache_key(task_id)
-redis_client.setex(stopped_cache_key, 600, 1)
def _is_stopped(self) -> bool:
"""
Check if task is stopped
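The stop flag is a short-lived Redis key written with setex (600-second TTL); this compare removes the no-user-check variant shown above from the head side. A sketch of the flag pattern, with a hypothetical key format (the real _generate_stopped_cache_key is private to the class):

import redis

redis_client = redis.Redis()  # stands in for extensions.ext_redis.redis_client


def set_stop_flag(task_id: str) -> None:
    # a 600 s TTL lets stale flags expire on their own
    redis_client.setex(f"generate_task_stopped:{task_id}", 600, 1)


def is_stopped(task_id: str) -> bool:
    return redis_client.get(f"generate_task_stopped:{task_id}") is not None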

Some files were not shown because too many files have changed in this diff.