Mirror of https://github.com/langgenius/dify.git (synced 2026-02-07 00:23:57 +00:00)

Compare commits: 2 commits, dev/plugin ... fix/iterat
| Author | SHA1 | Date |
|---|---|---|
|  | 5f7771bc47 |  |
|  | 286741e139 |  |
```diff
@@ -1,13 +1,11 @@
 #!/bin/bash
 
-npm add -g pnpm@9.12.2
-cd web && pnpm install
+cd web && npm install
 pipx install poetry
 
 echo 'alias start-api="cd /workspaces/dify/api && poetry run python -m flask run --host 0.0.0.0 --port=5001 --debug"' >> ~/.bashrc
 echo 'alias start-worker="cd /workspaces/dify/api && poetry run python -m celery -A app.celery worker -P gevent -c 1 --loglevel INFO -Q dataset,generation,mail,ops_trace,app_deletion"' >> ~/.bashrc
-echo 'alias start-web="cd /workspaces/dify/web && pnpm dev"' >> ~/.bashrc
+echo 'alias start-web="cd /workspaces/dify/web && npm run dev"' >> ~/.bashrc
 echo 'alias start-containers="cd /workspaces/dify/docker && docker-compose -f docker-compose.middleware.yaml -p dify up -d"' >> ~/.bashrc
-echo 'alias stop-containers="cd /workspaces/dify/docker && docker-compose -f docker-compose.middleware.yaml -p dify down"' >> ~/.bashrc
 
 source /home/vscode/.bashrc
```
.github/actions/setup-poetry/action.yml (2 changes, vendored)

```diff
@@ -8,7 +8,7 @@ inputs:
   poetry-version:
     description: Poetry version to set up
     required: true
-    default: '2.0.1'
+    default: '1.8.4'
   poetry-lockfile:
     description: Path to the Poetry lockfile to restore cache from
     required: true
```
.github/pull_request_template.md (13 changes, vendored)

```diff
@@ -8,9 +8,16 @@ Please include a summary of the change and which issue is fixed. Please also inc
 
 # Screenshots
 
-| Before | After |
-|--------|-------|
-| ... | ... |
+<table>
+<tr>
+<td>Before: </td>
+<td>After: </td>
+</tr>
+<tr>
+<td>...</td>
+<td>...</td>
+</tr>
+</table>
 
 # Checklist
 
```
.github/workflows/api-tests.yml (18 changes, vendored)

```diff
@@ -26,9 +26,6 @@ jobs:
     steps:
       - name: Checkout code
         uses: actions/checkout@v4
-        with:
-          fetch-depth: 0
-          persist-credentials: false
 
       - name: Setup Poetry and Python ${{ matrix.python-version }}
         uses: ./.github/actions/setup-poetry
@@ -45,17 +42,16 @@
         run: poetry install -C api --with dev
 
       - name: Check dependencies in pyproject.toml
-        run: poetry run -P api bash dev/pytest/pytest_artifacts.sh
+        run: poetry run -C api bash dev/pytest/pytest_artifacts.sh
 
       - name: Run Unit tests
-        run: poetry run -P api bash dev/pytest/pytest_unit_tests.sh
+        run: poetry run -C api bash dev/pytest/pytest_unit_tests.sh
 
-      - name: Run dify config tests
-        run: poetry run -P api python dev/pytest/pytest_config_tests.py
+      - name: Run ModelRuntime
+        run: poetry run -C api bash dev/pytest/pytest_model_runtime.sh
 
-      - name: Run mypy
-        run: |
-          poetry run -C api python -m mypy --install-types --non-interactive .
+      - name: Run Tool
+        run: poetry run -C api bash dev/pytest/pytest_tools.sh
 
       - name: Set up dotenvs
         run: |
@@ -75,4 +71,4 @@
           ssrf_proxy
 
       - name: Run Workflow
-        run: poetry run -P api bash dev/pytest/pytest_workflow.sh
+        run: poetry run -C api bash dev/pytest/pytest_workflow.sh
```
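A hedged note on the `-P`/`-C` flips above (an editorial reading, not stated in the compare itself): Poetry's `-P`/`--project` option only exists in the Poetry 2.x series, while `-C`/`--directory` is also accepted by 1.x, so switching these steps back to `-C` lines up with the setup action's default dropping from 2.0.1 to 1.8.4 earlier in this compare. A minimal sketch of the two spellings, reusing a step from the hunk above:

```yaml
# Hedged sketch: the same test step under the two Poetry generations.
# With Poetry >= 2.0 (left side of this compare), -P/--project points
# Poetry at the project directory.
- name: Run Unit tests
  run: poetry run -P api bash dev/pytest/pytest_unit_tests.sh

# With Poetry 1.8.x (right side), -P does not exist; -C/--directory is
# the way to point Poetry at api/pyproject.toml.
- name: Run Unit tests
  run: poetry run -C api bash dev/pytest/pytest_unit_tests.sh
```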
.github/workflows/build-push.yml (16 changes, vendored)

```diff
@@ -5,7 +5,6 @@ on:
    branches:
      - "main"
      - "deploy/dev"
-      - "dev/plugin-deploy"
  release:
    types: [published]
 
@@ -80,12 +79,10 @@
          cache-to: type=gha,mode=max,scope=${{ matrix.service_name }}
 
      - name: Export digest
-        env:
-          DIGEST: ${{ steps.build.outputs.digest }}
        run: |
          mkdir -p /tmp/digests
-          sanitized_digest=${DIGEST#sha256:}
-          touch "/tmp/digests/${sanitized_digest}"
+          digest="${{ steps.build.outputs.digest }}"
+          touch "/tmp/digests/${digest#sha256:}"
 
      - name: Upload digest
        uses: actions/upload-artifact@v4
@@ -135,15 +132,10 @@
 
      - name: Create manifest list and push
        working-directory: /tmp/digests
-        env:
-          IMAGE_NAME: ${{ env[matrix.image_name_env] }}
        run: |
          docker buildx imagetools create $(jq -cr '.tags | map("-t " + .) | join(" ")' <<< "$DOCKER_METADATA_OUTPUT_JSON") \
-            $(printf "$IMAGE_NAME@sha256:%s " *)
+            $(printf '${{ env[matrix.image_name_env] }}@sha256:%s ' *)
 
      - name: Inspect image
-        env:
-          IMAGE_NAME: ${{ env[matrix.image_name_env] }}
-          IMAGE_VERSION: ${{ steps.meta.outputs.version }}
        run: |
-          docker buildx imagetools inspect "$IMAGE_NAME:$IMAGE_VERSION"
+          docker buildx imagetools inspect ${{ env[matrix.image_name_env] }}:${{ steps.meta.outputs.version }}
```
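The digest and inspect hunks above trade an `env:` indirection for inline `${{ ... }}` interpolation. As a general GitHub Actions consideration (an editorial note, not something stated in this compare), an inline expression is substituted into the script text before the shell runs, so a value containing shell metacharacters becomes part of the script itself; routing the value through `env:` keeps the script constant and lets the shell treat the value as ordinary data. The left side's shape, sketched:

```yaml
# Hedged sketch of the env-indirection pattern from the left side above.
- name: Export digest
  env:
    # Expanded by the runner into an environment variable; the run script
    # below never changes, whatever the digest value contains.
    DIGEST: ${{ steps.build.outputs.digest }}
  run: |
    mkdir -p /tmp/digests
    sanitized_digest=${DIGEST#sha256:}       # strip the "sha256:" prefix
    touch "/tmp/digests/${sanitized_digest}"
```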
.github/workflows/db-migration-test.yml (4 changes, vendored)

```diff
@@ -4,7 +4,6 @@ on:
  pull_request:
    branches:
      - main
-      - plugins/beta
    paths:
      - api/migrations/**
      - .github/workflows/db-migration-test.yml
@@ -20,9 +19,6 @@
    steps:
      - name: Checkout code
        uses: actions/checkout@v4
-        with:
-          fetch-depth: 0
-          persist-credentials: false
 
      - name: Setup Poetry and Python
        uses: ./.github/actions/setup-poetry
```
.github/workflows/docker-build.yml (47 changes, vendored; file removed)

```diff
@@ -1,47 +0,0 @@
-name: Build docker image
-
-on:
-  pull_request:
-    branches:
-      - "main"
-    paths:
-      - api/Dockerfile
-      - web/Dockerfile
-
-concurrency:
-  group: docker-build-${{ github.head_ref || github.run_id }}
-  cancel-in-progress: true
-
-jobs:
-  build-docker:
-    runs-on: ubuntu-latest
-    strategy:
-      matrix:
-        include:
-          - service_name: "api-amd64"
-            platform: linux/amd64
-            context: "api"
-          - service_name: "api-arm64"
-            platform: linux/arm64
-            context: "api"
-          - service_name: "web-amd64"
-            platform: linux/amd64
-            context: "web"
-          - service_name: "web-arm64"
-            platform: linux/arm64
-            context: "web"
-    steps:
-      - name: Set up QEMU
-        uses: docker/setup-qemu-action@v3
-
-      - name: Set up Docker Buildx
-        uses: docker/setup-buildx-action@v3
-
-      - name: Build Docker Image
-        uses: docker/build-push-action@v6
-        with:
-          push: false
-          context: "{{defaultContext}}:${{ matrix.context }}"
-          platforms: ${{ matrix.platform }}
-          cache-from: type=gha
-          cache-to: type=gha,mode=max
```
.github/workflows/expose_service_ports.sh (3 changes, vendored)

```diff
@@ -9,6 +9,5 @@ yq eval '.services["pgvecto-rs"].ports += ["5431:5432"]' -i docker/docker-compos
 yq eval '.services["elasticsearch"].ports += ["9200:9200"]' -i docker/docker-compose.yaml
 yq eval '.services.couchbase-server.ports += ["8091-8096:8091-8096"]' -i docker/docker-compose.yaml
 yq eval '.services.couchbase-server.ports += ["11210:11210"]' -i docker/docker-compose.yaml
-yq eval '.services.tidb.ports += ["4000:4000"]' -i docker/tidb/docker-compose.yaml
 
-echo "Ports exposed for sandbox, weaviate, tidb, qdrant, chroma, milvus, pgvector, pgvecto-rs, elasticsearch, couchbase"
+echo "Ports exposed for sandbox, weaviate, qdrant, chroma, milvus, pgvector, pgvecto-rs, elasticsearch, couchbase"
```
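For context on the script above: each `yq eval '... += [...]' -i` call appends a host-to-container port mapping to a service definition in place. A hedged sketch of the effect of one such call (other keys omitted, standard Compose layout assumed):

```yaml
# Hedged sketch: roughly what
#   yq eval '.services["elasticsearch"].ports += ["9200:9200"]' -i docker/docker-compose.yaml
# leaves behind in docker/docker-compose.yaml.
services:
  elasticsearch:
    ports:
      - "9200:9200"   # host:container mapping appended by the script
```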
.github/workflows/style.yml (58 changes, vendored)

```diff
@@ -17,9 +17,6 @@ jobs:
    steps:
      - name: Checkout code
        uses: actions/checkout@v4
-        with:
-          fetch-depth: 0
-          persist-credentials: false
 
      - name: Check changed files
        id: changed-files
@@ -40,13 +37,12 @@ jobs:
      - name: Ruff check
        if: steps.changed-files.outputs.any_changed == 'true'
        run: |
-          poetry run -C api ruff --version
-          poetry run -C api ruff check ./
-          poetry run -C api ruff format --check ./
+          poetry run -C api ruff check ./api
+          poetry run -C api ruff format --check ./api
 
      - name: Dotenv check
        if: steps.changed-files.outputs.any_changed == 'true'
-        run: poetry run -P api dotenv-linter ./api/.env.example ./web/.env.example
+        run: poetry run -C api dotenv-linter ./api/.env.example ./web/.env.example
 
      - name: Lint hints
        if: failure()
@@ -62,9 +58,6 @@ jobs:
    steps:
      - name: Checkout code
        uses: actions/checkout@v4
-        with:
-          fetch-depth: 0
-          persist-credentials: false
 
      - name: Check changed files
        id: changed-files
@@ -72,58 +65,22 @@ jobs:
        with:
          files: web/**
 
-      - name: Install pnpm
-        uses: pnpm/action-setup@v4
-        with:
-          version: 10
-          run_install: false
-
      - name: Setup NodeJS
        uses: actions/setup-node@v4
        if: steps.changed-files.outputs.any_changed == 'true'
        with:
          node-version: 20
-          cache: pnpm
+          cache: yarn
          cache-dependency-path: ./web/package.json
 
      - name: Web dependencies
        if: steps.changed-files.outputs.any_changed == 'true'
-        run: pnpm install --frozen-lockfile
+        run: yarn install --frozen-lockfile
 
      - name: Web style check
        if: steps.changed-files.outputs.any_changed == 'true'
-        run: pnpm run lint
+        run: yarn run lint
 
-  docker-compose-template:
-    name: Docker Compose Template
-    runs-on: ubuntu-latest
-
-    steps:
-      - name: Checkout code
-        uses: actions/checkout@v4
-        with:
-          fetch-depth: 0
-          persist-credentials: false
-
-      - name: Check changed files
-        id: changed-files
-        uses: tj-actions/changed-files@v45
-        with:
-          files: |
-            docker/generate_docker_compose
-            docker/.env.example
-            docker/docker-compose-template.yaml
-            docker/docker-compose.yaml
-
-      - name: Generate Docker Compose
-        if: steps.changed-files.outputs.any_changed == 'true'
-        run: |
-          cd docker
-          ./generate_docker_compose
-
-      - name: Check for changes
-        if: steps.changed-files.outputs.any_changed == 'true'
-        run: git diff --exit-code
-
  superlinter:
    name: SuperLinter
@@ -132,9 +89,6 @@ jobs:
    steps:
      - name: Checkout code
        uses: actions/checkout@v4
-        with:
-          fetch-depth: 0
-          persist-credentials: false
 
      - name: Check changed files
        id: changed-files
```
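On the removed pnpm steps above, one hedged observation: `actions/setup-node` with `cache: pnpm` resolves the cache location by invoking pnpm itself, so pnpm has to be on the PATH before that step runs, which is presumably why the left side places `pnpm/action-setup` ahead of `actions/setup-node`. The removed ordering, reassembled:

```yaml
# Hedged sketch of the left side's ordering; versions come from the
# removed lines above.
- name: Install pnpm
  uses: pnpm/action-setup@v4    # puts pnpm on PATH first
  with:
    version: 10
    run_install: false

- name: Setup NodeJS
  uses: actions/setup-node@v4
  with:
    node-version: 20
    cache: pnpm                 # setup-node asks pnpm where its store lives
    cache-dependency-path: ./web/package.json
```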
.github/workflows/tool-test-sdks.yaml (9 changes, vendored)

```diff
@@ -26,19 +26,16 @@ jobs:
 
    steps:
      - uses: actions/checkout@v4
-        with:
-          fetch-depth: 0
-          persist-credentials: false
 
      - name: Use Node.js ${{ matrix.node-version }}
        uses: actions/setup-node@v4
        with:
          node-version: ${{ matrix.node-version }}
          cache: ''
-          cache-dependency-path: 'pnpm-lock.yaml'
+          cache-dependency-path: 'yarn.lock'
 
      - name: Install Dependencies
-        run: pnpm install --frozen-lockfile
+        run: yarn install
 
      - name: Test
-        run: pnpm test
+        run: yarn test
```
```diff
@@ -16,7 +16,6 @@ jobs:
      - uses: actions/checkout@v4
        with:
          fetch-depth: 2 # last 2 commits
-          persist-credentials: false
 
      - name: Check for file changes in i18n/en-US
        id: check_files
@@ -39,11 +38,11 @@ jobs:
 
      - name: Install dependencies
        if: env.FILES_CHANGED == 'true'
-        run: pnpm install --frozen-lockfile
+        run: yarn install --frozen-lockfile
 
      - name: Run npm script
        if: env.FILES_CHANGED == 'true'
-        run: pnpm run auto-gen-i18n
+        run: npm run auto-gen-i18n
 
      - name: Create Pull Request
        if: env.FILES_CHANGED == 'true'
```
.github/workflows/vdb-tests.yml (16 changes, vendored)

```diff
@@ -28,9 +28,6 @@ jobs:
    steps:
      - name: Checkout code
        uses: actions/checkout@v4
-        with:
-          fetch-depth: 0
-          persist-credentials: false
 
      - name: Setup Poetry and Python ${{ matrix.python-version }}
        uses: ./.github/actions/setup-poetry
@@ -54,14 +51,6 @@ jobs:
      - name: Expose Service Ports
        run: sh .github/workflows/expose_service_ports.sh
 
-      - name: Set up Vector Store (TiDB)
-        uses: hoverkraft-tech/compose-action@v2.0.2
-        with:
-          compose-file: docker/tidb/docker-compose.yaml
-          services: |
-            tidb
-            tiflash
-
      - name: Set up Vector Stores (Weaviate, Qdrant, PGVector, Milvus, PgVecto-RS, Chroma, MyScale, ElasticSearch, Couchbase)
        uses: hoverkraft-tech/compose-action@v2.0.2
        with:
@@ -79,8 +68,5 @@ jobs:
          chroma
          elasticsearch
 
-      - name: Check TiDB Ready
-        run: poetry run -P api python api/tests/integration_tests/vdb/tidb_vector/check_tiflash_ready.py
-
      - name: Test Vector Stores
-        run: poetry run -P api bash dev/pytest/pytest_vdb.sh
+        run: poetry run -C api bash dev/pytest/pytest_vdb.sh
```
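The removed TiDB steps above follow a bring-up-then-poll pattern: the compose action starts `tidb` and `tiflash`, and a separate script then waits (judging by its name) until TiFlash reports ready before any test touches it, since containers typically return from startup before the store is actually usable. The removed sequence, reassembled for reference:

```yaml
# Hedged reassembly of the left side's TiDB steps; paths and names are
# taken verbatim from the removed lines above.
- name: Set up Vector Store (TiDB)
  uses: hoverkraft-tech/compose-action@v2.0.2
  with:
    compose-file: docker/tidb/docker-compose.yaml
    services: |
      tidb
      tiflash

# Poll until TiFlash is ready; only then run the vector-store tests.
- name: Check TiDB Ready
  run: poetry run -P api python api/tests/integration_tests/vdb/tidb_vector/check_tiflash_ready.py
```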
.github/workflows/web-tests.yml (35 changes, vendored)

```diff
@@ -22,34 +22,25 @@ jobs:
    steps:
      - name: Checkout code
        uses: actions/checkout@v4
-        with:
-          fetch-depth: 0
-          persist-credentials: false
 
      - name: Check changed files
        id: changed-files
        uses: tj-actions/changed-files@v45
        with:
          files: web/**
-      # to run pnpm, should install package canvas, but it always install failed on amd64 under ubuntu-latest
-      # - name: Install pnpm
-      #   uses: pnpm/action-setup@v4
-      #   with:
-      #     version: 10
-      #     run_install: false
 
-      # - name: Setup Node.js
-      #   uses: actions/setup-node@v4
-      #   if: steps.changed-files.outputs.any_changed == 'true'
-      #   with:
-      #     node-version: 20
-      #     cache: pnpm
-      #     cache-dependency-path: ./web/package.json
+      - name: Setup Node.js
+        uses: actions/setup-node@v4
+        if: steps.changed-files.outputs.any_changed == 'true'
+        with:
+          node-version: 20
+          cache: yarn
+          cache-dependency-path: ./web/package.json
 
-      # - name: Install dependencies
-      #   if: steps.changed-files.outputs.any_changed == 'true'
-      #   run: pnpm install --frozen-lockfile
+      - name: Install dependencies
+        if: steps.changed-files.outputs.any_changed == 'true'
+        run: yarn install --frozen-lockfile
 
-      # - name: Run tests
-      #   if: steps.changed-files.outputs.any_changed == 'true'
-      #   run: pnpm test
+      - name: Run tests
+        if: steps.changed-files.outputs.any_changed == 'true'
+        run: yarn test
```
.gitignore (8 changes, vendored)

```diff
@@ -163,7 +163,6 @@ docker/volumes/db/data/*
 docker/volumes/redis/data/*
 docker/volumes/weaviate/*
 docker/volumes/qdrant/*
-docker/tidb/volumes/*
 docker/volumes/etcd/*
 docker/volumes/minio/*
 docker/volumes/milvus/*
@@ -176,7 +175,6 @@ docker/volumes/pgvector/data/*
 docker/volumes/pgvecto_rs/data/*
 docker/volumes/couchbase/*
 docker/volumes/oceanbase/*
-docker/volumes/plugin_daemon/*
 !docker/volumes/oceanbase/init.d
 
 docker/nginx/conf.d/default.conf
@@ -195,9 +193,3 @@ api/.vscode
 
 .idea/
 .vscode
-
-# pnpm
-/.pnpm-store
-
-# plugin migrate
-plugins.jsonl
```
```diff
@@ -73,7 +73,7 @@ Dify requires the following dependencies to build, make sure they're installed o
 * [Docker](https://www.docker.com/)
 * [Docker Compose](https://docs.docker.com/compose/install/)
 * [Node.js v18.x (LTS)](http://nodejs.org)
-* [pnpm](https://pnpm.io/)
+* [npm](https://www.npmjs.com/) version 8.x.x or [Yarn](https://yarnpkg.com/)
 * [Python](https://www.python.org/) version 3.11.x or 3.12.x
 
 ### 4. Installations
```

```diff
@@ -70,7 +70,7 @@ Dify 依赖以下工具和库:
 - [Docker](https://www.docker.com/)
 - [Docker Compose](https://docs.docker.com/compose/install/)
 - [Node.js v18.x (LTS)](http://nodejs.org)
-- [pnpm](https://pnpm.io/)
+- [npm](https://www.npmjs.com/) version 8.x.x or [Yarn](https://yarnpkg.com/)
 - [Python](https://www.python.org/) version 3.11.x or 3.12.x
 
 ### 4. 安装
```

```diff
@@ -73,7 +73,7 @@ Dify を構築するには次の依存関係が必要です。それらがシス
 - [Docker](https://www.docker.com/)
 - [Docker Compose](https://docs.docker.com/compose/install/)
 - [Node.js v18.x (LTS)](http://nodejs.org)
-- [pnpm](https://pnpm.io/)
+- [npm](https://www.npmjs.com/) version 8.x.x or [Yarn](https://yarnpkg.com/)
 - [Python](https://www.python.org/) version 3.11.x or 3.12.x
 
 ### 4. インストール
```

```diff
@@ -72,7 +72,7 @@ Dify yêu cầu các phụ thuộc sau để build, hãy đảm bảo chúng đ
 - [Docker](https://www.docker.com/)
 - [Docker Compose](https://docs.docker.com/compose/install/)
 - [Node.js v18.x (LTS)](http://nodejs.org)
-- [pnpm](https://pnpm.io/)
+- [npm](https://www.npmjs.com/) phiên bản 8.x.x hoặc [Yarn](https://yarnpkg.com/)
 - [Python](https://www.python.org/) phiên bản 3.11.x hoặc 3.12.x
 
 ### 4. Cài đặt
```
LICENSE (23 changes)

```diff
@@ -1,12 +1,12 @@
 # Open Source License
 
-Dify is licensed under a modified version of the Apache License 2.0, with the following additional conditions:
+Dify is licensed under the Apache License 2.0, with the following additional conditions:
 
 1. Dify may be utilized commercially, including as a backend service for other applications or as an application development platform for enterprises. Should the conditions below be met, a commercial license must be obtained from the producer:
 
 a. Multi-tenant service: Unless explicitly authorized by Dify in writing, you may not use the Dify source code to operate a multi-tenant environment.
 - Tenant Definition: Within the context of Dify, one tenant corresponds to one workspace. The workspace provides a separated area for each tenant's data and configurations.
 
 b. LOGO and copyright information: In the process of using Dify's frontend, you may not remove or modify the LOGO or copyright information in the Dify console or applications. This restriction is inapplicable to uses of Dify that do not involve its frontend.
 - Frontend Definition: For the purposes of this license, the "frontend" of Dify includes all components located in the `web/` directory when running Dify from the raw source code, or the "web" image when running Dify with Docker.
 
@@ -21,4 +21,19 @@ Apart from the specific conditions mentioned above, all other rights and restric
 
 The interactive design of this product is protected by appearance patent.
 
-© 2025 LangGenius, Inc.
+© 2024 LangGenius, Inc.
+
+
+----------
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
```
README.md (69 changes)

```diff
@@ -25,9 +25,6 @@
     <a href="https://twitter.com/intent/follow?screen_name=dify_ai" target="_blank">
         <img src="https://img.shields.io/twitter/follow/dify_ai?logo=X&color=%20%23f5f5f5"
             alt="follow on X(Twitter)"></a>
-    <a href="https://www.linkedin.com/company/langgenius/" target="_blank">
-        <img src="https://custom-icon-badges.demolab.com/badge/LinkedIn-0A66C2?logo=linkedin-white&logoColor=fff"
-            alt="follow on LinkedIn"></a>
     <a href="https://hub.docker.com/u/langgenius" target="_blank">
         <img alt="Docker Pulls" src="https://img.shields.io/docker/pulls/langgenius/dify-web?labelColor=%20%23FDB062&color=%20%23f79009"></a>
     <a href="https://github.com/langgenius/dify/graphs/commit-activity" target="_blank">
@@ -108,72 +105,6 @@ Please refer to our [FAQ](https://docs.dify.ai/getting-started/install-self-host
 **7. Backend-as-a-Service**:
 All of Dify's offerings come with corresponding APIs, so you could effortlessly integrate Dify into your own business logic.
 
-## Feature Comparison
-<table style="width: 100%;">
-    <tr>
-        <th align="center">Feature</th>
-        <th align="center">Dify.AI</th>
-        <th align="center">LangChain</th>
-        <th align="center">Flowise</th>
-        <th align="center">OpenAI Assistants API</th>
-    </tr>
-    <tr>
-        <td align="center">Programming Approach</td>
-        <td align="center">API + App-oriented</td>
-        <td align="center">Python Code</td>
-        <td align="center">App-oriented</td>
-        <td align="center">API-oriented</td>
-    </tr>
-    <tr>
-        <td align="center">Supported LLMs</td>
-        <td align="center">Rich Variety</td>
-        <td align="center">Rich Variety</td>
-        <td align="center">Rich Variety</td>
-        <td align="center">OpenAI-only</td>
-    </tr>
-    <tr>
-        <td align="center">RAG Engine</td>
-        <td align="center">✅</td>
-        <td align="center">✅</td>
-        <td align="center">✅</td>
-        <td align="center">✅</td>
-    </tr>
-    <tr>
-        <td align="center">Agent</td>
-        <td align="center">✅</td>
-        <td align="center">✅</td>
-        <td align="center">❌</td>
-        <td align="center">✅</td>
-    </tr>
-    <tr>
-        <td align="center">Workflow</td>
-        <td align="center">✅</td>
-        <td align="center">❌</td>
-        <td align="center">✅</td>
-        <td align="center">❌</td>
-    </tr>
-    <tr>
-        <td align="center">Observability</td>
-        <td align="center">✅</td>
-        <td align="center">✅</td>
-        <td align="center">❌</td>
-        <td align="center">❌</td>
-    </tr>
-    <tr>
-        <td align="center">Enterprise Feature (SSO/Access control)</td>
-        <td align="center">✅</td>
-        <td align="center">❌</td>
-        <td align="center">❌</td>
-        <td align="center">❌</td>
-    </tr>
-    <tr>
-        <td align="center">Local Deployment</td>
-        <td align="center">✅</td>
-        <td align="center">✅</td>
-        <td align="center">✅</td>
-        <td align="center">❌</td>
-    </tr>
-</table>
-
 ## Using Dify
 
```
```diff
@@ -21,9 +21,6 @@
     <a href="https://twitter.com/intent/follow?screen_name=dify_ai" target="_blank">
         <img src="https://img.shields.io/twitter/follow/dify_ai?logo=X&color=%20%23f5f5f5"
             alt="follow on X(Twitter)"></a>
-    <a href="https://www.linkedin.com/company/langgenius/" target="_blank">
-        <img src="https://custom-icon-badges.demolab.com/badge/LinkedIn-0A66C2?logo=linkedin-white&logoColor=fff"
-            alt="follow on LinkedIn"></a>
     <a href="https://hub.docker.com/u/langgenius" target="_blank">
         <img alt="Docker Pulls" src="https://img.shields.io/docker/pulls/langgenius/dify-web?labelColor=%20%23FDB062&color=%20%23f79009"></a>
     <a href="https://github.com/langgenius/dify/graphs/commit-activity" target="_blank">
```

```diff
@@ -21,9 +21,6 @@
     <a href="https://twitter.com/intent/follow?screen_name=dify_ai" target="_blank">
         <img src="https://img.shields.io/twitter/follow/dify_ai?logo=X&color=%20%23f5f5f5"
             alt="follow on X(Twitter)"></a>
-    <a href="https://www.linkedin.com/company/langgenius/" target="_blank">
-        <img src="https://custom-icon-badges.demolab.com/badge/LinkedIn-0A66C2?logo=linkedin-white&logoColor=fff"
-            alt="follow on LinkedIn"></a>
     <a href="https://hub.docker.com/u/langgenius" target="_blank">
         <img alt="Docker Pulls" src="https://img.shields.io/docker/pulls/langgenius/dify-web?labelColor=%20%23FDB062&color=%20%23f79009"></a>
     <a href="https://github.com/langgenius/dify/graphs/commit-activity" target="_blank">
```

```diff
@@ -21,9 +21,6 @@
     <a href="https://twitter.com/intent/follow?screen_name=dify_ai" target="_blank">
         <img src="https://img.shields.io/twitter/follow/dify_ai?logo=X&color=%20%23f5f5f5"
             alt="seguir en X(Twitter)"></a>
-    <a href="https://www.linkedin.com/company/langgenius/" target="_blank">
-        <img src="https://custom-icon-badges.demolab.com/badge/LinkedIn-0A66C2?logo=linkedin-white&logoColor=fff"
-            alt="seguir en LinkedIn"></a>
     <a href="https://hub.docker.com/u/langgenius" target="_blank">
         <img alt="Descargas de Docker" src="https://img.shields.io/docker/pulls/langgenius/dify-web?labelColor=%20%23FDB062&color=%20%23f79009"></a>
     <a href="https://github.com/langgenius/dify/graphs/commit-activity" target="_blank">
```
README_FR.md (19 changes)

```diff
@@ -21,9 +21,6 @@
     <a href="https://twitter.com/intent/follow?screen_name=dify_ai" target="_blank">
         <img src="https://img.shields.io/twitter/follow/dify_ai?logo=X&color=%20%23f5f5f5"
             alt="suivre sur X(Twitter)"></a>
-    <a href="https://www.linkedin.com/company/langgenius/" target="_blank">
-        <img src="https://custom-icon-badges.demolab.com/badge/LinkedIn-0A66C2?logo=linkedin-white&logoColor=fff"
-            alt="suivre sur LinkedIn"></a>
     <a href="https://hub.docker.com/u/langgenius" target="_blank">
         <img alt="Tirages Docker" src="https://img.shields.io/docker/pulls/langgenius/dify-web?labelColor=%20%23FDB062&color=%20%23f79009"></a>
     <a href="https://github.com/langgenius/dify/graphs/commit-activity" target="_blank">
@@ -55,7 +52,7 @@
 Dify est une plateforme de développement d'applications LLM open source. Son interface intuitive combine un flux de travail d'IA, un pipeline RAG, des capacités d'agent, une gestion de modèles, des fonctionnalités d'observabilité, et plus encore, vous permettant de passer rapidement du prototype à la production. Voici une liste des fonctionnalités principales:
 </br> </br>
 
-**1. Flux de travail** :
+**1. Flux de travail**:
 Construisez et testez des flux de travail d'IA puissants sur un canevas visuel, en utilisant toutes les fonctionnalités suivantes et plus encore.
 
 
@@ -63,25 +60,27 @@ Dify est une plateforme de développement d'applications LLM open source. Son in
 
 
 
-**2. Prise en charge complète des modèles** :
+**2. Prise en charge complète des modèles**:
 Intégration transparente avec des centaines de LLM propriétaires / open source provenant de dizaines de fournisseurs d'inférence et de solutions auto-hébergées, couvrant GPT, Mistral, Llama3, et tous les modèles compatibles avec l'API OpenAI. Une liste complète des fournisseurs de modèles pris en charge se trouve [ici](https://docs.dify.ai/getting-started/readme/model-providers).
 
 
 
-**3. IDE de prompt** :
+**3. IDE de prompt**:
 Interface intuitive pour créer des prompts, comparer les performances des modèles et ajouter des fonctionnalités supplémentaires telles que la synthèse vocale à une application basée sur des chats.
 
-**4. Pipeline RAG** :
+**4. Pipeline RAG**:
 Des capacités RAG étendues qui couvrent tout, de l'ingestion de documents à la récupération, avec un support prêt à l'emploi pour l'extraction de texte à partir de PDF, PPT et autres formats de document courants.
 
-**5. Capacités d'agent** :
+**5. Capac
+
+ités d'agent**:
 Vous pouvez définir des agents basés sur l'appel de fonction LLM ou ReAct, et ajouter des outils pré-construits ou personnalisés pour l'agent. Dify fournit plus de 50 outils intégrés pour les agents d'IA, tels que la recherche Google, DALL·E, Stable Diffusion et WolframAlpha.
 
-**6. LLMOps** :
+**6. LLMOps**:
 Surveillez et analysez les journaux d'application et les performances au fil du temps. Vous pouvez continuellement améliorer les prompts, les ensembles de données et les modèles en fonction des données de production et des annotations.
 
-**7. Backend-as-a-Service** :
+**7. Backend-as-a-Service**:
 Toutes les offres de Dify sont accompagnées d'API correspondantes, vous permettant d'intégrer facilement Dify dans votre propre logique métier.
 
 
```
```diff
@@ -21,9 +21,6 @@
     <a href="https://twitter.com/intent/follow?screen_name=dify_ai" target="_blank">
         <img src="https://img.shields.io/twitter/follow/dify_ai?logo=X&color=%20%23f5f5f5"
             alt="X(Twitter)でフォロー"></a>
-    <a href="https://www.linkedin.com/company/langgenius/" target="_blank">
-        <img src="https://custom-icon-badges.demolab.com/badge/LinkedIn-0A66C2?logo=linkedin-white&logoColor=fff"
-            alt="LinkedInでフォロー"></a>
     <a href="https://hub.docker.com/u/langgenius" target="_blank">
         <img alt="Docker Pulls" src="https://img.shields.io/docker/pulls/langgenius/dify-web?labelColor=%20%23FDB062&color=%20%23f79009"></a>
     <a href="https://github.com/langgenius/dify/graphs/commit-activity" target="_blank">
@@ -164,7 +161,7 @@ DifyはオープンソースのLLMアプリケーション開発プラットフ
 
 - **企業/組織向けのDify</br>**
   企業中心の機能を提供しています。[メールを送信](mailto:business@dify.ai?subject=[GitHub]Business%20License%20Inquiry)して企業のニーズについて相談してください。 </br>
-  > AWSを使用しているスタートアップ企業や中小企業の場合は、[AWS Marketplace](https://aws.amazon.com/marketplace/pp/prodview-t23mebxzwjhu6)のDify Premiumをチェックして、ワンクリックで自分のAWS VPCにデプロイできます。さらに、手頃な価格のAMIオファリングとして、ロゴやブランディングをカスタマイズしてアプリケーションを作成するオプションがあります。
+  > AWSを使用しているスタートアップ企業や中小企業の場合は、[AWS Marketplace](https://aws.amazon.com/marketplace/pp/prodview-t22mebxzwjhu6)のDify Premiumをチェックして、ワンクリックで自分のAWS VPCにデプロイできます。さらに、手頃な価格のAMIオファリングどして、ロゴやブランディングをカスタマイズしてアプリケーションを作成するオプションがあります。
 
 
 ## 最新の情報を入手
```
```diff
@@ -21,9 +21,6 @@
     <a href="https://twitter.com/intent/follow?screen_name=dify_ai" target="_blank">
         <img src="https://img.shields.io/twitter/follow/dify_ai?logo=X&color=%20%23f5f5f5"
             alt="follow on X(Twitter)"></a>
-    <a href="https://www.linkedin.com/company/langgenius/" target="_blank">
-        <img src="https://custom-icon-badges.demolab.com/badge/LinkedIn-0A66C2?logo=linkedin-white&logoColor=fff"
-            alt="follow on LinkedIn"></a>
     <a href="https://hub.docker.com/u/langgenius" target="_blank">
         <img alt="Docker Pulls" src="https://img.shields.io/docker/pulls/langgenius/dify-web?labelColor=%20%23FDB062&color=%20%23f79009"></a>
     <a href="https://github.com/langgenius/dify/graphs/commit-activity" target="_blank">
@@ -87,7 +84,9 @@ Dify is an open-source LLM app development platform. Its intuitive interface com
 
 ## Feature Comparison
 <table style="width: 100%;">
-    <tr>
+    <tr
+
+>
         <th align="center">Feature</th>
         <th align="center">Dify.AI</th>
         <th align="center">LangChain</th>
```
```diff
@@ -21,9 +21,6 @@
     <a href="https://twitter.com/intent/follow?screen_name=dify_ai" target="_blank">
         <img src="https://img.shields.io/twitter/follow/dify_ai?logo=X&color=%20%23f5f5f5"
             alt="follow on X(Twitter)"></a>
-    <a href="https://www.linkedin.com/company/langgenius/" target="_blank">
-        <img src="https://custom-icon-badges.demolab.com/badge/LinkedIn-0A66C2?logo=linkedin-white&logoColor=fff"
-            alt="follow on LinkedIn"></a>
     <a href="https://hub.docker.com/u/langgenius" target="_blank">
         <img alt="Docker Pulls" src="https://img.shields.io/docker/pulls/langgenius/dify-web?labelColor=%20%23FDB062&color=%20%23f79009"></a>
     <a href="https://github.com/langgenius/dify/graphs/commit-activity" target="_blank">
```

```diff
@@ -25,9 +25,6 @@
     <a href="https://twitter.com/intent/follow?screen_name=dify_ai" target="_blank">
         <img src="https://img.shields.io/twitter/follow/dify_ai?logo=X&color=%20%23f5f5f5"
             alt="follow on X(Twitter)"></a>
-    <a href="https://www.linkedin.com/company/langgenius/" target="_blank">
-        <img src="https://custom-icon-badges.demolab.com/badge/LinkedIn-0A66C2?logo=linkedin-white&logoColor=fff"
-            alt="follow on LinkedIn"></a>
     <a href="https://hub.docker.com/u/langgenius" target="_blank">
         <img alt="Docker Pulls" src="https://img.shields.io/docker/pulls/langgenius/dify-web?labelColor=%20%23FDB062&color=%20%23f79009"></a>
     <a href="https://github.com/langgenius/dify/graphs/commit-activity" target="_blank">
```
README_SI.md (72 changes)

```diff
@@ -22,9 +22,6 @@
     <a href="https://twitter.com/intent/follow?screen_name=dify_ai" target="_blank">
         <img src="https://img.shields.io/twitter/follow/dify_ai?logo=X&color=%20%23f5f5f5"
             alt="follow on X(Twitter)"></a>
-    <a href="https://www.linkedin.com/company/langgenius/" target="_blank">
-        <img src="https://custom-icon-badges.demolab.com/badge/LinkedIn-0A66C2?logo=linkedin-white&logoColor=fff"
-            alt="follow on LinkedIn"></a>
     <a href="https://hub.docker.com/u/langgenius" target="_blank">
         <img alt="Docker Pulls" src="https://img.shields.io/docker/pulls/langgenius/dify-web?labelColor=%20%23FDB062&color=%20%23f79009"></a>
     <a href="https://github.com/langgenius/dify/graphs/commit-activity" target="_blank">
@@ -106,73 +103,6 @@ Prosimo, glejte naša pogosta vprašanja [FAQ](https://docs.dify.ai/getting-star
 **7. Backend-as-a-Service**:
 AVse ponudbe Difyja so opremljene z ustreznimi API-ji, tako da lahko Dify brez težav integrirate v svojo poslovno logiko.
 
-## Primerjava Funkcij
-
-<table style="width: 100%;">
-    <tr>
-        <th align="center">Funkcija</th>
-        <th align="center">Dify.AI</th>
-        <th align="center">LangChain</th>
-        <th align="center">Flowise</th>
-        <th align="center">OpenAI Assistants API</th>
-    </tr>
-    <tr>
-        <td align="center">Programski pristop</td>
-        <td align="center">API + usmerjeno v aplikacije</td>
-        <td align="center">Python koda</td>
-        <td align="center">Usmerjeno v aplikacije</td>
-        <td align="center">Usmerjeno v API</td>
-    </tr>
-    <tr>
-        <td align="center">Podprti LLM-ji</td>
-        <td align="center">Bogata izbira</td>
-        <td align="center">Bogata izbira</td>
-        <td align="center">Bogata izbira</td>
-        <td align="center">Samo OpenAI</td>
-    </tr>
-    <tr>
-        <td align="center">RAG pogon</td>
-        <td align="center">✅</td>
-        <td align="center">✅</td>
-        <td align="center">✅</td>
-        <td align="center">✅</td>
-    </tr>
-    <tr>
-        <td align="center">Agent</td>
-        <td align="center">✅</td>
-        <td align="center">✅</td>
-        <td align="center">❌</td>
-        <td align="center">✅</td>
-    </tr>
-    <tr>
-        <td align="center">Potek dela</td>
-        <td align="center">✅</td>
-        <td align="center">❌</td>
-        <td align="center">✅</td>
-        <td align="center">❌</td>
-    </tr>
-    <tr>
-        <td align="center">Spremljanje</td>
-        <td align="center">✅</td>
-        <td align="center">✅</td>
-        <td align="center">❌</td>
-        <td align="center">❌</td>
-    </tr>
-    <tr>
-        <td align="center">Funkcija za podjetja (SSO/nadzor dostopa)</td>
-        <td align="center">✅</td>
-        <td align="center">❌</td>
-        <td align="center">❌</td>
-        <td align="center">❌</td>
-    </tr>
-    <tr>
-        <td align="center">Lokalna namestitev</td>
-        <td align="center">✅</td>
-        <td align="center">✅</td>
-        <td align="center">✅</td>
-        <td align="center">❌</td>
-    </tr>
-</table>
-
 ## Uporaba Dify
 
@@ -254,4 +184,4 @@ Zaradi zaščite vaše zasebnosti se izogibajte objavljanju varnostnih vprašanj
 
 ## Licenca
 
 To skladišče je na voljo pod [odprtokodno licenco Dify](LICENSE) , ki je v bistvu Apache 2.0 z nekaj dodatnimi omejitvami.
```
````diff
@@ -21,9 +21,6 @@
     <a href="https://twitter.com/intent/follow?screen_name=dify_ai" target="_blank">
         <img src="https://img.shields.io/twitter/follow/dify_ai?logo=X&color=%20%23f5f5f5"
             alt="X(Twitter)'da takip et"></a>
-    <a href="https://www.linkedin.com/company/langgenius/" target="_blank">
-        <img src="https://custom-icon-badges.demolab.com/badge/LinkedIn-0A66C2?logo=linkedin-white&logoColor=fff"
-            alt="LinkedIn'da takip et"></a>
     <a href="https://hub.docker.com/u/langgenius" target="_blank">
         <img alt="Docker Çekmeleri" src="https://img.shields.io/docker/pulls/langgenius/dify-web?labelColor=%20%23FDB062&color=%20%23f79009"></a>
     <a href="https://github.com/langgenius/dify/graphs/commit-activity" target="_blank">
@@ -65,6 +62,8 @@ Görsel bir arayüz üzerinde güçlü AI iş akışları oluşturun ve test edi
 
 
 
+Özür dilerim, haklısınız. Daha anlamlı ve akıcı bir çeviri yapmaya çalışayım. İşte güncellenmiş çeviri:
+
 **3. Prompt IDE**:
 Komut istemlerini oluşturmak, model performansını karşılaştırmak ve sohbet tabanlı uygulamalara metin-konuşma gibi ek özellikler eklemek için kullanıcı dostu bir arayüz.
 
@@ -151,6 +150,8 @@ Görsel bir arayüz üzerinde güçlü AI iş akışları oluşturun ve test edi
 ## Dify'ı Kullanma
 
 - **Cloud </br>**
+İşte verdiğiniz metnin Türkçe çevirisi, kod bloğu içinde:
+-
 Herkesin sıfır kurulumla denemesi için bir [Dify Cloud](https://dify.ai) hizmeti sunuyoruz. Bu hizmet, kendi kendine dağıtılan versiyonun tüm yeteneklerini sağlar ve sandbox planında 200 ücretsiz GPT-4 çağrısı içerir.
 
 - **Dify Topluluk Sürümünü Kendi Sunucunuzda Barındırma</br>**
@@ -176,6 +177,8 @@ GitHub'da Dify'a yıldız verin ve yeni sürümlerden anında haberdar olun.
 >- RAM >= 4GB
 
 </br>
+İşte verdiğiniz metnin Türkçe çevirisi, kod bloğu içinde:
+
 Dify sunucusunu başlatmanın en kolay yolu, [docker-compose.yml](docker/docker-compose.yaml) dosyamızı çalıştırmaktır. Kurulum komutunu çalıştırmadan önce, makinenizde [Docker](https://docs.docker.com/get-docker/) ve [Docker Compose](https://docs.docker.com/compose/install/)'un kurulu olduğundan emin olun:
 
 ```bash
````
@@ -21,9 +21,6 @@
     <a href="https://twitter.com/intent/follow?screen_name=dify_ai" target="_blank">
         <img src="https://img.shields.io/twitter/follow/dify_ai?logo=X&color=%20%23f5f5f5"
             alt="theo dõi trên X(Twitter)"></a>
-    <a href="https://www.linkedin.com/company/langgenius/" target="_blank">
-        <img src="https://custom-icon-badges.demolab.com/badge/LinkedIn-0A66C2?logo=linkedin-white&logoColor=fff"
-            alt="theo dõi trên LinkedIn"></a>
     <a href="https://hub.docker.com/u/langgenius" target="_blank">
         <img alt="Docker Pulls" src="https://img.shields.io/docker/pulls/langgenius/dify-web?labelColor=%20%23FDB062&color=%20%23f79009"></a>
     <a href="https://github.com/langgenius/dify/graphs/commit-activity" target="_blank">
@@ -1,10 +1,7 @@
 .env
 *.env.*
 
-storage/generate_files/*
 storage/privkeys/*
-storage/tools/*
-storage/upload_files/*
 
 # Logs
 logs

@@ -12,8 +9,6 @@ logs
 
 # jetbrains
 .idea
-.mypy_cache
-.ruff_cache
 
 # venv
 .venv
@@ -23,9 +23,6 @@ FILES_ACCESS_TIMEOUT=300
 # Access token expiration time in minutes
 ACCESS_TOKEN_EXPIRE_MINUTES=60
 
-# Refresh token expiration time in days
-REFRESH_TOKEN_EXPIRE_DAYS=30
-
 # celery configuration
 CELERY_BROKER_URL=redis://:difyai123456@localhost:6379/1
 
@@ -59,27 +56,20 @@ DB_DATABASE=dify
 
 # Storage configuration
 # use for store upload files, private keys...
-# storage type: opendal, s3, aliyun-oss, azure-blob, baidu-obs, google-storage, huawei-obs, oci-storage, tencent-cos, volcengine-tos, supabase
-STORAGE_TYPE=opendal
-# Apache OpenDAL storage configuration, refer to https://github.com/apache/opendal
-OPENDAL_SCHEME=fs
-OPENDAL_FS_ROOT=storage
+# storage type: local, s3, aliyun-oss, azure-blob, baidu-obs, google-storage, huawei-obs, oci-storage, tencent-cos, volcengine-tos, supabase
+STORAGE_TYPE=local
+STORAGE_LOCAL_PATH=storage
 
-# S3 Storage configuration
 S3_USE_AWS_MANAGED_IAM=false
-S3_ENDPOINT=https://your-bucket-name.storage.s3.cloudflare.com
+S3_ENDPOINT=https://your-bucket-name.storage.s3.clooudflare.com
 S3_BUCKET_NAME=your-bucket-name
 S3_ACCESS_KEY=your-access-key
 S3_SECRET_KEY=your-secret-key
 S3_REGION=your-region
 
 # Azure Blob Storage configuration
 AZURE_BLOB_ACCOUNT_NAME=your-account-name
 AZURE_BLOB_ACCOUNT_KEY=your-account-key
-AZURE_BLOB_CONTAINER_NAME=your-container-name
+AZURE_BLOB_CONTAINER_NAME=yout-container-name
 AZURE_BLOB_ACCOUNT_URL=https://<your_account_name>.blob.core.windows.net
 
 # Aliyun oss Storage configuration
 ALIYUN_OSS_BUCKET_NAME=your-bucket-name
 ALIYUN_OSS_ACCESS_KEY=your-access-key
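An aside on the left-hand storage settings above: `OPENDAL_SCHEME=fs` plus `OPENDAL_FS_ROOT=storage` describes a local-filesystem operator. A rough sketch using the `opendal` Python binding; the binding, its `Operator` API, and the example paths are assumptions of this note, not anything the diff configures:

```python
# Minimal sketch: the 'fs' scheme rooted at ./storage behaves like plain
# local-file storage, which is why it can replace STORAGE_TYPE=local.
import opendal

op = opendal.Operator("fs", root="storage")
op.write("upload_files/hello.txt", b"hi")  # lands in storage/upload_files/hello.txt
print(op.read("upload_files/hello.txt"))   # b'hi'
```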
@@ -89,9 +79,8 @@ ALIYUN_OSS_AUTH_VERSION=v1
 ALIYUN_OSS_REGION=your-region
 # Don't start with '/'. OSS doesn't support leading slash in object names.
 ALIYUN_OSS_PATH=your-path
 
 # Google Storage configuration
-GOOGLE_STORAGE_BUCKET_NAME=your-bucket-name
+GOOGLE_STORAGE_BUCKET_NAME=yout-bucket-name
 GOOGLE_STORAGE_SERVICE_ACCOUNT_JSON_BASE64=your-google-service-account-json-base64-string
 
 # Tencent COS Storage configuration
@@ -136,8 +125,8 @@ SUPABASE_URL=your-server-url
 WEB_API_CORS_ALLOW_ORIGINS=http://127.0.0.1:3000,*
 CONSOLE_CORS_ALLOW_ORIGINS=http://127.0.0.1:3000,*
 
-# Vector database configuration
-# support: weaviate, qdrant, milvus, myscale, relyt, pgvecto_rs, pgvector, pgvector, chroma, opensearch, tidb_vector, couchbase, vikingdb, upstash, lindorm, oceanbase
+# Vector database configuration, support: weaviate, qdrant, milvus, myscale, relyt, pgvecto_rs, pgvector, pgvector, chroma, opensearch, tidb_vector, couchbase, vikingdb, upstash, lindorm, oceanbase
 VECTOR_STORE=weaviate
 
 # Weaviate configuration
@@ -288,7 +277,6 @@ VIKINGDB_SOCKET_TIMEOUT=30
 LINDORM_URL=http://ld-*******************-proxy-search-pub.lindorm.aliyuncs.com:30070
 LINDORM_USERNAME=admin
 LINDORM_PASSWORD=admin
-USING_UGC_INDEX=False
 
 # OceanBase Vector configuration
 OCEANBASE_VECTOR_HOST=127.0.0.1
@@ -307,7 +295,8 @@ UPLOAD_VIDEO_FILE_SIZE_LIMIT=100
 UPLOAD_AUDIO_FILE_SIZE_LIMIT=50
 
 # Model configuration
-MULTIMODAL_SEND_FORMAT=base64
+MULTIMODAL_SEND_IMAGE_FORMAT=base64
+MULTIMODAL_SEND_VIDEO_FORMAT=base64
 PROMPT_GENERATION_MAX_TOKENS=512
 CODE_GENERATION_MAX_TOKENS=1024
 
@@ -392,8 +381,6 @@ LOG_FILE_BACKUP_COUNT=5
 LOG_DATEFORMAT=%Y-%m-%d %H:%M:%S
 # Log Timezone
 LOG_TZ=UTC
-# Log format
-LOG_FORMAT=%(asctime)s,%(msecs)d %(levelname)-2s [%(filename)s:%(lineno)d] %(req_id)s %(message)s
 
 # Indexing configuration
 INDEXING_MAX_SEGMENTATION_TOKENS_LENGTH=4000
@@ -402,13 +389,13 @@ INDEXING_MAX_SEGMENTATION_TOKENS_LENGTH=4000
 WORKFLOW_MAX_EXECUTION_STEPS=500
 WORKFLOW_MAX_EXECUTION_TIME=1200
 WORKFLOW_CALL_MAX_DEPTH=5
-WORKFLOW_PARALLEL_DEPTH_LIMIT=3
 MAX_VARIABLE_SIZE=204800
 
 # App configuration
 APP_MAX_EXECUTION_TIME=1200
 APP_MAX_ACTIVE_REQUESTS=0
 
+
 # Celery beat configuration
 CELERY_BEAT_SCHEDULER_TIME=1
 
@@ -421,28 +408,9 @@ POSITION_PROVIDER_PINS=
 POSITION_PROVIDER_INCLUDES=
 POSITION_PROVIDER_EXCLUDES=
 
-# Plugin configuration
-PLUGIN_DAEMON_KEY=lYkiYYT6owG+71oLerGzA7GXCgOT++6ovaezWAjpCjf+Sjc3ZtU+qUEi
-PLUGIN_DAEMON_URL=http://127.0.0.1:5002
-PLUGIN_REMOTE_INSTALL_PORT=5003
-PLUGIN_REMOTE_INSTALL_HOST=localhost
-PLUGIN_MAX_PACKAGE_SIZE=15728640
-INNER_API_KEY=QaHbTe77CtuXmsfyhR7+vRjI/+XbV1AaFy691iy+kGDv2Jvy0/eAh8Y1
-INNER_API_KEY_FOR_PLUGIN=QaHbTe77CtuXmsfyhR7+vRjI/+XbV1AaFy691iy+kGDv2Jvy0/eAh8Y1
-
-# Marketplace configuration
-MARKETPLACE_ENABLED=true
-MARKETPLACE_API_URL=https://marketplace.dify.ai
-
-# Endpoint configuration
-ENDPOINT_URL_TEMPLATE=http://localhost:5002/e/{hook_id}
-
 # Reset password token expiry minutes
 RESET_PASSWORD_TOKEN_EXPIRY_MINUTES=5
 
 CREATE_TIDB_SERVICE_JOB_ENABLED=false
 
-# Maximum number of submitted thread count in a ThreadPool for parallel node execution
-MAX_SUBMIT_COUNT=100
-# Lockout duration in seconds
-LOGIN_LOCKOUT_DURATION=86400
+RETRIEVAL_TOP_N=0
@@ -20,8 +20,6 @@ select = [
     "PLC0208", # iteration-over-set
     "PLC2801", # unnecessary-dunder-call
     "PLC0414", # useless-import-alias
-    "PLE0604", # invalid-all-object
-    "PLE0605", # invalid-all-format
     "PLR0402", # manual-from-import
     "PLR1711", # useless-return
     "PLR1714", # repeated-equality-comparison
@@ -30,7 +28,6 @@ select = [
     "RUF100", # unused-noqa
     "RUF101", # redirected-noqa
     "RUF200", # invalid-pyproject-toml
-    "RUF022", # unsorted-dunder-all
     "S506", # unsafe-yaml-load
     "SIM", # flake8-simplify rules
     "TRY400", # error-instead-of-exception
@@ -53,12 +50,10 @@ ignore = [
    "FURB152", # math-constant
    "UP007", # non-pep604-annotation
    "UP032", # f-string
-   "UP045", # non-pep604-annotation-optional
    "B005", # strip-with-multi-characters
    "B006", # mutable-argument-default
    "B007", # unused-loop-control-variable
    "B026", # star-arg-unpacking-after-keyword-arg
-   "B903", # class-as-data-structure
    "B904", # raise-without-from-inside-except
    "B905", # zip-without-explicit-strict
    "N806", # non-lowercase-variable-in-function
@@ -69,9 +64,10 @@ ignore = [
    "SIM105", # suppressible-exception
    "SIM107", # return-in-try-except-finally
    "SIM108", # if-else-block-instead-of-if-exp
-   "SIM113", # enumerate-for-loop
+   "SIM113", # eumerate-for-loop
    "SIM117", # multiple-with-statements
    "SIM210", # if-expr-with-true-false
+   "SIM300", # yoda-conditions,
 ]
 
 [lint.per-file-ignores]
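For context on `SIM300` ("yoda-conditions"), newly ignored above, a tiny illustration of the pattern it flags; the example is mine, not from the diff:

```python
# SIM300 flags comparisons written with the constant on the left.
age = 42

if 42 == age:  # flagged by SIM300
    print("yoda")

if age == 42:  # preferred spelling
    print("plain")
```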
@@ -87,11 +83,11 @@ ignore = [
 ]
 "tests/*" = [
    "F811", # redefined-while-unused
+   "F401", # unused-import
 ]
 
 [lint.pyflakes]
-allowed-unused-imports = [
+extend-generics = [
    "_pytest.monkeypatch",
    "tests.integration_tests",
-   "tests.unit_tests",
 ]
@@ -4,7 +4,7 @@ FROM python:3.12-slim-bookworm AS base
 WORKDIR /app/api
 
 # Install Poetry
-ENV POETRY_VERSION=2.0.1
+ENV POETRY_VERSION=1.8.4
 
 # if you located in China, you can use aliyun mirror to speed up
 # RUN pip install --no-cache-dir poetry==${POETRY_VERSION} -i https://mirrors.aliyun.com/pypi/simple/

@@ -48,20 +48,16 @@ ENV TZ=UTC
 
 WORKDIR /app/api
 
-RUN \
-    apt-get update \
-    # Install dependencies
-    && apt-get install -y --no-install-recommends \
-    # basic environment
-    curl nodejs libgmp-dev libmpfr-dev libmpc-dev \
+RUN apt-get update \
+    && apt-get install -y --no-install-recommends curl nodejs libgmp-dev libmpfr-dev libmpc-dev \
+    # if you located in China, you can use aliyun mirror to speed up
+    # && echo "deb http://mirrors.aliyun.com/debian testing main" > /etc/apt/sources.list \
+    && echo "deb http://deb.debian.org/debian testing main" > /etc/apt/sources.list \
+    && apt-get update \
     # For Security
-    expat libldap-2.5-0 perl libsqlite3-0 zlib1g \
+    && apt-get install -y --no-install-recommends expat=2.6.4-1 libldap-2.5-0=2.5.18+dfsg-3+b1 perl=5.40.0-8 libsqlite3-0=3.46.1-1 zlib1g=1:1.3.dfsg+really1.3.1-1+b1 \
     # install a chinese font to support the use of tools like matplotlib
-    fonts-noto-cjk \
-    # install a package to improve the accuracy of guessing mime type and file extension
-    media-types \
-    # install libmagic to support the use of python-magic guess MIMETYPE
-    libmagic1 \
+    && apt-get install -y fonts-noto-cjk \
     && apt-get autoremove -y \
     && rm -rf /var/lib/apt/lists/*
 

@@ -73,10 +69,6 @@ ENV PATH="${VIRTUAL_ENV}/bin:${PATH}"
 # Download nltk data
 RUN python -c "import nltk; nltk.download('punkt'); nltk.download('averaged_perceptron_tagger')"
 
-ENV TIKTOKEN_CACHE_DIR=/app/api/.tiktoken_cache
-
-RUN python -c "import tiktoken; tiktoken.encoding_for_model('gpt2')"
-
 # Copy source code
 COPY . /app/api/
 

@@ -84,6 +76,7 @@ COPY . /app/api/
 COPY docker/entrypoint.sh /entrypoint.sh
 RUN chmod +x /entrypoint.sh
 
+
 ARG COMMIT_SHA
 ENV COMMIT_SHA=${COMMIT_SHA}
 
@@ -37,13 +37,7 @@
 
 4. Create environment.
 
-   Dify API service uses [Poetry](https://python-poetry.org/docs/) to manage dependencies. First, you need to add the poetry shell plugin, if you don't have it already, in order to run in a virtual environment. [Note: Poetry shell is no longer a native command so you need to install the poetry plugin beforehand]
-
-   ```bash
-   poetry self add poetry-plugin-shell
-   ```
-
-   Then, you can execute `poetry shell` to activate the environment.
+   Dify API service uses [Poetry](https://python-poetry.org/docs/) to manage dependencies. You can execute `poetry shell` to activate the environment.
 
 5. Install dependencies
 

@@ -85,5 +79,5 @@
 2. Run the tests locally with mocked system environment variables in `tool.pytest_env` section in `pyproject.toml`
 
    ```bash
-   poetry run -P api bash dev/pytest/pytest_all_tests.sh
+   poetry run -C api bash dev/pytest/pytest_all_tests.sh
    ```
api/app.py (42 changes)
@@ -1,41 +1,13 @@
-import os
-import sys
+from app_factory import create_app
+from libs import threadings_utils, version_utils
 
-
-def is_db_command():
-    if len(sys.argv) > 1 and sys.argv[0].endswith("flask") and sys.argv[1] == "db":
-        return True
-    return False
-
+# preparation before creating app
+version_utils.check_supported_python_version()
+threadings_utils.apply_gevent_threading_patch()
 
 # create app
-if is_db_command():
-    from app_factory import create_migrations_app
-
-    app = create_migrations_app()
-else:
-    # It seems that JetBrains Python debugger does not work well with gevent,
-    # so we need to disable gevent in debug mode.
-    # If you are using debugpy and set GEVENT_SUPPORT=True, you can debug with gevent.
-    if (flask_debug := os.environ.get("FLASK_DEBUG", "0")) and flask_debug.lower() in {"false", "0", "no"}:
-        from gevent import monkey  # type: ignore
-
-        # gevent
-        monkey.patch_all()
-
-        from grpc.experimental import gevent as grpc_gevent  # type: ignore
-
-        # grpc gevent
-        grpc_gevent.init_gevent()
-
-        import psycogreen.gevent  # type: ignore
-
-        psycogreen.gevent.patch_psycopg()
-
-    from app_factory import create_app
-
-    app = create_app()
-    celery = app.extensions["celery"]
+app = create_app()
+celery = app.extensions["celery"]
 
 if __name__ == "__main__":
    app.run(host="0.0.0.0", port=5001)
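The removed gevent branch above is order-sensitive: `monkey.patch_all()` only helps if it runs before any module imports `socket`, `ssl`, or `threading`. A minimal standalone sketch of that constraint, assuming stock gevent behavior:

```python
from gevent import monkey

monkey.patch_all()  # must run before anything grabs the blocking stdlib primitives

import socket       # imported after patching, so it sees the cooperative version
import gevent.socket

# After patch_all(), the stdlib name typically points at gevent's implementation.
print(socket.socket is gevent.socket.socket)  # True once patched
```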
@@ -1,8 +1,8 @@
 import logging
+import os
 import time
 
 from configs import dify_config
-from contexts.wrapper import RecyclableContextVar
 from dify_app import DifyApp
 
 

@@ -17,11 +17,14 @@ def create_flask_app_with_configs() -> DifyApp:
     dify_app = DifyApp(__name__)
     dify_app.config.from_mapping(dify_config.model_dump())
 
-    # add before request hook
-    @dify_app.before_request
-    def before_request():
-        # add an unique identifier to each request
-        RecyclableContextVar.increment_thread_recycles()
+    # populate configs into system environment variables
+    for key, value in dify_app.config.items():
+        if isinstance(value, str):
+            os.environ[key] = value
+        elif isinstance(value, int | float | bool):
+            os.environ[key] = str(value)
+        elif value is None:
+            os.environ[key] = ""
 
     return dify_app
 
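One property of the environment-population loop on the right-hand side is worth spelling out: `os.environ` holds only strings, and the branch chain silently skips values that are not `str`, `int`, `float`, `bool`, or `None` (lists and nested models, for example). A standalone sketch demonstrating that, with made-up config values:

```python
import os

# Invented sample config; the int | float | bool union syntax needs Python 3.10+.
config = {"DEBUG": True, "PORT": 5001, "SECRET": None, "ORIGINS": ["a", "b"]}

for key, value in config.items():
    if isinstance(value, str):
        os.environ[key] = value
    elif isinstance(value, int | float | bool):
        os.environ[key] = str(value)  # os.environ accepts only strings
    elif value is None:
        os.environ[key] = ""
    # anything else (e.g. the ORIGINS list) is silently skipped

print(os.environ["DEBUG"], repr(os.environ["SECRET"]), "ORIGINS" in os.environ)
# -> True '' False
```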
@@ -95,14 +98,3 @@ def initialize_extensions(app: DifyApp):
         end_time = time.perf_counter()
         if dify_config.DEBUG:
             logging.info(f"Loaded {short_name} ({round((end_time - start_time) * 1000, 2)} ms)")
-
-
-def create_migrations_app():
-    app = create_flask_app_with_configs()
-    from extensions import ext_database, ext_migrate
-
-    # Initialize only required extensions
-    ext_database.init_app(app)
-    ext_migrate.init_app(app)
-
-    return app
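The removed `create_migrations_app()` pairs with the `is_db_command()` branch removed from `api/app.py` above: `flask db ...` commands get a slim app with only the database and migration extensions initialized. A hypothetical driver for that helper, reusing only names visible in the removed lines:

```python
# Hypothetical usage sketch; create_migrations_app and flask_migrate are the
# names used in the removed code, the driver itself is not from the diff.
from app_factory import create_migrations_app

app = create_migrations_app()
with app.app_context():
    import flask_migrate

    flask_migrate.upgrade()  # apply pending Alembic migrations, nothing else
```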
api/commands.py (109 changes)
@@ -25,8 +25,6 @@ from models.dataset import Document as DatasetDocument
 from models.model import Account, App, AppAnnotationSetting, AppMode, Conversation, MessageAnnotation
 from models.provider import Provider, ProviderModel
 from services.account_service import RegisterService, TenantService
-from services.plugin.data_migration import PluginDataMigration
-from services.plugin.plugin_migration import PluginMigration
 
 
 @click.command("reset-password", help="Reset the account password.")

@@ -161,7 +159,8 @@ def migrate_annotation_vector_database():
     try:
         # get apps info
         apps = (
-            App.query.filter(App.status == "normal")
+            db.session.query(App)
+            .filter(App.status == "normal")
             .order_by(App.created_at.desc())
             .paginate(page=page, per_page=50)
         )
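Both spellings in the hunk above yield a paginated query; the change merely swaps the legacy `Model.query` accessor for `db.session.query(Model)`. A hedged sketch of the surrounding iteration pattern, where the loop scaffolding is mine and `db`/`App` are the objects from this file:

```python
page = 1
while True:
    apps = (
        db.session.query(App)
        .filter(App.status == "normal")
        .order_by(App.created_at.desc())
        .paginate(page=page, per_page=50)  # Flask-SQLAlchemy Pagination object
    )
    if not apps.items:  # empty page: the whole table has been walked
        break
    page += 1
```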
@@ -260,7 +259,7 @@ def migrate_knowledge_vector_database():
     skipped_count = 0
     total_count = 0
     vector_type = dify_config.VECTOR_STORE
-    upper_collection_vector_types = {
+    upper_colletion_vector_types = {
         VectorType.MILVUS,
         VectorType.PGVECTOR,
         VectorType.RELYT,

@@ -268,7 +267,7 @@ def migrate_knowledge_vector_database():
         VectorType.ORACLE,
         VectorType.ELASTICSEARCH,
     }
-    lower_collection_vector_types = {
+    lower_colletion_vector_types = {
         VectorType.ANALYTICDB,
         VectorType.CHROMA,
         VectorType.MYSCALE,

@@ -286,7 +285,8 @@ def migrate_knowledge_vector_database():
     while True:
         try:
             datasets = (
-                Dataset.query.filter(Dataset.indexing_technique == "high_quality")
+                db.session.query(Dataset)
+                .filter(Dataset.indexing_technique == "high_quality")
                 .order_by(Dataset.created_at.desc())
                 .paginate(page=page, per_page=50)
             )

@@ -307,7 +307,7 @@ def migrate_knowledge_vector_database():
                 continue
             collection_name = ""
             dataset_id = dataset.id
-            if vector_type in upper_collection_vector_types:
+            if vector_type in upper_colletion_vector_types:
                 collection_name = Dataset.gen_collection_name_by_id(dataset_id)
             elif vector_type == VectorType.QDRANT:
                 if dataset.collection_binding_id:

@@ -323,7 +323,7 @@ def migrate_knowledge_vector_database():
             else:
                 collection_name = Dataset.gen_collection_name_by_id(dataset_id)
 
-            elif vector_type in lower_collection_vector_types:
+            elif vector_type in lower_colletion_vector_types:
                 collection_name = Dataset.gen_collection_name_by_id(dataset_id).lower()
             else:
                 raise ValueError(f"Vector store {vector_type} is not supported.")
@@ -450,8 +450,7 @@ def convert_to_agent_apps():
             if app_id not in proceeded_app_ids:
                 proceeded_app_ids.append(app_id)
                 app = db.session.query(App).filter(App.id == app_id).first()
-                if app is not None:
-                    apps.append(app)
+                apps.append(app)
 
         if len(apps) == 0:
             break

@@ -526,7 +525,7 @@ def add_qdrant_doc_id_index(field: str):
                     )
                 )
 
-        except Exception:
+        except Exception as e:
             click.echo(click.style("Failed to create Qdrant client.", fg="red"))
 
     click.echo(click.style(f"Index creation complete. Created {create_count} collection indexes.", fg="green"))

@@ -556,20 +555,14 @@ def create_tenant(email: str, language: Optional[str] = None, name: Optional[str
     if language not in languages:
         language = "en-US"
 
-    # Validates name encoding for non-Latin characters.
-    name = name.strip().encode("utf-8").decode("utf-8") if name else None
+    name = name.strip()
 
     # generate random password
     new_password = secrets.token_urlsafe(16)
 
     # register account
-    account = RegisterService.register(
-        email=email,
-        name=account_name,
-        password=new_password,
-        language=language,
-        create_workspace_required=False,
-    )
+    account = RegisterService.register(email=email, name=account_name, password=new_password, language=language)
     TenantService.create_owner_tenant_if_not_exist(account, name)
 
     click.echo(

@@ -589,13 +582,13 @@ def upgrade_db():
         click.echo(click.style("Starting database migration.", fg="green"))
 
         # run db migration
-        import flask_migrate  # type: ignore
+        import flask_migrate
 
         flask_migrate.upgrade()
 
         click.echo(click.style("Database migration successful!", fg="green"))
 
-    except Exception:
+    except Exception as e:
         logging.exception("Failed to execute database migration")
     finally:
         lock.release()

@@ -627,10 +620,6 @@ where sites.id is null limit 1000"""
 
             try:
                 app = db.session.query(App).filter(App.id == app_id).first()
-                if not app:
-                    print(f"App {app_id} not found")
-                    continue
-
                 tenant = app.tenant
                 if tenant:
                     accounts = tenant.get_accounts()

@@ -641,7 +630,7 @@ where sites.id is null limit 1000"""
                     account = accounts[0]
                     print("Fixing missing site for app {}".format(app.id))
                     app_was_created.send(app, account=account)
-            except Exception:
+            except Exception as e:
                 failed_app_ids.append(app_id)
                 click.echo(click.style("Failed to fix missing site for app {}".format(app_id), fg="red"))
                 logging.exception(f"Failed to fix app related site missing issue, app_id: {app_id}")

@@ -651,69 +640,3 @@ where sites.id is null limit 1000"""
                 break
 
     click.echo(click.style("Fix for missing app-related sites completed successfully!", fg="green"))
-
-
-@click.command("migrate-data-for-plugin", help="Migrate data for plugin.")
-def migrate_data_for_plugin():
-    """
-    Migrate data for plugin.
-    """
-    click.echo(click.style("Starting migrate data for plugin.", fg="white"))
-
-    PluginDataMigration.migrate()
-
-    click.echo(click.style("Migrate data for plugin completed.", fg="green"))
-
-
-@click.command("extract-plugins", help="Extract plugins.")
-@click.option("--output_file", prompt=True, help="The file to store the extracted plugins.", default="plugins.jsonl")
-@click.option("--workers", prompt=True, help="The number of workers to extract plugins.", default=10)
-def extract_plugins(output_file: str, workers: int):
-    """
-    Extract plugins.
-    """
-    click.echo(click.style("Starting extract plugins.", fg="white"))
-
-    PluginMigration.extract_plugins(output_file, workers)
-
-    click.echo(click.style("Extract plugins completed.", fg="green"))
-
-
-@click.command("extract-unique-identifiers", help="Extract unique identifiers.")
-@click.option(
-    "--output_file",
-    prompt=True,
-    help="The file to store the extracted unique identifiers.",
-    default="unique_identifiers.json",
-)
-@click.option(
-    "--input_file", prompt=True, help="The file to store the extracted unique identifiers.", default="plugins.jsonl"
-)
-def extract_unique_plugins(output_file: str, input_file: str):
-    """
-    Extract unique plugins.
-    """
-    click.echo(click.style("Starting extract unique plugins.", fg="white"))
-
-    PluginMigration.extract_unique_plugins_to_file(input_file, output_file)
-
-    click.echo(click.style("Extract unique plugins completed.", fg="green"))
-
-
-@click.command("install-plugins", help="Install plugins.")
-@click.option(
-    "--input_file", prompt=True, help="The file to store the extracted unique identifiers.", default="plugins.jsonl"
-)
-@click.option(
-    "--output_file", prompt=True, help="The file to store the installed plugins.", default="installed_plugins.jsonl"
-)
-@click.option("--workers", prompt=True, help="The number of workers to install plugins.", default=100)
-def install_plugins(input_file: str, output_file: str, workers: int):
-    """
-    Install plugins.
-    """
-    click.echo(click.style("Starting install plugins.", fg="white"))
-
-    PluginMigration.install_plugins(input_file, output_file, workers)
-
-    click.echo(click.style("Install plugins completed.", fg="green"))
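The removed plugin commands above all follow the same click pattern. For readers unfamiliar with it, a self-contained sketch (a standalone example, not a Dify entry point):

```python
import click


@click.command("extract-plugins", help="Extract plugins.")
@click.option("--output_file", prompt=True, default="plugins.jsonl",
              help="The file to store the extracted plugins.")
@click.option("--workers", prompt=True, default=10,
              help="The number of workers to extract plugins.")
def extract_plugins(output_file: str, workers: int):
    # prompt=True asks interactively for any option not supplied on the CLI
    click.echo(click.style(f"Extracting to {output_file} with {workers} workers.", fg="white"))


if __name__ == "__main__":
    extract_plugins()
```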
@@ -1,51 +1,11 @@
|
|||||||
import logging
|
from pydantic_settings import SettingsConfigDict
|
||||||
from typing import Any
|
|
||||||
|
|
||||||
from pydantic.fields import FieldInfo
|
from configs.deploy import DeploymentConfig
|
||||||
from pydantic_settings import BaseSettings, PydanticBaseSettingsSource, SettingsConfigDict
|
from configs.enterprise import EnterpriseFeatureConfig
|
||||||
|
from configs.extra import ExtraServiceConfig
|
||||||
from .deploy import DeploymentConfig
|
from configs.feature import FeatureConfig
|
||||||
from .enterprise import EnterpriseFeatureConfig
|
from configs.middleware import MiddlewareConfig
|
||||||
from .extra import ExtraServiceConfig
|
from configs.packaging import PackagingInfo
|
||||||
from .feature import FeatureConfig
|
|
||||||
from .middleware import MiddlewareConfig
|
|
||||||
from .packaging import PackagingInfo
|
|
||||||
from .remote_settings_sources import RemoteSettingsSource, RemoteSettingsSourceConfig, RemoteSettingsSourceName
|
|
||||||
from .remote_settings_sources.apollo import ApolloSettingsSource
|
|
||||||
|
|
||||||
logger = logging.getLogger(__name__)
|
|
||||||
|
|
||||||
|
|
||||||
class RemoteSettingsSourceFactory(PydanticBaseSettingsSource):
|
|
||||||
def __init__(self, settings_cls: type[BaseSettings]):
|
|
||||||
super().__init__(settings_cls)
|
|
||||||
|
|
||||||
def get_field_value(self, field: FieldInfo, field_name: str) -> tuple[Any, str, bool]:
|
|
||||||
raise NotImplementedError
|
|
||||||
|
|
||||||
def __call__(self) -> dict[str, Any]:
|
|
||||||
current_state = self.current_state
|
|
||||||
remote_source_name = current_state.get("REMOTE_SETTINGS_SOURCE_NAME")
|
|
||||||
if not remote_source_name:
|
|
||||||
return {}
|
|
||||||
|
|
||||||
remote_source: RemoteSettingsSource | None = None
|
|
||||||
match remote_source_name:
|
|
||||||
case RemoteSettingsSourceName.APOLLO:
|
|
||||||
remote_source = ApolloSettingsSource(current_state)
|
|
||||||
case _:
|
|
||||||
logger.warning(f"Unsupported remote source: {remote_source_name}")
|
|
||||||
return {}
|
|
||||||
|
|
||||||
d: dict[str, Any] = {}
|
|
||||||
|
|
||||||
for field_name, field in self.settings_cls.model_fields.items():
|
|
||||||
field_value, field_key, value_is_complex = remote_source.get_field_value(field, field_name)
|
|
||||||
field_value = remote_source.prepare_field_value(field_name, field, field_value, value_is_complex)
|
|
||||||
if field_value is not None:
|
|
||||||
d[field_key] = field_value
|
|
||||||
|
|
||||||
return d
|
|
||||||
|
|
||||||
|
|
||||||
class DifyConfig(
|
class DifyConfig(
|
||||||
@@ -59,8 +19,6 @@ class DifyConfig(
|
|||||||
MiddlewareConfig,
|
MiddlewareConfig,
|
||||||
# Extra service configs
|
# Extra service configs
|
||||||
ExtraServiceConfig,
|
ExtraServiceConfig,
|
||||||
# Remote source configs
|
|
||||||
RemoteSettingsSourceConfig,
|
|
||||||
# Enterprise feature configs
|
# Enterprise feature configs
|
||||||
# **Before using, please contact business@dify.ai by email to inquire about licensing matters.**
|
# **Before using, please contact business@dify.ai by email to inquire about licensing matters.**
|
||||||
EnterpriseFeatureConfig,
|
EnterpriseFeatureConfig,
|
||||||
@@ -77,20 +35,3 @@ class DifyConfig(
|
|||||||
# please consider to arrange it in the proper config group of existed or added
|
# please consider to arrange it in the proper config group of existed or added
|
||||||
# for better readability and maintainability.
|
# for better readability and maintainability.
|
||||||
# Thanks for your concentration and consideration.
|
# Thanks for your concentration and consideration.
|
||||||
|
|
||||||
@classmethod
|
|
||||||
def settings_customise_sources(
|
|
||||||
cls,
|
|
||||||
settings_cls: type[BaseSettings],
|
|
||||||
init_settings: PydanticBaseSettingsSource,
|
|
||||||
env_settings: PydanticBaseSettingsSource,
|
|
||||||
dotenv_settings: PydanticBaseSettingsSource,
|
|
||||||
file_secret_settings: PydanticBaseSettingsSource,
|
|
||||||
) -> tuple[PydanticBaseSettingsSource, ...]:
|
|
||||||
return (
|
|
||||||
init_settings,
|
|
||||||
env_settings,
|
|
||||||
RemoteSettingsSourceFactory(settings_cls),
|
|
||||||
dotenv_settings,
|
|
||||||
file_secret_settings,
|
|
||||||
)
|
|
||||||
|
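In the removed `settings_customise_sources` above, ordering matters: pydantic-settings gives precedence to sources earlier in the returned tuple, so placing the remote factory after `env_settings` lets real environment variables override remotely fetched values. A minimal sketch of the hook with the remote source omitted, using the standard pydantic-settings API:

```python
from pydantic_settings import BaseSettings, PydanticBaseSettingsSource


class Settings(BaseSettings):
    APP_NAME: str = "dify"

    @classmethod
    def settings_customise_sources(
        cls,
        settings_cls: type[BaseSettings],
        init_settings: PydanticBaseSettingsSource,
        env_settings: PydanticBaseSettingsSource,
        dotenv_settings: PydanticBaseSettingsSource,
        file_secret_settings: PydanticBaseSettingsSource,
    ) -> tuple[PydanticBaseSettingsSource, ...]:
        # earlier entries win: init args beat env vars, env vars beat .env files
        return (init_settings, env_settings, dotenv_settings, file_secret_settings)


print(Settings(APP_NAME="override").APP_NAME)  # 'override' regardless of env
```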
@@ -134,60 +134,6 @@ class CodeExecutionSandboxConfig(BaseSettings):
     )
 
 
-class PluginConfig(BaseSettings):
-    """
-    Plugin configs
-    """
-
-    PLUGIN_DAEMON_URL: HttpUrl = Field(
-        description="Plugin API URL",
-        default="http://localhost:5002",
-    )
-
-    PLUGIN_DAEMON_KEY: str = Field(
-        description="Plugin API key",
-        default="plugin-api-key",
-    )
-
-    INNER_API_KEY_FOR_PLUGIN: str = Field(description="Inner api key for plugin", default="inner-api-key")
-
-    PLUGIN_REMOTE_INSTALL_HOST: str = Field(
-        description="Plugin Remote Install Host",
-        default="localhost",
-    )
-
-    PLUGIN_REMOTE_INSTALL_PORT: PositiveInt = Field(
-        description="Plugin Remote Install Port",
-        default=5003,
-    )
-
-    PLUGIN_MAX_PACKAGE_SIZE: PositiveInt = Field(
-        description="Maximum allowed size for plugin packages in bytes",
-        default=15728640,
-    )
-
-    PLUGIN_MAX_BUNDLE_SIZE: PositiveInt = Field(
-        description="Maximum allowed size for plugin bundles in bytes",
-        default=15728640 * 12,
-    )
-
-
-class MarketplaceConfig(BaseSettings):
-    """
-    Configuration for marketplace
-    """
-
-    MARKETPLACE_ENABLED: bool = Field(
-        description="Enable or disable marketplace",
-        default=True,
-    )
-
-    MARKETPLACE_API_URL: HttpUrl = Field(
-        description="Marketplace API URL",
-        default="https://marketplace.dify.ai",
-    )
-
-
 class EndpointConfig(BaseSettings):
     """
     Configuration for various application endpoints and URLs
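Each removed config group is a `pydantic_settings.BaseSettings` subclass, so every field can be overridden by an environment variable of the same name. A hedged standalone sketch of that mechanism; the class name and values here are invented:

```python
import os

from pydantic import Field
from pydantic_settings import BaseSettings


class PluginConfigSketch(BaseSettings):
    PLUGIN_DAEMON_URL: str = Field(default="http://localhost:5002")
    PLUGIN_MAX_PACKAGE_SIZE: int = Field(default=15728640)


os.environ["PLUGIN_MAX_PACKAGE_SIZE"] = "1048576"
print(PluginConfigSketch().PLUGIN_MAX_PACKAGE_SIZE)  # 1048576, parsed from the env var
```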
@@ -200,7 +146,7 @@ class EndpointConfig(BaseSettings):
     )
 
     CONSOLE_WEB_URL: str = Field(
-        description="Base URL for the console web interface,used for frontend references and CORS configuration",
+        description="Base URL for the console web interface," "used for frontend references and CORS configuration",
         default="",
     )
 
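The two sides differ only in whether the description is one literal or two adjacent literals. Python concatenates adjacent string literals implicitly, so the value is identical either way, missing space included; a quick check (example mine):

```python
joined = "Base URL for the console web interface," "used for frontend references"
single = "Base URL for the console web interface,used for frontend references"
assert joined == single  # implicit concatenation inserts nothing between literals
```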
@@ -214,10 +160,6 @@ class EndpointConfig(BaseSettings):
         default="",
     )
 
-    ENDPOINT_URL_TEMPLATE: str = Field(
-        description="Template url for endpoint plugin", default="http://localhost:5002/e/{hook_id}"
-    )
-
 
 class FileAccessConfig(BaseSettings):
     """
@@ -297,6 +239,7 @@ class HttpConfig(BaseSettings):
     )
 
     @computed_field
+    @property
     def CONSOLE_CORS_ALLOW_ORIGINS(self) -> list[str]:
         return self.inner_CONSOLE_CORS_ALLOW_ORIGINS.split(",")
 
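The added `@property` line produces Pydantic v2's documented stacking, `@computed_field` directly above `@property`: the attribute stays a plain property on access while still appearing in serialization. A minimal sketch with invented names:

```python
from pydantic import BaseModel, computed_field


class CorsSketch(BaseModel):
    inner_origins: str = "http://127.0.0.1:3000,*"

    @computed_field  # included in model_dump() / model_dump_json()
    @property
    def origins(self) -> list[str]:
        return self.inner_origins.split(",")


print(CorsSketch().origins)       # plain property access
print(CorsSketch().model_dump())  # includes 'origins' alongside 'inner_origins'
```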
@@ -307,6 +250,7 @@ class HttpConfig(BaseSettings):
     )
 
     @computed_field
+    @property
     def WEB_API_CORS_ALLOW_ORIGINS(self) -> list[str]:
         return self.inner_WEB_API_CORS_ALLOW_ORIGINS.split(",")
 
@@ -373,8 +317,8 @@ class HttpConfig(BaseSettings):
     )
 
     RESPECT_XFORWARD_HEADERS_ENABLED: bool = Field(
-        description="Enable handling of X-Forwarded-For, X-Forwarded-Proto, and X-Forwarded-Port headers"
-        " when the app is behind a single trusted reverse proxy.",
+        description="Enable or disable the X-Forwarded-For Proxy Fix middleware from Werkzeug"
+        " to respect X-* headers to redirect clients",
         default=False,
     )
 
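Both descriptions refer to the same mechanism: Werkzeug's `ProxyFix` middleware, which rewrites the WSGI environ from `X-Forwarded-*` headers sent by a trusted reverse proxy. A hedged sketch of the standard wiring; Dify's own wiring is not shown in this diff:

```python
from flask import Flask
from werkzeug.middleware.proxy_fix import ProxyFix

app = Flask(__name__)
# Trust exactly one proxy hop per header; counts above the real number of
# proxies would let clients spoof their apparent origin.
app.wsgi_app = ProxyFix(app.wsgi_app, x_for=1, x_proto=1, x_port=1)
```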
@@ -489,28 +433,12 @@ class WorkflowConfig(BaseSettings):
         default=5,
     )
 
-    WORKFLOW_PARALLEL_DEPTH_LIMIT: PositiveInt = Field(
-        description="Maximum allowed depth for nested parallel executions",
-        default=3,
-    )
-
     MAX_VARIABLE_SIZE: PositiveInt = Field(
         description="Maximum size in bytes for a single variable in workflows. Default to 200 KB.",
         default=200 * 1024,
     )
 
 
-class WorkflowNodeExecutionConfig(BaseSettings):
-    """
-    Configuration for workflow node execution
-    """
-
-    MAX_SUBMIT_COUNT: PositiveInt = Field(
-        description="Maximum number of submitted thread count in a ThreadPool for parallel node execution",
-        default=100,
-    )
-
-
 class AuthConfig(BaseSettings):
     """
     Configuration for authentication and OAuth
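`MAX_SUBMIT_COUNT` in the removed class bounds how many tasks may be submitted to the thread pool that runs workflow nodes in parallel. A rough standalone illustration of that kind of cap; this is not Dify's executor code:

```python
from concurrent.futures import ThreadPoolExecutor

MAX_SUBMIT_COUNT = 100  # cap on concurrently running node executions


def run_node(n: int) -> int:
    return n * n  # stand-in for executing one workflow node


with ThreadPoolExecutor(max_workers=MAX_SUBMIT_COUNT) as pool:
    futures = [pool.submit(run_node, n) for n in range(10)]
    print([f.result() for f in futures])
```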
@@ -546,21 +474,6 @@ class AuthConfig(BaseSettings):
         default=60,
     )
 
-    REFRESH_TOKEN_EXPIRE_DAYS: PositiveFloat = Field(
-        description="Expiration time for refresh tokens in days",
-        default=30,
-    )
-
-    LOGIN_LOCKOUT_DURATION: PositiveInt = Field(
-        description="Time (in seconds) a user must wait before retrying login after exceeding the rate limit.",
-        default=86400,
-    )
-
-    FORGOT_PASSWORD_LOCKOUT_DURATION: PositiveInt = Field(
-        description="Time (in seconds) a user must wait before retrying password reset after exceeding the rate limit.",
-        default=86400,
-    )
-
 
 class ModerationConfig(BaseSettings):
     """
@@ -669,7 +582,7 @@ class RagEtlConfig(BaseSettings):
 
     UNSTRUCTURED_API_KEY: Optional[str] = Field(
         description="API key for Unstructured.io service",
-        default="",
+        default=None,
     )
 
     SCARF_NO_ANALYTICS: Optional[str] = Field(
@@ -713,6 +626,8 @@ class DataSetConfig(BaseSettings):
         default=30,
     )
 
+    RETRIEVAL_TOP_N: int = Field(description="number of retrieval top_n", default=0)
+
 
 class WorkspaceConfig(BaseSettings):
     """
@@ -735,15 +650,15 @@ class IndexingConfig(BaseSettings):
         default=4000,
     )
 
-    CHILD_CHUNKS_PREVIEW_NUMBER: PositiveInt = Field(
-        description="Maximum number of child chunks to preview",
-        default=50,
-    )
-
-
-class MultiModalTransferConfig(BaseSettings):
-    MULTIMODAL_SEND_FORMAT: Literal["base64", "url"] = Field(
-        description="Format for sending files in multimodal contexts ('base64' or 'url'), default is base64",
-        default="base64",
-    )
+
+class VisionFormatConfig(BaseSettings):
+    MULTIMODAL_SEND_IMAGE_FORMAT: Literal["base64", "url"] = Field(
+        description="Format for sending images in multimodal contexts ('base64' or 'url'), default is base64",
+        default="base64",
+    )
+
+    MULTIMODAL_SEND_VIDEO_FORMAT: Literal["base64", "url"] = Field(
+        description="Format for sending videos in multimodal contexts ('base64' or 'url'), default is base64",
+        default="base64",
+    )
 
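The `Literal["base64", "url"]` annotations on both sides make Pydantic reject any other value at settings-load time. A quick sketch with an invented class name:

```python
from typing import Literal

from pydantic import Field, ValidationError
from pydantic_settings import BaseSettings


class TransferSketch(BaseSettings):
    MULTIMODAL_SEND_FORMAT: Literal["base64", "url"] = Field(default="base64")


try:
    TransferSketch(MULTIMODAL_SEND_FORMAT="hex")
except ValidationError as err:
    print(err.errors()[0]["type"])  # 'literal_error'
```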
@@ -786,27 +701,27 @@ class PositionConfig(BaseSettings):
         default="",
     )
 
-    @property
+    @computed_field
     def POSITION_PROVIDER_PINS_LIST(self) -> list[str]:
         return [item.strip() for item in self.POSITION_PROVIDER_PINS.split(",") if item.strip() != ""]
 
-    @property
+    @computed_field
     def POSITION_PROVIDER_INCLUDES_SET(self) -> set[str]:
         return {item.strip() for item in self.POSITION_PROVIDER_INCLUDES.split(",") if item.strip() != ""}
 
-    @property
+    @computed_field
     def POSITION_PROVIDER_EXCLUDES_SET(self) -> set[str]:
         return {item.strip() for item in self.POSITION_PROVIDER_EXCLUDES.split(",") if item.strip() != ""}
 
-    @property
+    @computed_field
     def POSITION_TOOL_PINS_LIST(self) -> list[str]:
         return [item.strip() for item in self.POSITION_TOOL_PINS.split(",") if item.strip() != ""]
 
-    @property
+    @computed_field
     def POSITION_TOOL_INCLUDES_SET(self) -> set[str]:
         return {item.strip() for item in self.POSITION_TOOL_INCLUDES.split(",") if item.strip() != ""}
 
-    @property
+    @computed_field
     def POSITION_TOOL_EXCLUDES_SET(self) -> set[str]:
         return {item.strip() for item in self.POSITION_TOOL_EXCLUDES.split(",") if item.strip() != ""}
 
@@ -838,43 +753,32 @@ class LoginConfig(BaseSettings):
     )
 
 
-class AccountConfig(BaseSettings):
-    ACCOUNT_DELETION_TOKEN_EXPIRY_MINUTES: PositiveInt = Field(
-        description="Duration in minutes for which a account deletion token remains valid",
-        default=5,
-    )
-
-
 class FeatureConfig(
     # place the configs in alphabet order
     AppExecutionConfig,
     AuthConfig, # Changed from OAuthConfig to AuthConfig
     BillingConfig,
     CodeExecutionSandboxConfig,
-    PluginConfig,
-    MarketplaceConfig,
     DataSetConfig,
     EndpointConfig,
     FileAccessConfig,
     FileUploadConfig,
     HttpConfig,
+    VisionFormatConfig,
     InnerAPIConfig,
     IndexingConfig,
     LoggingConfig,
     MailConfig,
     ModelLoadBalanceConfig,
     ModerationConfig,
-    MultiModalTransferConfig,
     PositionConfig,
     RagEtlConfig,
     SecurityConfig,
     ToolConfig,
     UpdateConfig,
     WorkflowConfig,
-    WorkflowNodeExecutionConfig,
     WorkspaceConfig,
     LoginConfig,
-    AccountConfig,
     # hosted services config
     HostedServiceConfig,
     CeleryBeatConfig,
@@ -1,40 +1,9 @@
 from typing import Optional
 
-from pydantic import Field, NonNegativeInt, computed_field
+from pydantic import Field, NonNegativeInt
 from pydantic_settings import BaseSettings
 
 
-class HostedCreditConfig(BaseSettings):
-    HOSTED_MODEL_CREDIT_CONFIG: str = Field(
-        description="Model credit configuration in format 'model:credits,model:credits', e.g., 'gpt-4:20,gpt-4o:10'",
-        default="",
-    )
-
-    def get_model_credits(self, model_name: str) -> int:
-        """
-        Get credit value for a specific model name.
-        Returns 1 if model is not found in configuration (default credit).
-
-        :param model_name: The name of the model to search for
-        :return: The credit value for the model
-        """
-        if not self.HOSTED_MODEL_CREDIT_CONFIG:
-            return 1
-
-        try:
-            credit_map = dict(
-                item.strip().split(":", 1) for item in self.HOSTED_MODEL_CREDIT_CONFIG.split(",") if ":" in item
-            )
-
-            # Search for matching model pattern
-            for pattern, credit in credit_map.items():
-                if pattern.strip() == model_name:
-                    return int(credit)
-            return 1  # Default quota if no match found
-        except (ValueError, AttributeError):
-            return 1  # Return default quota if parsing fails
-
-
 class HostedOpenAiConfig(BaseSettings):
     """
     Configuration for hosted OpenAI service
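To make the removed `get_model_credits` logic concrete, here is the same parsing distilled into a standalone function with two worked calls; extracting it as a free function is my restatement, the parsing itself mirrors the removed lines:

```python
def get_model_credits(config: str, model_name: str) -> int:
    if not config:
        return 1
    try:
        # "gpt-4:20,gpt-4o:10" -> {"gpt-4": "20", "gpt-4o": "10"}
        credit_map = dict(item.strip().split(":", 1) for item in config.split(",") if ":" in item)
        for pattern, credit in credit_map.items():
            if pattern.strip() == model_name:
                return int(credit)
        return 1  # default credit when the model is not listed
    except (ValueError, AttributeError):
        return 1  # malformed config falls back to the default


print(get_model_credits("gpt-4:20,gpt-4o:10", "gpt-4"))   # 20
print(get_model_credits("gpt-4:20,gpt-4o:10", "claude"))  # 1
```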
@@ -212,7 +181,7 @@ class HostedFetchAppTemplateConfig(BaseSettings):
     """
 
     HOSTED_FETCH_APP_TEMPLATES_MODE: str = Field(
-        description="Mode for fetching app templates: remote, db, or builtin default to remote,",
+        description="Mode for fetching app templates: remote, db, or builtin" " default to remote,",
         default="remote",
     )
 

@@ -233,7 +202,5 @@ class HostedServiceConfig(
     HostedZhipuAIConfig,
     # moderation
     HostedModerationConfig,
-    # credit config
-    HostedCreditConfig,
 ):
     pass
|||||||
@@ -1,70 +1,54 @@
|
|||||||
import os
|
from typing import Any, Optional
|
||||||
from typing import Any, Literal, Optional
|
|
||||||
from urllib.parse import quote_plus
|
from urllib.parse import quote_plus
|
||||||
|
|
||||||
from pydantic import Field, NonNegativeInt, PositiveFloat, PositiveInt, computed_field
|
from pydantic import Field, NonNegativeInt, PositiveFloat, PositiveInt, computed_field
|
||||||
from pydantic_settings import BaseSettings
|
from pydantic_settings import BaseSettings
|
 
-from .cache.redis_config import RedisConfig
-from .storage.aliyun_oss_storage_config import AliyunOSSStorageConfig
-from .storage.amazon_s3_storage_config import S3StorageConfig
-from .storage.azure_blob_storage_config import AzureBlobStorageConfig
-from .storage.baidu_obs_storage_config import BaiduOBSStorageConfig
-from .storage.google_cloud_storage_config import GoogleCloudStorageConfig
-from .storage.huawei_obs_storage_config import HuaweiCloudOBSStorageConfig
-from .storage.oci_storage_config import OCIStorageConfig
-from .storage.opendal_storage_config import OpenDALStorageConfig
-from .storage.supabase_storage_config import SupabaseStorageConfig
-from .storage.tencent_cos_storage_config import TencentCloudCOSStorageConfig
-from .storage.volcengine_tos_storage_config import VolcengineTOSStorageConfig
-from .vdb.analyticdb_config import AnalyticdbConfig
-from .vdb.baidu_vector_config import BaiduVectorDBConfig
-from .vdb.chroma_config import ChromaConfig
-from .vdb.couchbase_config import CouchbaseConfig
-from .vdb.elasticsearch_config import ElasticsearchConfig
-from .vdb.lindorm_config import LindormConfig
-from .vdb.milvus_config import MilvusConfig
-from .vdb.myscale_config import MyScaleConfig
-from .vdb.oceanbase_config import OceanBaseVectorConfig
-from .vdb.opensearch_config import OpenSearchConfig
-from .vdb.oracle_config import OracleConfig
-from .vdb.pgvector_config import PGVectorConfig
-from .vdb.pgvectors_config import PGVectoRSConfig
-from .vdb.qdrant_config import QdrantConfig
-from .vdb.relyt_config import RelytConfig
-from .vdb.tencent_vector_config import TencentVectorDBConfig
-from .vdb.tidb_on_qdrant_config import TidbOnQdrantConfig
-from .vdb.tidb_vector_config import TiDBVectorConfig
-from .vdb.upstash_config import UpstashConfig
-from .vdb.vikingdb_config import VikingDBConfig
-from .vdb.weaviate_config import WeaviateConfig
+from configs.middleware.cache.redis_config import RedisConfig
+from configs.middleware.storage.aliyun_oss_storage_config import AliyunOSSStorageConfig
+from configs.middleware.storage.amazon_s3_storage_config import S3StorageConfig
+from configs.middleware.storage.azure_blob_storage_config import AzureBlobStorageConfig
+from configs.middleware.storage.baidu_obs_storage_config import BaiduOBSStorageConfig
+from configs.middleware.storage.google_cloud_storage_config import GoogleCloudStorageConfig
+from configs.middleware.storage.huawei_obs_storage_config import HuaweiCloudOBSStorageConfig
+from configs.middleware.storage.oci_storage_config import OCIStorageConfig
+from configs.middleware.storage.supabase_storage_config import SupabaseStorageConfig
+from configs.middleware.storage.tencent_cos_storage_config import TencentCloudCOSStorageConfig
+from configs.middleware.storage.volcengine_tos_storage_config import VolcengineTOSStorageConfig
+from configs.middleware.vdb.analyticdb_config import AnalyticdbConfig
+from configs.middleware.vdb.baidu_vector_config import BaiduVectorDBConfig
+from configs.middleware.vdb.chroma_config import ChromaConfig
+from configs.middleware.vdb.couchbase_config import CouchbaseConfig
+from configs.middleware.vdb.elasticsearch_config import ElasticsearchConfig
+from configs.middleware.vdb.lindorm_config import LindormConfig
+from configs.middleware.vdb.milvus_config import MilvusConfig
+from configs.middleware.vdb.myscale_config import MyScaleConfig
+from configs.middleware.vdb.oceanbase_config import OceanBaseVectorConfig
+from configs.middleware.vdb.opensearch_config import OpenSearchConfig
+from configs.middleware.vdb.oracle_config import OracleConfig
+from configs.middleware.vdb.pgvector_config import PGVectorConfig
+from configs.middleware.vdb.pgvectors_config import PGVectoRSConfig
+from configs.middleware.vdb.qdrant_config import QdrantConfig
+from configs.middleware.vdb.relyt_config import RelytConfig
+from configs.middleware.vdb.tencent_vector_config import TencentVectorDBConfig
+from configs.middleware.vdb.tidb_on_qdrant_config import TidbOnQdrantConfig
+from configs.middleware.vdb.tidb_vector_config import TiDBVectorConfig
+from configs.middleware.vdb.upstash_config import UpstashConfig
+from configs.middleware.vdb.vikingdb_config import VikingDBConfig
+from configs.middleware.vdb.weaviate_config import WeaviateConfig
 
 
 class StorageConfig(BaseSettings):
-    STORAGE_TYPE: Literal[
-        "opendal",
-        "s3",
-        "aliyun-oss",
-        "azure-blob",
-        "baidu-obs",
-        "google-storage",
-        "huawei-obs",
-        "oci-storage",
-        "tencent-cos",
-        "volcengine-tos",
-        "supabase",
-        "local",
-    ] = Field(
+    STORAGE_TYPE: str = Field(
         description="Type of storage to use."
-        " Options: 'opendal', '(deprecated) local', 's3', 'aliyun-oss', 'azure-blob', 'baidu-obs', 'google-storage', "
-        "'huawei-obs', 'oci-storage', 'tencent-cos', 'volcengine-tos', 'supabase'. Default is 'opendal'.",
-        default="opendal",
+        " Options: 'local', 's3', 'aliyun-oss', 'azure-blob', 'baidu-obs', 'google-storage', 'huawei-obs', "
+        "'oci-storage', 'tencent-cos', 'volcengine-tos', 'supabase'. Default is 'local'.",
+        default="local",
     )
 
     STORAGE_LOCAL_PATH: str = Field(
         description="Path for local storage when STORAGE_TYPE is set to 'local'.",
         default="storage",
-        deprecated=True,
     )
 
 
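[Note] The removed (`-`) side of the hunk above narrows STORAGE_TYPE from a plain str to a Literal, so an unsupported backend name fails validation when the settings object is built rather than at first storage access. A minimal sketch of that pattern, assuming pydantic v2 with pydantic-settings; the class name and the shortened value list are illustrative, not Dify's full set:

    from typing import Literal

    from pydantic import Field, ValidationError
    from pydantic_settings import BaseSettings


    class DemoStorageConfig(BaseSettings):
        # Only the listed values pass validation; anything else fails on load.
        STORAGE_TYPE: Literal["opendal", "s3", "local"] = Field(default="opendal")


    print(DemoStorageConfig().STORAGE_TYPE)  # "opendal"
    try:
        DemoStorageConfig(STORAGE_TYPE="ftp")  # rejected by the Literal constraint
    except ValidationError as exc:
        print(exc.errors()[0]["type"])  # "literal_error"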
@@ -89,7 +73,7 @@ class KeywordStoreConfig(BaseSettings):
     )
 
 
-class DatabaseConfig(BaseSettings):
+class DatabaseConfig:
     DB_HOST: str = Field(
         description="Hostname or IP address of the database server.",
         default="localhost",
@@ -131,6 +115,7 @@ class DatabaseConfig(BaseSettings):
     )
 
     @computed_field
+    @property
     def SQLALCHEMY_DATABASE_URI(self) -> str:
         db_extras = (
             f"{self.DB_EXTRAS}&client_encoding={self.DB_CHARSET}" if self.DB_CHARSET else self.DB_EXTRAS
@@ -167,12 +152,8 @@ class DatabaseConfig(BaseSettings):
         default=False,
     )
 
-    RETRIEVAL_SERVICE_EXECUTORS: NonNegativeInt = Field(
-        description="Number of processes for the retrieval service, default to CPU cores.",
-        default=os.cpu_count(),
-    )
-
     @computed_field
+    @property
     def SQLALCHEMY_ENGINE_OPTIONS(self) -> dict[str, Any]:
         return {
             "pool_size": self.SQLALCHEMY_POOL_SIZE,
@@ -210,6 +191,7 @@ class CeleryConfig(DatabaseConfig):
     )
 
     @computed_field
+    @property
     def CELERY_RESULT_BACKEND(self) -> str | None:
         return (
             "db+{}".format(self.SQLALCHEMY_DATABASE_URI)
@@ -217,6 +199,7 @@ class CeleryConfig(DatabaseConfig):
             else self.CELERY_BROKER_URL
         )
 
+    @computed_field
     @property
     def BROKER_USE_SSL(self) -> bool:
         return self.CELERY_BROKER_URL.startswith("rediss://") if self.CELERY_BROKER_URL else False
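[Note] Several hunks above only adjust how `@computed_field` and `@property` are stacked; combined, they make a derived value readable as an attribute and serializable like a declared field. A minimal sketch under the same pydantic v2 assumption (DemoCeleryConfig is illustrative, not Dify's class):

    from pydantic import computed_field
    from pydantic_settings import BaseSettings


    class DemoCeleryConfig(BaseSettings):
        CELERY_BROKER_URL: str = "rediss://localhost:6379/1"

        @computed_field  # included in model_dump() like a regular field
        @property
        def BROKER_USE_SSL(self) -> bool:
            return self.CELERY_BROKER_URL.startswith("rediss://")


    cfg = DemoCeleryConfig()
    print(cfg.BROKER_USE_SSL)                  # True
    print(cfg.model_dump()["BROKER_USE_SSL"])  # True, thanks to @computed_field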
@@ -252,7 +235,6 @@ class MiddlewareConfig(
     GoogleCloudStorageConfig,
     HuaweiCloudOBSStorageConfig,
     OCIStorageConfig,
-    OpenDALStorageConfig,
     S3StorageConfig,
     SupabaseStorageConfig,
     TencentCloudCOSStorageConfig,
@@ -1,10 +1,9 @@
 from typing import Optional
 
-from pydantic import Field
-from pydantic_settings import BaseSettings
+from pydantic import BaseModel, Field
 
 
-class BaiduOBSStorageConfig(BaseSettings):
+class BaiduOBSStorageConfig(BaseModel):
     """
     Configuration settings for Baidu Object Storage Service (OBS)
     """
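[Note] The practical difference behind these one-line BaseSettings -> BaseModel changes: a BaseSettings subclass reads its fields from the environment on its own, while a plain BaseModel only receives whatever the enclosing settings class passes in. A small sketch; the field name is borrowed from the config above purely for illustration:

    import os

    from pydantic import BaseModel
    from pydantic_settings import BaseSettings

    os.environ["BAIDU_OBS_BUCKET_NAME"] = "demo-bucket"


    class FromEnv(BaseSettings):
        BAIDU_OBS_BUCKET_NAME: str = ""


    class NotFromEnv(BaseModel):
        BAIDU_OBS_BUCKET_NAME: str = ""


    print(FromEnv().BAIDU_OBS_BUCKET_NAME)     # "demo-bucket" (picked up from env)
    print(NotFromEnv().BAIDU_OBS_BUCKET_NAME)  # "" (environment is ignored)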
@@ -1,10 +1,9 @@
 from typing import Optional
 
-from pydantic import Field
-from pydantic_settings import BaseSettings
+from pydantic import BaseModel, Field
 
 
-class HuaweiCloudOBSStorageConfig(BaseSettings):
+class HuaweiCloudOBSStorageConfig(BaseModel):
     """
     Configuration settings for Huawei Cloud Object Storage Service (OBS)
     """

@@ -1,9 +0,0 @@
-from pydantic import Field
-from pydantic_settings import BaseSettings
-
-
-class OpenDALStorageConfig(BaseSettings):
-    OPENDAL_SCHEME: str = Field(
-        default="fs",
-        description="OpenDAL scheme.",
-    )

@@ -1,10 +1,9 @@
 from typing import Optional
 
-from pydantic import Field
-from pydantic_settings import BaseSettings
+from pydantic import BaseModel, Field
 
 
-class SupabaseStorageConfig(BaseSettings):
+class SupabaseStorageConfig(BaseModel):
     """
     Configuration settings for Supabase Object Storage Service
     """

@@ -1,10 +1,9 @@
 from typing import Optional
 
-from pydantic import Field
-from pydantic_settings import BaseSettings
+from pydantic import BaseModel, Field
 
 
-class VolcengineTOSStorageConfig(BaseSettings):
+class VolcengineTOSStorageConfig(BaseModel):
     """
     Configuration settings for Volcengine Tinder Object Storage (TOS)
     """

@@ -1,10 +1,9 @@
 from typing import Optional
 
-from pydantic import Field, PositiveInt
-from pydantic_settings import BaseSettings
+from pydantic import BaseModel, Field, PositiveInt
 
 
-class AnalyticdbConfig(BaseSettings):
+class AnalyticdbConfig(BaseModel):
     """
     Configuration for connecting to Alibaba Cloud AnalyticDB for PostgreSQL.
     Refer to the following documentation for details on obtaining credentials:

@@ -1,10 +1,9 @@
 from typing import Optional
 
-from pydantic import Field
-from pydantic_settings import BaseSettings
+from pydantic import BaseModel, Field
 
 
-class CouchbaseConfig(BaseSettings):
+class CouchbaseConfig(BaseModel):
     """
     Couchbase configs
     """

@@ -21,14 +21,3 @@ class LindormConfig(BaseSettings):
         description="Lindorm password",
         default=None,
     )
-    DEFAULT_INDEX_TYPE: Optional[str] = Field(
-        description="Lindorm Vector Index Type, hnsw or flat is available in dify",
-        default="hnsw",
-    )
-    DEFAULT_DISTANCE_TYPE: Optional[str] = Field(
-        description="Vector Distance Type, support l2, cosinesimil, innerproduct", default="l2"
-    )
-    USING_UGC_INDEX: Optional[bool] = Field(
-        description="Using UGC index will store the same type of Index in a single index but can retrieve separately.",
-        default=False,
-    )

@@ -33,9 +33,3 @@ class MilvusConfig(BaseSettings):
         description="Name of the Milvus database to connect to (default is 'default')",
         default="default",
     )
-
-    MILVUS_ENABLE_HYBRID_SEARCH: bool = Field(
-        description="Enable hybrid search features (requires Milvus >= 2.5.0). Set to false for compatibility with "
-        "older versions",
-        default=True,
-    )

@@ -1,8 +1,7 @@
-from pydantic import Field, PositiveInt
-from pydantic_settings import BaseSettings
+from pydantic import BaseModel, Field, PositiveInt
 
 
-class MyScaleConfig(BaseSettings):
+class MyScaleConfig(BaseModel):
     """
     Configuration settings for MyScale vector database
     """

@@ -1,10 +1,9 @@
 from typing import Optional
 
-from pydantic import Field
-from pydantic_settings import BaseSettings
+from pydantic import BaseModel, Field
 
 
-class VikingDBConfig(BaseSettings):
+class VikingDBConfig(BaseModel):
     """
     Configuration for connecting to Volcengine VikingDB.
     Refer to the following documentation for details on obtaining credentials:

@@ -9,7 +9,7 @@ class PackagingInfo(BaseSettings):
 
     CURRENT_VERSION: str = Field(
         description="Dify version",
-        default="1.0.0",
+        default="0.12.1",
     )
 
     COMMIT_SHA: str = Field(
@@ -1,17 +0,0 @@
-from typing import Optional
-
-from pydantic import Field
-
-from .apollo import ApolloSettingsSourceInfo
-from .base import RemoteSettingsSource
-from .enums import RemoteSettingsSourceName
-
-
-class RemoteSettingsSourceConfig(ApolloSettingsSourceInfo):
-    REMOTE_SETTINGS_SOURCE_NAME: RemoteSettingsSourceName | str = Field(
-        description="name of remote config source",
-        default="",
-    )
-
-
-__all__ = ["RemoteSettingsSource", "RemoteSettingsSourceConfig", "RemoteSettingsSourceName"]
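[Note] The deleted package above plugs remote configuration into pydantic-settings through per-field get_field_value lookups. A toy in-memory source with the same return shape, assuming pydantic v2 (DictSettingsSource is hypothetical, written only to show the interface):

    from collections.abc import Mapping
    from typing import Any

    from pydantic.fields import FieldInfo


    class DictSettingsSource:
        """Stand-in for the deleted RemoteSettingsSource: answers
        get_field_value() from a plain dict instead of a remote service."""

        def __init__(self, configs: Mapping[str, Any]):
            self.remote_configs = dict(configs)

        def get_field_value(self, field: FieldInfo, field_name: str) -> tuple[Any, str, bool]:
            # (value, field_name, value_is_complex): the shape the deleted code returns
            return self.remote_configs.get(field_name), field_name, False


    source = DictSettingsSource({"APOLLO_APP_ID": "demo"})
    print(source.get_field_value(FieldInfo(), "APOLLO_APP_ID"))  # ('demo', 'APOLLO_APP_ID', False)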
@@ -1,55 +0,0 @@
-from collections.abc import Mapping
-from typing import Any, Optional
-
-from pydantic import Field
-from pydantic.fields import FieldInfo
-from pydantic_settings import BaseSettings
-
-from configs.remote_settings_sources.base import RemoteSettingsSource
-
-from .client import ApolloClient
-
-
-class ApolloSettingsSourceInfo(BaseSettings):
-    """
-    Packaging build information
-    """
-
-    APOLLO_APP_ID: Optional[str] = Field(
-        description="apollo app_id",
-        default=None,
-    )
-
-    APOLLO_CLUSTER: Optional[str] = Field(
-        description="apollo cluster",
-        default=None,
-    )
-
-    APOLLO_CONFIG_URL: Optional[str] = Field(
-        description="apollo config url",
-        default=None,
-    )
-
-    APOLLO_NAMESPACE: Optional[str] = Field(
-        description="apollo namespace",
-        default=None,
-    )
-
-
-class ApolloSettingsSource(RemoteSettingsSource):
-    def __init__(self, configs: Mapping[str, Any]):
-        self.client = ApolloClient(
-            app_id=configs["APOLLO_APP_ID"],
-            cluster=configs["APOLLO_CLUSTER"],
-            config_url=configs["APOLLO_CONFIG_URL"],
-            start_hot_update=False,
-            _notification_map={configs["APOLLO_NAMESPACE"]: -1},
-        )
-        self.namespace = configs["APOLLO_NAMESPACE"]
-        self.remote_configs = self.client.get_all_dicts(self.namespace)
-
-    def get_field_value(self, field: FieldInfo, field_name: str) -> tuple[Any, str, bool]:
-        if not isinstance(self.remote_configs, dict):
-            raise ValueError(f"remote configs is not dict, but {type(self.remote_configs)}")
-        field_value = self.remote_configs.get(field_name)
-        return field_value, field_name, False
@@ -1,304 +0,0 @@
-import hashlib
-import json
-import logging
-import os
-import threading
-import time
-from collections.abc import Mapping
-from pathlib import Path
-
-from .python_3x import http_request, makedirs_wrapper
-from .utils import (
-    CONFIGURATIONS,
-    NAMESPACE_NAME,
-    NOTIFICATION_ID,
-    get_value_from_dict,
-    init_ip,
-    no_key_cache_key,
-    signature,
-    url_encode_wrapper,
-)
-
-logger = logging.getLogger(__name__)
-
-
-class ApolloClient:
-    def __init__(
-        self,
-        config_url,
-        app_id,
-        cluster="default",
-        secret="",
-        start_hot_update=True,
-        change_listener=None,
-        _notification_map=None,
-    ):
-        # Core routing parameters
-        self.config_url = config_url
-        self.cluster = cluster
-        self.app_id = app_id
-
-        # Non-core parameters
-        self.ip = init_ip()
-        self.secret = secret
-
-        # Check the parameter variables
-
-        # Private control variables
-        self._cycle_time = 5
-        self._stopping = False
-        self._cache = {}
-        self._no_key = {}
-        self._hash = {}
-        self._pull_timeout = 75
-        self._cache_file_path = os.path.expanduser("~") + "/.dify/config/remote-settings/apollo/cache/"
-        self._long_poll_thread = None
-        self._change_listener = change_listener  # "add" "delete" "update"
-        if _notification_map is None:
-            _notification_map = {"application": -1}
-        self._notification_map = _notification_map
-        self.last_release_key = None
-        # Private startup method
-        self._path_checker()
-        if start_hot_update:
-            self._start_hot_update()
-
-        # start the heartbeat thread
-        heartbeat = threading.Thread(target=self._heart_beat)
-        heartbeat.daemon = True
-        heartbeat.start()
-
-    def get_json_from_net(self, namespace="application"):
-        url = "{}/configs/{}/{}/{}?releaseKey={}&ip={}".format(
-            self.config_url, self.app_id, self.cluster, namespace, "", self.ip
-        )
-        try:
-            code, body = http_request(url, timeout=3, headers=self._sign_headers(url))
-            if code == 200:
-                if not body:
-                    logger.error(f"get_json_from_net load configs failed, body is {body}")
-                    return None
-                data = json.loads(body)
-                data = data["configurations"]
-                return_data = {CONFIGURATIONS: data}
-                return return_data
-            else:
-                return None
-        except Exception:
-            logger.exception("an error occurred in get_json_from_net")
-            return None
-
-    def get_value(self, key, default_val=None, namespace="application"):
-        try:
-            # read memory configuration
-            namespace_cache = self._cache.get(namespace)
-            val = get_value_from_dict(namespace_cache, key)
-            if val is not None:
-                return val
-
-            no_key = no_key_cache_key(namespace, key)
-            if no_key in self._no_key:
-                return default_val
-
-            # read the network configuration
-            namespace_data = self.get_json_from_net(namespace)
-            val = get_value_from_dict(namespace_data, key)
-            if val is not None:
-                self._update_cache_and_file(namespace_data, namespace)
-                return val
-
-            # read the file configuration
-            namespace_cache = self._get_local_cache(namespace)
-            val = get_value_from_dict(namespace_cache, key)
-            if val is not None:
-                self._update_cache_and_file(namespace_cache, namespace)
-                return val
-
-            # If all of them are not obtained, the default value is returned
-            # and the local cache is set to None
-            self._set_local_cache_none(namespace, key)
-            return default_val
-        except Exception:
-            logger.exception("get_value has error, [key is %s], [namespace is %s]", key, namespace)
-            return default_val
-
-    # Set the key of a namespace to none, and do not set default val
-    # to ensure the real-time correctness of the function call.
-    # If the user does not have the same default val twice
-    # and the default val is used here, there may be a problem.
-    def _set_local_cache_none(self, namespace, key):
-        no_key = no_key_cache_key(namespace, key)
-        self._no_key[no_key] = key
-
-    def _start_hot_update(self):
-        self._long_poll_thread = threading.Thread(target=self._listener)
-        # When the asynchronous thread is started, the daemon thread will automatically exit
-        # when the main thread is launched.
-        self._long_poll_thread.daemon = True
-        self._long_poll_thread.start()
-
-    def stop(self):
-        self._stopping = True
-        logger.info("Stopping listener...")
-
-    # Call the set callback function, and if it is abnormal, try it out
-    def _call_listener(self, namespace, old_kv, new_kv):
-        if self._change_listener is None:
-            return
-        if old_kv is None:
-            old_kv = {}
-        if new_kv is None:
-            new_kv = {}
-        try:
-            for key in old_kv:
-                new_value = new_kv.get(key)
-                old_value = old_kv.get(key)
-                if new_value is None:
-                    # If newValue is empty, it means key, and the value is deleted.
-                    self._change_listener("delete", namespace, key, old_value)
-                    continue
-                if new_value != old_value:
-                    self._change_listener("update", namespace, key, new_value)
-                    continue
-            for key in new_kv:
-                new_value = new_kv.get(key)
-                old_value = old_kv.get(key)
-                if old_value is None:
-                    self._change_listener("add", namespace, key, new_value)
-        except BaseException as e:
-            logger.warning(str(e))
-
-    def _path_checker(self):
-        if not os.path.isdir(self._cache_file_path):
-            makedirs_wrapper(self._cache_file_path)
-
-    # update the local cache and file cache
-    def _update_cache_and_file(self, namespace_data, namespace="application"):
-        # update the local cache
-        self._cache[namespace] = namespace_data
-        # update the file cache
-        new_string = json.dumps(namespace_data)
-        new_hash = hashlib.md5(new_string.encode("utf-8")).hexdigest()
-        if self._hash.get(namespace) == new_hash:
-            pass
-        else:
-            file_path = Path(self._cache_file_path) / f"{self.app_id}_configuration_{namespace}.txt"
-            file_path.write_text(new_string)
-            self._hash[namespace] = new_hash
-
-    # get the configuration from the local file
-    def _get_local_cache(self, namespace="application"):
-        cache_file_path = os.path.join(self._cache_file_path, f"{self.app_id}_configuration_{namespace}.txt")
-        if os.path.isfile(cache_file_path):
-            with open(cache_file_path) as f:
-                result = json.loads(f.readline())
-            return result
-        return {}
-
-    def _long_poll(self):
-        notifications = []
-        for key in self._cache:
-            namespace_data = self._cache[key]
-            notification_id = -1
-            if NOTIFICATION_ID in namespace_data:
-                notification_id = self._cache[key][NOTIFICATION_ID]
-            notifications.append({NAMESPACE_NAME: key, NOTIFICATION_ID: notification_id})
-        try:
-            # if the length is 0 it is returned directly
-            if len(notifications) == 0:
-                return
-            url = "{}/notifications/v2".format(self.config_url)
-            params = {
-                "appId": self.app_id,
-                "cluster": self.cluster,
-                "notifications": json.dumps(notifications, ensure_ascii=False),
-            }
-            param_str = url_encode_wrapper(params)
-            url = url + "?" + param_str
-            code, body = http_request(url, self._pull_timeout, headers=self._sign_headers(url))
-            http_code = code
-            if http_code == 304:
-                logger.debug("No change, loop...")
-                return
-            if http_code == 200:
-                if not body:
-                    logger.error(f"_long_poll load configs failed,body is {body}")
-                    return
-                data = json.loads(body)
-                for entry in data:
-                    namespace = entry[NAMESPACE_NAME]
-                    n_id = entry[NOTIFICATION_ID]
-                    logger.info("%s has changes: notificationId=%d", namespace, n_id)
-                    self._get_net_and_set_local(namespace, n_id, call_change=True)
-                return
-            else:
-                logger.warning("Sleep...")
-        except Exception as e:
-            logger.warning(str(e))
-
-    def _get_net_and_set_local(self, namespace, n_id, call_change=False):
-        namespace_data = self.get_json_from_net(namespace)
-        if not namespace_data:
-            return
-        namespace_data[NOTIFICATION_ID] = n_id
-        old_namespace = self._cache.get(namespace)
-        self._update_cache_and_file(namespace_data, namespace)
-        if self._change_listener is not None and call_change and old_namespace:
-            old_kv = old_namespace.get(CONFIGURATIONS)
-            new_kv = namespace_data.get(CONFIGURATIONS)
-            self._call_listener(namespace, old_kv, new_kv)
-
-    def _listener(self):
-        logger.info("start long_poll")
-        while not self._stopping:
-            self._long_poll()
-            time.sleep(self._cycle_time)
-        logger.info("stopped, long_poll")
-
-    # add the need for endorsement to the header
-    def _sign_headers(self, url: str) -> Mapping[str, str]:
-        headers: dict[str, str] = {}
-        if self.secret == "":
-            return headers
-        uri = url[len(self.config_url) : len(url)]
-        time_unix_now = str(int(round(time.time() * 1000)))
-        headers["Authorization"] = "Apollo " + self.app_id + ":" + signature(time_unix_now, uri, self.secret)
-        headers["Timestamp"] = time_unix_now
-        return headers
-
-    def _heart_beat(self):
-        while not self._stopping:
-            for namespace in self._notification_map:
-                self._do_heart_beat(namespace)
-            time.sleep(60 * 10)  # 10 minutes
-
-    def _do_heart_beat(self, namespace):
-        url = "{}/configs/{}/{}/{}?ip={}".format(self.config_url, self.app_id, self.cluster, namespace, self.ip)
-        try:
-            code, body = http_request(url, timeout=3, headers=self._sign_headers(url))
-            if code == 200:
-                if not body:
-                    logger.error(f"_do_heart_beat load configs failed,body is {body}")
-                    return None
-                data = json.loads(body)
-                if self.last_release_key == data["releaseKey"]:
-                    return None
-                self.last_release_key = data["releaseKey"]
-                data = data["configurations"]
-                self._update_cache_and_file(data, namespace)
-            else:
-                return None
-        except Exception:
-            logger.exception("an error occurred in _do_heart_beat")
-            return None
-
-    def get_all_dicts(self, namespace):
-        namespace_data = self._cache.get(namespace)
-        if namespace_data is None:
-            net_namespace_data = self.get_json_from_net(namespace)
-            if not net_namespace_data:
-                return namespace_data
-            namespace_data = net_namespace_data.get(CONFIGURATIONS)
-            if namespace_data:
-                self._update_cache_and_file(namespace_data, namespace)
-        return namespace_data
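[Note] The deleted ApolloClient updates itself by long-polling /notifications/v2 with the last notificationId it saw per namespace, refetching /configs/... only when an id advances (HTTP 304 means no change). A sketch of just the URL that the _long_poll method builds; the server address and app id are placeholders:

    import json
    import urllib.parse


    def build_notifications_url(config_url, app_id, cluster, seen):
        # Same shape as the deleted _long_poll(): the server holds this request
        # open until some namespace's notificationId advances past `seen`,
        # answering 304 when nothing changed.
        notifications = [{"namespaceName": ns, "notificationId": nid} for ns, nid in seen.items()]
        params = urllib.parse.urlencode(
            {"appId": app_id, "cluster": cluster, "notifications": json.dumps(notifications, ensure_ascii=False)}
        )
        return f"{config_url}/notifications/v2?{params}"


    print(build_notifications_url("http://apollo:8080", "demo-app", "default", {"application": -1}))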
@@ -1,41 +0,0 @@
-import logging
-import os
-import ssl
-import urllib.request
-from urllib import parse
-from urllib.error import HTTPError
-
-# Create an SSL context that allows for a lower level of security
-ssl_context = ssl.create_default_context()
-ssl_context.set_ciphers("HIGH:!DH:!aNULL")
-ssl_context.check_hostname = False
-ssl_context.verify_mode = ssl.CERT_NONE
-
-# Create an opener object and pass in a custom SSL context
-opener = urllib.request.build_opener(urllib.request.HTTPSHandler(context=ssl_context))
-
-urllib.request.install_opener(opener)
-
-logger = logging.getLogger(__name__)
-
-
-def http_request(url, timeout, headers={}):
-    try:
-        request = urllib.request.Request(url, headers=headers)
-        res = urllib.request.urlopen(request, timeout=timeout)
-        body = res.read().decode("utf-8")
-        return res.code, body
-    except HTTPError as e:
-        if e.code == 304:
-            logger.warning("http_request error,code is 304, maybe you should check secret")
-            return 304, None
-        logger.warning("http_request error,code is %d, msg is %s", e.code, e.msg)
-        raise e
-
-
-def url_encode(params):
-    return parse.urlencode(params)
-
-
-def makedirs_wrapper(path):
-    os.makedirs(path, exist_ok=True)
@@ -1,51 +0,0 @@
-import hashlib
-import socket
-
-from .python_3x import url_encode
-
-# define constants
-CONFIGURATIONS = "configurations"
-NOTIFICATION_ID = "notificationId"
-NAMESPACE_NAME = "namespaceName"
-
-
-# add timestamps uris and keys
-def signature(timestamp, uri, secret):
-    import base64
-    import hmac
-
-    string_to_sign = "" + timestamp + "\n" + uri
-    hmac_code = hmac.new(secret.encode(), string_to_sign.encode(), hashlib.sha1).digest()
-    return base64.b64encode(hmac_code).decode()
-
-
-def url_encode_wrapper(params):
-    return url_encode(params)
-
-
-def no_key_cache_key(namespace, key):
-    return "{}{}{}".format(namespace, len(namespace), key)
-
-
-# Returns whether the obtained value is obtained, and None if it does not
-def get_value_from_dict(namespace_cache, key):
-    if namespace_cache:
-        kv_data = namespace_cache.get(CONFIGURATIONS)
-        if kv_data is None:
-            return None
-        if key in kv_data:
-            return kv_data[key]
-    return None
-
-
-def init_ip():
-    ip = ""
-    s = None
-    try:
-        s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
-        s.connect(("8.8.8.8", 53))
-        ip = s.getsockname()[0]
-    finally:
-        if s:
-            s.close()
-    return ip
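[Note] The deleted signature() is plain HMAC-SHA1 over "<timestamp>\n<uri>", base64-encoded, which is what the client's _sign_headers puts in the Authorization header. A self-contained restatement of that computation with throwaway inputs:

    import base64
    import hashlib
    import hmac


    def sign(timestamp: str, uri: str, secret: str) -> str:
        # Same computation as the deleted signature(): HMAC-SHA1 over "ts\nuri".
        string_to_sign = timestamp + "\n" + uri
        digest = hmac.new(secret.encode(), string_to_sign.encode(), hashlib.sha1).digest()
        return base64.b64encode(digest).decode()


    # The Authorization header the deleted client builds from this value:
    ts, uri, secret = "1700000000000", "/configs/demo-app/default/application", "s3cret"
    print("Apollo demo-app:" + sign(ts, uri, secret))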
@@ -1,15 +0,0 @@
-from collections.abc import Mapping
-from typing import Any
-
-from pydantic.fields import FieldInfo
-
-
-class RemoteSettingsSource:
-    def __init__(self, configs: Mapping[str, Any]):
-        pass
-
-    def get_field_value(self, field: FieldInfo, field_name: str) -> tuple[Any, str, bool]:
-        raise NotImplementedError
-
-    def prepare_field_value(self, field_name: str, field: FieldInfo, value: Any, value_is_complex: bool) -> Any:
-        return value

@@ -1,5 +0,0 @@
-from enum import StrEnum
-
-
-class RemoteSettingsSourceName(StrEnum):
-    APOLLO = "apollo"

@@ -14,11 +14,11 @@ AUDIO_EXTENSIONS.extend([ext.upper() for ext in AUDIO_EXTENSIONS])
 
 
 if dify_config.ETL_TYPE == "Unstructured":
-    DOCUMENT_EXTENSIONS = ["txt", "markdown", "md", "mdx", "pdf", "html", "htm", "xlsx", "xls"]
-    DOCUMENT_EXTENSIONS.extend(("doc", "docx", "csv", "eml", "msg", "pptx", "xml", "epub"))
+    DOCUMENT_EXTENSIONS = ["txt", "markdown", "md", "pdf", "html", "htm", "xlsx", "xls"]
+    DOCUMENT_EXTENSIONS.extend(("docx", "csv", "eml", "msg", "pptx", "xml", "epub"))
     if dify_config.UNSTRUCTURED_API_URL:
         DOCUMENT_EXTENSIONS.append("ppt")
     DOCUMENT_EXTENSIONS.extend([ext.upper() for ext in DOCUMENT_EXTENSIONS])
 else:
-    DOCUMENT_EXTENSIONS = ["txt", "markdown", "md", "mdx", "pdf", "html", "htm", "xlsx", "xls", "docx", "csv"]
+    DOCUMENT_EXTENSIONS = ["txt", "markdown", "md", "pdf", "html", "htm", "xlsx", "xls", "docx", "csv"]
     DOCUMENT_EXTENSIONS.extend([ext.upper() for ext in DOCUMENT_EXTENSIONS])

@@ -1,9 +1,8 @@
 import json
-from collections.abc import Mapping
 
 from models.model import AppMode
 
-default_app_templates: Mapping[AppMode, Mapping] = {
+default_app_templates = {
     # workflow default mode
     AppMode.WORKFLOW: {
         "app": {

@@ -1,30 +1,9 @@
 from contextvars import ContextVar
-from threading import Lock
 from typing import TYPE_CHECKING
 
-from contexts.wrapper import RecyclableContextVar
-
 if TYPE_CHECKING:
-    from core.plugin.entities.plugin_daemon import PluginModelProviderEntity
-    from core.tools.plugin_tool.provider import PluginToolProviderController
     from core.workflow.entities.variable_pool import VariablePool
 
-
 tenant_id: ContextVar[str] = ContextVar("tenant_id")
 
 workflow_variable_pool: ContextVar["VariablePool"] = ContextVar("workflow_variable_pool")
-
-"""
-To avoid race-conditions caused by gunicorn thread recycling, using RecyclableContextVar to replace with
-"""
-plugin_tool_providers: RecyclableContextVar[dict[str, "PluginToolProviderController"]] = RecyclableContextVar(
-    ContextVar("plugin_tool_providers")
-)
-plugin_tool_providers_lock: RecyclableContextVar[Lock] = RecyclableContextVar(ContextVar("plugin_tool_providers_lock"))
-
-plugin_model_providers: RecyclableContextVar[list["PluginModelProviderEntity"] | None] = RecyclableContextVar(
-    ContextVar("plugin_model_providers")
-)
-plugin_model_providers_lock: RecyclableContextVar[Lock] = RecyclableContextVar(
-    ContextVar("plugin_model_providers_lock")
-)
@@ -1,65 +0,0 @@
-from contextvars import ContextVar
-from typing import Generic, TypeVar
-
-T = TypeVar("T")
-
-
-class HiddenValue:
-    pass
-
-
-_default = HiddenValue()
-
-
-class RecyclableContextVar(Generic[T]):
-    """
-    RecyclableContextVar is a wrapper around ContextVar
-    It's safe to use in gunicorn with thread recycling, but features like `reset` are not available for now
-
-    NOTE: you need to call `increment_thread_recycles` before requests
-    """
-
-    _thread_recycles: ContextVar[int] = ContextVar("thread_recycles")
-
-    @classmethod
-    def increment_thread_recycles(cls):
-        try:
-            recycles = cls._thread_recycles.get()
-            cls._thread_recycles.set(recycles + 1)
-        except LookupError:
-            cls._thread_recycles.set(0)
-
-    def __init__(self, context_var: ContextVar[T]):
-        self._context_var = context_var
-        self._updates = ContextVar[int](context_var.name + "_updates", default=0)
-
-    def get(self, default: T | HiddenValue = _default) -> T:
-        thread_recycles = self._thread_recycles.get(0)
-        self_updates = self._updates.get()
-        if thread_recycles > self_updates:
-            self._updates.set(thread_recycles)
-
-        # check if thread is recycled and should be updated
-        if thread_recycles < self_updates:
-            return self._context_var.get()
-        else:
-            # thread_recycles >= self_updates, means current context is invalid
-            if isinstance(default, HiddenValue) or default is _default:
-                raise LookupError
-            else:
-                return default
-
-    def set(self, value: T):
-        # it leads to a situation that self.updates is less than cls.thread_recycles if `set` was never called before
-        # increase it manually
-        thread_recycles = self._thread_recycles.get(0)
-        self_updates = self._updates.get()
-        if thread_recycles > self_updates:
-            self._updates.set(thread_recycles)
-
-        if self._updates.get() == self._thread_recycles.get(0):
-            # after increment,
-            self._updates.set(self._updates.get() + 1)
-
-        # set the context
-        self._context_var.set(value)
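[Note] The deleted wrapper defends against gunicorn reusing worker threads: a class-wide _thread_recycles counter, bumped before each request, makes values written by an earlier request on the same thread read as stale. A usage sketch, assuming the class shown in the deletion above is still importable (its pre-deletion path was contexts.wrapper):

    from contextvars import ContextVar

    from contexts.wrapper import RecyclableContextVar  # path in the pre-deletion tree

    current_user_id = RecyclableContextVar(ContextVar("current_user_id"))


    def handle_request():
        # The docstring's contract: bump the recycle counter before each request,
        # so a value written by the previous request on this thread cannot leak in.
        RecyclableContextVar.increment_thread_recycles()
        current_user_id.set("user-42")
        assert current_user_id.get() == "user-42"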
@@ -4,8 +4,3 @@ from werkzeug.exceptions import HTTPException
 class FilenameNotExistsError(HTTPException):
     code = 400
     description = "The specified filename does not exist."
-
-
-class RemoteFileUploadError(HTTPException):
-    code = 400
-    description = "Error uploading remote file."

@@ -1,4 +1,4 @@
-from flask_restful import fields  # type: ignore
+from flask_restful import fields
 
 parameters__system_parameters = {
     "image_file_size_limit": fields.Integer,

@@ -1,32 +1,12 @@
 import mimetypes
 import os
-import platform
 import re
 import urllib.parse
-import warnings
 from collections.abc import Mapping
 from typing import Any
 from uuid import uuid4
 
 import httpx
-
-try:
-    import magic
-except ImportError:
-    if platform.system() == "Windows":
-        warnings.warn(
-            "To use python-magic guess MIMETYPE, you need to run `pip install python-magic-bin`", stacklevel=2
-        )
-    elif platform.system() == "Darwin":
-        warnings.warn("To use python-magic guess MIMETYPE, you need to run `brew install libmagic`", stacklevel=2)
-    elif platform.system() == "Linux":
-        warnings.warn(
-            "To use python-magic guess MIMETYPE, you need to run `sudo apt-get install libmagic1`", stacklevel=2
-        )
-    else:
-        warnings.warn("To use python-magic guess MIMETYPE, you need to install `libmagic`", stacklevel=2)
-    magic = None  # type: ignore
-
 from pydantic import BaseModel
 
 from configs import dify_config
@@ -67,13 +47,6 @@ def guess_file_info_from_response(response: httpx.Response):
         # If guessing fails, use Content-Type from response headers
         mimetype = response.headers.get("Content-Type", "application/octet-stream")
 
-    # Use python-magic to guess MIME type if still unknown or generic
-    if mimetype == "application/octet-stream" and magic is not None:
-        try:
-            mimetype = magic.from_buffer(response.content[:1024], mime=True)
-        except magic.MagicException:
-            pass
-
     extension = os.path.splitext(filename)[1]
 
     # Ensure filename has an extension
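[Note] The removed branch consults python-magic only when the header-derived type is the generic application/octet-stream, and silently keeps the generic type when libmagic is missing. A sketch of the same guard; the helper name is made up:

    try:
        import magic  # python-magic; needs the native libmagic installed
    except ImportError:
        magic = None


    def sniff_mimetype(payload: bytes, header_value: str) -> str:
        """Prefer the declared Content-Type; fall back to libmagic content
        sniffing only for the generic default, as in the removed branch."""
        mimetype = header_value or "application/octet-stream"
        if mimetype == "application/octet-stream" and magic is not None:
            try:
                mimetype = magic.from_buffer(payload[:1024], mime=True)
            except magic.MagicException:
                pass
        return mimetype


    print(sniff_mimetype(b"%PDF-1.7 ...", ""))  # "application/pdf" when libmagic is present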
@@ -2,26 +2,7 @@ from flask import Blueprint
 
 from libs.external_api import ExternalApi
 
-from .app.app_import import AppImportApi, AppImportCheckDependenciesApi, AppImportConfirmApi
-from .explore.audio import ChatAudioApi, ChatTextApi
-from .explore.completion import ChatApi, ChatStopApi, CompletionApi, CompletionStopApi
-from .explore.conversation import (
-    ConversationApi,
-    ConversationListApi,
-    ConversationPinApi,
-    ConversationRenameApi,
-    ConversationUnPinApi,
-)
-from .explore.message import (
-    MessageFeedbackApi,
-    MessageListApi,
-    MessageMoreLikeThisApi,
-    MessageSuggestedQuestionApi,
-)
-from .explore.workflow import (
-    InstalledAppWorkflowRunApi,
-    InstalledAppWorkflowTaskStopApi,
-)
+from .app.app_import import AppImportApi, AppImportConfirmApi
 from .files import FileApi, FilePreviewApi, FileSupportTypeApi
 from .remote_files import RemoteFileInfoApi, RemoteFileUploadApi
 
@@ -40,7 +21,6 @@ api.add_resource(RemoteFileUploadApi, "/remote-files/upload")
 # Import App
 api.add_resource(AppImportApi, "/apps/imports")
 api.add_resource(AppImportConfirmApi, "/apps/imports/<string:import_id>/confirm")
-api.add_resource(AppImportCheckDependenciesApi, "/apps/imports/<string:app_id>/check-dependencies")
 
 # Import other controllers
 from . import admin, apikey, extension, feature, ping, setup, version
@@ -86,96 +66,19 @@ from .datasets import (
 
 # Import explore controllers
 from .explore import (
+    audio,
+    completion,
+    conversation,
     installed_app,
+    message,
     parameter,
     recommended_app,
     saved_message,
-)
-
-# Explore Audio
-api.add_resource(ChatAudioApi, "/installed-apps/<uuid:installed_app_id>/audio-to-text", endpoint="installed_app_audio")
-api.add_resource(ChatTextApi, "/installed-apps/<uuid:installed_app_id>/text-to-audio", endpoint="installed_app_text")
-
-# Explore Completion
-api.add_resource(
-    CompletionApi, "/installed-apps/<uuid:installed_app_id>/completion-messages", endpoint="installed_app_completion"
-)
-api.add_resource(
-    CompletionStopApi,
-    "/installed-apps/<uuid:installed_app_id>/completion-messages/<string:task_id>/stop",
-    endpoint="installed_app_stop_completion",
-)
-api.add_resource(
-    ChatApi, "/installed-apps/<uuid:installed_app_id>/chat-messages", endpoint="installed_app_chat_completion"
-)
-api.add_resource(
-    ChatStopApi,
-    "/installed-apps/<uuid:installed_app_id>/chat-messages/<string:task_id>/stop",
-    endpoint="installed_app_stop_chat_completion",
-)
-
-# Explore Conversation
-api.add_resource(
-    ConversationRenameApi,
-    "/installed-apps/<uuid:installed_app_id>/conversations/<uuid:c_id>/name",
-    endpoint="installed_app_conversation_rename",
-)
-api.add_resource(
-    ConversationListApi, "/installed-apps/<uuid:installed_app_id>/conversations", endpoint="installed_app_conversations"
-)
-api.add_resource(
-    ConversationApi,
-    "/installed-apps/<uuid:installed_app_id>/conversations/<uuid:c_id>",
-    endpoint="installed_app_conversation",
-)
-api.add_resource(
-    ConversationPinApi,
-    "/installed-apps/<uuid:installed_app_id>/conversations/<uuid:c_id>/pin",
-    endpoint="installed_app_conversation_pin",
-)
-api.add_resource(
-    ConversationUnPinApi,
-    "/installed-apps/<uuid:installed_app_id>/conversations/<uuid:c_id>/unpin",
-    endpoint="installed_app_conversation_unpin",
-)
-
-
-# Explore Message
-api.add_resource(MessageListApi, "/installed-apps/<uuid:installed_app_id>/messages", endpoint="installed_app_messages")
-api.add_resource(
-    MessageFeedbackApi,
-    "/installed-apps/<uuid:installed_app_id>/messages/<uuid:message_id>/feedbacks",
-    endpoint="installed_app_message_feedback",
-)
-api.add_resource(
-    MessageMoreLikeThisApi,
-    "/installed-apps/<uuid:installed_app_id>/messages/<uuid:message_id>/more-like-this",
-    endpoint="installed_app_more_like_this",
-)
-api.add_resource(
-    MessageSuggestedQuestionApi,
-    "/installed-apps/<uuid:installed_app_id>/messages/<uuid:message_id>/suggested-questions",
-    endpoint="installed_app_suggested_question",
-)
-# Explore Workflow
-api.add_resource(InstalledAppWorkflowRunApi, "/installed-apps/<uuid:installed_app_id>/workflows/run")
-api.add_resource(
-    InstalledAppWorkflowTaskStopApi, "/installed-apps/<uuid:installed_app_id>/workflows/tasks/<string:task_id>/stop"
+    workflow,
 )
 
 # Import tag controllers
 from .tag import tags
 
 # Import workspace controllers
-from .workspace import (
-    account,
-    agent_providers,
-    endpoint,
-    load_balancing_config,
-    members,
-    model_providers,
-    models,
-    plugin,
-    tool_providers,
-    workspace,
-)
+from .workspace import account, load_balancing_config, members, model_providers, models, tool_providers, workspace
@@ -1,9 +1,7 @@
 from functools import wraps
 
 from flask import request
-from flask_restful import Resource, reqparse  # type: ignore
-from sqlalchemy import select
-from sqlalchemy.orm import Session
+from flask_restful import Resource, reqparse
 from werkzeug.exceptions import NotFound, Unauthorized
 
 from configs import dify_config
@@ -33,7 +31,7 @@ def admin_required(view):
         if auth_scheme != "bearer":
             raise Unauthorized("Invalid Authorization header format. Expected 'Bearer <api-key>' format.")
 
-        if auth_token != dify_config.ADMIN_API_KEY:
+        if dify_config.ADMIN_API_KEY != auth_token:
             raise Unauthorized("API key is invalid.")
 
         return view(*args, **kwargs)
@@ -56,10 +54,9 @@ class InsertExploreAppListApi(Resource):
         parser.add_argument("position", type=int, required=True, nullable=False, location="json")
         args = parser.parse_args()
 
-        with Session(db.engine) as session:
-            app = session.execute(select(App).filter(App.id == args["app_id"])).scalar_one_or_none()
+        app = App.query.filter(App.id == args["app_id"]).first()
 
         if not app:
-            raise NotFound(f"App '{args['app_id']}' is not found")
+            raise NotFound(f'App \'{args["app_id"]}\' is not found')
 
         site = app.site
         if not site:
@@ -73,10 +70,7 @@ class InsertExploreAppListApi(Resource):
         privacy_policy = site.privacy_policy or args["privacy_policy"] or ""
         custom_disclaimer = site.custom_disclaimer or args["custom_disclaimer"] or ""
 
-        with Session(db.engine) as session:
-            recommended_app = session.execute(
-                select(RecommendedApp).filter(RecommendedApp.app_id == args["app_id"])
-            ).scalar_one_or_none()
+        recommended_app = RecommendedApp.query.filter(RecommendedApp.app_id == args["app_id"]).first()
 
         if not recommended_app:
             recommended_app = RecommendedApp(
@@ -116,27 +110,17 @@ class InsertExploreAppApi(Resource):
     @only_edition_cloud
     @admin_required
     def delete(self, app_id):
-        with Session(db.engine) as session:
-            recommended_app = session.execute(
-                select(RecommendedApp).filter(RecommendedApp.app_id == str(app_id))
-            ).scalar_one_or_none()
-
+        recommended_app = RecommendedApp.query.filter(RecommendedApp.app_id == str(app_id)).first()
         if not recommended_app:
            return {"result": "success"}, 204
 
-        with Session(db.engine) as session:
-            app = session.execute(select(App).filter(App.id == recommended_app.app_id)).scalar_one_or_none()
-
+        app = App.query.filter(App.id == recommended_app.app_id).first()
         if app:
             app.is_public = False
 
-        with Session(db.engine) as session:
-            installed_apps = session.execute(
-                select(InstalledApp).filter(
-                    InstalledApp.app_id == recommended_app.app_id,
-                    InstalledApp.tenant_id != InstalledApp.app_owner_tenant_id,
-                )
-            ).all()
+        installed_apps = InstalledApp.query.filter(
+            InstalledApp.app_id == recommended_app.app_id, InstalledApp.tenant_id != InstalledApp.app_owner_tenant_id
+        ).all()
 
         for installed_app in installed_apps:
             db.session.delete(installed_app)

@@ -1,10 +1,6 @@
-from typing import Any
-
-import flask_restful  # type: ignore
-from flask_login import current_user  # type: ignore
+import flask_restful
+from flask_login import current_user
 from flask_restful import Resource, fields, marshal_with
-from sqlalchemy import select
-from sqlalchemy.orm import Session
 from werkzeug.exceptions import Forbidden
 
 from extensions.ext_database import db
@@ -28,16 +24,7 @@ api_key_list = {"data": fields.List(fields.Nested(api_key_fields), attribute="items")}
 
 
 def _get_resource(resource_id, tenant_id, resource_model):
-    if resource_model == App:
-        with Session(db.engine) as session:
-            resource = session.execute(
-                select(resource_model).filter_by(id=resource_id, tenant_id=tenant_id)
-            ).scalar_one_or_none()
-    else:
-        with Session(db.engine) as session:
-            resource = session.execute(
-                select(resource_model).filter_by(id=resource_id, tenant_id=tenant_id)
-            ).scalar_one_or_none()
+    resource = resource_model.query.filter_by(id=resource_id, tenant_id=tenant_id).first()
 
     if resource is None:
         flask_restful.abort(404, message=f"{resource_model.__name__} not found.")
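[Note] The two sides of the hunk above are the same lookup in two dialects: legacy Flask-SQLAlchemy Model.query.filter_by(...).first() versus SQLAlchemy 2.0 Session/select(). A self-contained sketch of the 2.0 form with a throwaway model:

    from sqlalchemy import create_engine, select
    from sqlalchemy.orm import DeclarativeBase, Mapped, Session, mapped_column


    class Base(DeclarativeBase):
        pass


    class Thing(Base):  # stand-in for the App/ApiToken models in the hunk
        __tablename__ = "things"
        id: Mapped[int] = mapped_column(primary_key=True)
        tenant_id: Mapped[str] = mapped_column()


    engine = create_engine("sqlite://")
    Base.metadata.create_all(engine)

    with Session(engine) as session:
        session.add(Thing(id=1, tenant_id="t1"))
        session.commit()
        # 2.0-style lookup, same shape as the `-` side of the hunk; the legacy
        # `+` side would read Thing.query.filter_by(...).first().
        row = session.execute(
            select(Thing).filter_by(id=1, tenant_id="t1")
        ).scalar_one_or_none()
        print(row is not None)  # True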
@@ -48,15 +35,14 @@ def _get_resource(resource_id, tenant_id, resource_model):
|
|||||||
class BaseApiKeyListResource(Resource):
|
class BaseApiKeyListResource(Resource):
|
||||||
method_decorators = [account_initialization_required, login_required, setup_required]
|
method_decorators = [account_initialization_required, login_required, setup_required]
|
||||||
|
|
||||||
resource_type: str | None = None
|
resource_type = None
|
||||||
resource_model: Any = None
|
resource_model = None
|
||||||
resource_id_field: str | None = None
|
resource_id_field = None
|
||||||
token_prefix: str | None = None
|
token_prefix = None
|
||||||
max_keys = 10
|
max_keys = 10
|
||||||
|
|
||||||
@marshal_with(api_key_list)
|
@marshal_with(api_key_list)
|
||||||
def get(self, resource_id):
|
def get(self, resource_id):
|
||||||
assert self.resource_id_field is not None, "resource_id_field must be set"
|
|
||||||
resource_id = str(resource_id)
|
resource_id = str(resource_id)
|
||||||
_get_resource(resource_id, current_user.current_tenant_id, self.resource_model)
|
_get_resource(resource_id, current_user.current_tenant_id, self.resource_model)
|
||||||
keys = (
|
keys = (
|
||||||
@@ -68,7 +54,6 @@ class BaseApiKeyListResource(Resource):
|
|||||||
|
|
||||||
@marshal_with(api_key_fields)
|
@marshal_with(api_key_fields)
|
||||||
def post(self, resource_id):
|
def post(self, resource_id):
|
||||||
assert self.resource_id_field is not None, "resource_id_field must be set"
|
|
||||||
resource_id = str(resource_id)
|
resource_id = str(resource_id)
|
||||||
_get_resource(resource_id, current_user.current_tenant_id, self.resource_model)
|
_get_resource(resource_id, current_user.current_tenant_id, self.resource_model)
|
||||||
if not current_user.is_editor:
|
if not current_user.is_editor:
|
||||||
@@ -101,12 +86,11 @@ class BaseApiKeyListResource(Resource):
|
|||||||
class BaseApiKeyResource(Resource):
|
class BaseApiKeyResource(Resource):
|
||||||
method_decorators = [account_initialization_required, login_required, setup_required]
|
method_decorators = [account_initialization_required, login_required, setup_required]
|
||||||
|
|
||||||
resource_type: str | None = None
|
resource_type = None
|
||||||
resource_model: Any = None
|
resource_model = None
|
||||||
resource_id_field: str | None = None
|
resource_id_field = None
|
||||||
|
|
||||||
def delete(self, resource_id, api_key_id):
|
def delete(self, resource_id, api_key_id):
|
||||||
assert self.resource_id_field is not None, "resource_id_field must be set"
|
|
||||||
resource_id = str(resource_id)
|
resource_id = str(resource_id)
|
||||||
api_key_id = str(api_key_id)
|
api_key_id = str(api_key_id)
|
||||||
_get_resource(resource_id, current_user.current_tenant_id, self.resource_model)
|
_get_resource(resource_id, current_user.current_tenant_id, self.resource_model)
|
||||||
|
|||||||
@@ -1,4 +1,4 @@
-from flask_restful import Resource, reqparse  # type: ignore
+from flask_restful import Resource, reqparse
 
 from controllers.console import api
 from controllers.console.wraps import account_initialization_required, setup_required

@@ -1,4 +1,4 @@
-from flask_restful import Resource, reqparse  # type: ignore
+from flask_restful import Resource, reqparse
 
 from controllers.console import api
 from controllers.console.app.wraps import get_app_model

@@ -1,6 +1,6 @@
 from flask import request
-from flask_login import current_user  # type: ignore
-from flask_restful import Resource, marshal, marshal_with, reqparse  # type: ignore
+from flask_login import current_user
+from flask_restful import Resource, marshal, marshal_with, reqparse
 from werkzeug.exceptions import Forbidden
 
 from controllers.console import api

@@ -110,7 +110,7 @@ class AnnotationListApi(Resource):
 
         page = request.args.get("page", default=1, type=int)
         limit = request.args.get("limit", default=20, type=int)
-        keyword = request.args.get("keyword", default="", type=str)
+        keyword = request.args.get("keyword", default=None, type=str)
 
         app_id = str(app_id)
         annotation_list, total = AppAnnotationService.get_annotation_list_by_app_id(app_id, page, limit, keyword)
@@ -1,8 +1,8 @@
 import uuid
 from typing import cast
 
-from flask_login import current_user  # type: ignore
-from flask_restful import Resource, inputs, marshal, marshal_with, reqparse  # type: ignore
+from flask_login import current_user
+from flask_restful import Resource, inputs, marshal, marshal_with, reqparse
 from sqlalchemy import select
 from sqlalchemy.orm import Session
 from werkzeug.exceptions import BadRequest, Forbidden, abort

@@ -57,13 +57,12 @@ class AppListApi(Resource):
         )
         parser.add_argument("name", type=str, location="args", required=False)
         parser.add_argument("tag_ids", type=uuid_list, location="args", required=False)
-        parser.add_argument("is_created_by_me", type=inputs.boolean, location="args", required=False)
 
         args = parser.parse_args()
 
         # get app list
         app_service = AppService()
-        app_pagination = app_service.get_paginate_apps(current_user.id, current_user.current_tenant_id, args)
+        app_pagination = app_service.get_paginate_apps(current_user.current_tenant_id, args)
         if not app_pagination:
             return {"data": [], "total": 0, "page": 1, "limit": 20, "has_more": False}
 
@@ -1,20 +1,18 @@
 from typing import cast
 
-from flask_login import current_user  # type: ignore
-from flask_restful import Resource, marshal_with, reqparse  # type: ignore
+from flask_login import current_user
+from flask_restful import Resource, marshal_with, reqparse
 from sqlalchemy.orm import Session
 from werkzeug.exceptions import Forbidden
 
-from controllers.console.app.wraps import get_app_model
 from controllers.console.wraps import (
     account_initialization_required,
     setup_required,
 )
 from extensions.ext_database import db
-from fields.app_fields import app_import_check_dependencies_fields, app_import_fields
+from fields.app_fields import app_import_fields
 from libs.login import login_required
 from models import Account
-from models.model import App
 from services.app_dsl_service import AppDslService, ImportStatus
 
 

@@ -90,20 +88,3 @@ class AppImportConfirmApi(Resource):
         if result.status == ImportStatus.FAILED.value:
             return result.model_dump(mode="json"), 400
         return result.model_dump(mode="json"), 200
-
-
-class AppImportCheckDependenciesApi(Resource):
-    @setup_required
-    @login_required
-    @get_app_model
-    @account_initialization_required
-    @marshal_with(app_import_check_dependencies_fields)
-    def get(self, app_model: App):
-        if not current_user.is_editor:
-            raise Forbidden()
-
-        with Session(db.engine) as session:
-            import_service = AppDslService(session)
-            result = import_service.check_dependencies(app_model=app_model)
-
-        return result.model_dump(mode="json"), 200
@@ -1,7 +1,7 @@
 import logging
 
 from flask import request
-from flask_restful import Resource, reqparse  # type: ignore
+from flask_restful import Resource, reqparse
 from werkzeug.exceptions import InternalServerError
 
 import services

@@ -22,7 +22,7 @@ from controllers.console.wraps import account_initialization_required, setup_req
 from core.errors.error import ModelCurrentlyNotSupportError, ProviderTokenNotInitError, QuotaExceededError
 from core.model_runtime.errors.invoke import InvokeError
 from libs.login import login_required
-from models import App, AppMode
+from models.model import AppMode
 from services.audio_service import AudioService
 from services.errors.audio import (
     AudioTooLargeServiceError,

@@ -79,7 +79,7 @@ class ChatMessageTextApi(Resource):
     @login_required
     @account_initialization_required
     @get_app_model
-    def post(self, app_model: App):
+    def post(self, app_model):
         from werkzeug.exceptions import InternalServerError
 
         try:

@@ -98,13 +98,9 @@ class ChatMessageTextApi(Resource):
                 and app_model.workflow.features_dict
             ):
                 text_to_speech = app_model.workflow.features_dict.get("text_to_speech")
-                if text_to_speech is None:
-                    raise ValueError("TTS is not enabled")
                 voice = args.get("voice") or text_to_speech.get("voice")
             else:
                 try:
-                    if app_model.app_model_config is None:
-                        raise ValueError("AppModelConfig not found")
                     voice = args.get("voice") or app_model.app_model_config.text_to_speech_dict.get("voice")
                 except Exception:
                     voice = None
@@ -1,7 +1,7 @@
 import logging
 
-import flask_login  # type: ignore
-from flask_restful import Resource, reqparse  # type: ignore
+import flask_login
+from flask_restful import Resource, reqparse
 from werkzeug.exceptions import InternalServerError, NotFound
 
 import services

@@ -20,6 +20,7 @@ from controllers.web.error import InvokeRateLimitError as InvokeRateLimitHttpErr
 from core.app.apps.base_app_queue_manager import AppQueueManager
 from core.app.entities.app_invoke_entities import InvokeFrom
 from core.errors.error import (
+    AppInvokeQuotaExceededError,
     ModelCurrentlyNotSupportError,
     ProviderTokenNotInitError,
     QuotaExceededError,

@@ -75,7 +76,7 @@ class CompletionMessageApi(Resource):
             raise ProviderModelCurrentlyNotSupportError()
         except InvokeError as e:
             raise CompletionRequestError(e.description)
-        except ValueError as e:
+        except (ValueError, AppInvokeQuotaExceededError) as e:
             raise e
         except Exception as e:
             logging.exception("internal server error.")

@@ -140,7 +141,7 @@ class ChatMessageApi(Resource):
             raise InvokeRateLimitHttpError(ex.description)
         except InvokeError as e:
             raise CompletionRequestError(e.description)
-        except ValueError as e:
+        except (ValueError, AppInvokeQuotaExceededError) as e:
             raise e
         except Exception as e:
             logging.exception("internal server error.")
@@ -1,9 +1,9 @@
 from datetime import UTC, datetime
 
-import pytz  # pip install pytz
-from flask_login import current_user  # type: ignore
-from flask_restful import Resource, marshal_with, reqparse  # type: ignore
-from flask_restful.inputs import int_range  # type: ignore
+import pytz
+from flask_login import current_user
+from flask_restful import Resource, marshal_with, reqparse
+from flask_restful.inputs import int_range
 from sqlalchemy import func, or_
 from sqlalchemy.orm import joinedload
 from werkzeug.exceptions import Forbidden, NotFound

@@ -77,9 +77,8 @@ class CompletionConversationApi(Resource):
 
             query = query.where(Conversation.created_at < end_datetime_utc)
 
-        # FIXME, the type ignore in this file
         if args["annotation_status"] == "annotated":
-            query = query.options(joinedload(Conversation.message_annotations)).join(  # type: ignore
+            query = query.options(joinedload(Conversation.message_annotations)).join(
                 MessageAnnotation, MessageAnnotation.conversation_id == Conversation.id
             )
         elif args["annotation_status"] == "not_annotated":

@@ -223,7 +222,7 @@ class ChatConversationApi(Resource):
             query = query.where(Conversation.created_at <= end_datetime_utc)
 
         if args["annotation_status"] == "annotated":
-            query = query.options(joinedload(Conversation.message_annotations)).join(  # type: ignore
+            query = query.options(joinedload(Conversation.message_annotations)).join(
                 MessageAnnotation, MessageAnnotation.conversation_id == Conversation.id
             )
         elif args["annotation_status"] == "not_annotated":

@@ -235,7 +234,7 @@ class ChatConversationApi(Resource):
 
         if args["message_count_gte"] and args["message_count_gte"] >= 1:
             query = (
-                query.options(joinedload(Conversation.messages))  # type: ignore
+                query.options(joinedload(Conversation.messages))
                 .join(Message, Message.conversation_id == Conversation.id)
                 .group_by(Conversation.id)
                 .having(func.count(Message.id) >= args["message_count_gte"])
@@ -1,4 +1,4 @@
-from flask_restful import Resource, marshal_with, reqparse  # type: ignore
+from flask_restful import Resource, marshal_with, reqparse
 from sqlalchemy import select
 from sqlalchemy.orm import Session
 
@@ -1,7 +1,7 @@
 import os
 
-from flask_login import current_user  # type: ignore
-from flask_restful import Resource, reqparse  # type: ignore
+from flask_login import current_user
+from flask_restful import Resource, reqparse
 
 from controllers.console import api
 from controllers.console.app.error import (

@@ -1,8 +1,8 @@
 import logging
 
-from flask_login import current_user  # type: ignore
-from flask_restful import Resource, fields, marshal_with, reqparse  # type: ignore
-from flask_restful.inputs import int_range  # type: ignore
+from flask_login import current_user
+from flask_restful import Resource, fields, marshal_with, reqparse
+from flask_restful.inputs import int_range
 from werkzeug.exceptions import Forbidden, InternalServerError, NotFound
 
 from controllers.console import api
@@ -1,9 +1,8 @@
 import json
-from typing import cast
 
 from flask import request
-from flask_login import current_user  # type: ignore
-from flask_restful import Resource  # type: ignore
+from flask_login import current_user
+from flask_restful import Resource
 
 from controllers.console import api
 from controllers.console.app.wraps import get_app_model

@@ -27,9 +26,7 @@ class ModelConfigResource(Resource):
         """Modify app model config"""
         # validate config
         model_configuration = AppModelConfigService.validate_configuration(
-            tenant_id=current_user.current_tenant_id,
-            config=cast(dict, request.json),
-            app_mode=AppMode.value_of(app_model.mode),
+            tenant_id=current_user.current_tenant_id, config=request.json, app_mode=AppMode.value_of(app_model.mode)
         )
 
         new_app_model_config = AppModelConfig(

@@ -41,11 +38,9 @@ class ModelConfigResource(Resource):
 
         if app_model.mode == AppMode.AGENT_CHAT.value or app_model.is_agent:
             # get original app model config
-            original_app_model_config = (
+            original_app_model_config: AppModelConfig = (
                 db.session.query(AppModelConfig).filter(AppModelConfig.id == app_model.app_model_config_id).first()
             )
-            if original_app_model_config is None:
-                raise ValueError("Original app model config not found")
             agent_mode = original_app_model_config.agent_mode_dict
             # decrypt agent tool parameters if it's secret-input
             parameter_map = {}

@@ -70,7 +65,7 @@ class ModelConfigResource(Resource):
                         provider_type=agent_tool_entity.provider_type,
                         identity_id=f"AGENT.{app_model.id}",
                     )
-                except Exception:
+                except Exception as e:
                     continue
 
                 # get decrypted parameters

@@ -102,7 +97,7 @@ class ModelConfigResource(Resource):
                         app_id=app_model.id,
                         agent_tool=agent_tool_entity,
                     )
-                except Exception:
+                except Exception as e:
                     continue
 
                 manager = ToolParameterConfigurationManager(
@@ -1,5 +1,4 @@
-from flask_restful import Resource, reqparse  # type: ignore
-from werkzeug.exceptions import BadRequest
+from flask_restful import Resource, reqparse
 
 from controllers.console import api
 from controllers.console.app.error import TracingConfigCheckError, TracingConfigIsExist, TracingConfigNotExist

@@ -27,7 +26,7 @@ class TraceAppConfigApi(Resource):
                 return {"has_not_configured": True}
             return trace_config
         except Exception as e:
-            raise BadRequest(str(e))
+            raise e
 
     @setup_required
     @login_required

@@ -49,7 +48,7 @@ class TraceAppConfigApi(Resource):
                 raise TracingConfigCheckError()
             return result
         except Exception as e:
-            raise BadRequest(str(e))
+            raise e
 
     @setup_required
     @login_required

@@ -69,7 +68,7 @@ class TraceAppConfigApi(Resource):
                 raise TracingConfigNotExist()
             return {"result": "success"}
         except Exception as e:
-            raise BadRequest(str(e))
+            raise e
 
     @setup_required
     @login_required

@@ -86,7 +85,7 @@ class TraceAppConfigApi(Resource):
                 raise TracingConfigNotExist()
             return {"result": "success"}
         except Exception as e:
-            raise BadRequest(str(e))
+            raise e
 
 
 api.add_resource(TraceAppConfigApi, "/apps/<uuid:app_id>/trace-config")
@@ -1,8 +1,7 @@
 from datetime import UTC, datetime
 
-from flask_login import current_user  # type: ignore
-from flask_restful import Resource, marshal_with, reqparse  # type: ignore
-from sqlalchemy.orm import Session
+from flask_login import current_user
+from flask_restful import Resource, marshal_with, reqparse
 from werkzeug.exceptions import Forbidden, NotFound
 
 from constants.languages import supported_language

@@ -51,37 +50,33 @@ class AppSite(Resource):
         if not current_user.is_editor:
             raise Forbidden()
 
-        with Session(db.engine) as session:
-            site = session.query(Site).filter(Site.app_id == app_model.id).first()
-
-            if not site:
-                raise NotFound
-
-            for attr_name in [
-                "title",
-                "icon_type",
-                "icon",
-                "icon_background",
-                "description",
-                "default_language",
-                "chat_color_theme",
-                "chat_color_theme_inverted",
-                "customize_domain",
-                "copyright",
-                "privacy_policy",
-                "custom_disclaimer",
-                "customize_token_strategy",
-                "prompt_public",
-                "show_workflow_steps",
-                "use_icon_as_answer_icon",
-            ]:
-                value = args.get(attr_name)
-                if value is not None:
-                    setattr(site, attr_name, value)
-
-            site.updated_by = current_user.id
-            site.updated_at = datetime.now(UTC).replace(tzinfo=None)
-            session.commit()
+        site = db.session.query(Site).filter(Site.app_id == app_model.id).one_or_404()
+
+        for attr_name in [
+            "title",
+            "icon_type",
+            "icon",
+            "icon_background",
+            "description",
+            "default_language",
+            "chat_color_theme",
+            "chat_color_theme_inverted",
+            "customize_domain",
+            "copyright",
+            "privacy_policy",
+            "custom_disclaimer",
+            "customize_token_strategy",
+            "prompt_public",
+            "show_workflow_steps",
+            "use_icon_as_answer_icon",
+        ]:
+            value = args.get(attr_name)
+            if value is not None:
+                setattr(site, attr_name, value)
+
+        site.updated_by = current_user.id
+        site.updated_at = datetime.now(UTC).replace(tzinfo=None)
+        db.session.commit()
 
         return site
 
@@ -3,8 +3,8 @@ from decimal import Decimal
 
 import pytz
 from flask import jsonify
-from flask_login import current_user  # type: ignore
-from flask_restful import Resource, reqparse  # type: ignore
+from flask_login import current_user
+from flask_restful import Resource, reqparse
 
 from controllers.console import api
 from controllers.console.app.wraps import get_app_model
@@ -273,7 +273,8 @@ FROM
     messages m
     ON c.id = m.conversation_id
 WHERE
-    c.app_id = :app_id"""
+    c.override_model_configs IS NULL
+    AND c.app_id = :app_id"""
         arg_dict = {"tz": account.timezone, "app_id": app_model.id}
 
         timezone = pytz.timezone(account.timezone)
@@ -2,11 +2,10 @@ import json
 import logging
 
 from flask import abort, request
-from flask_restful import Resource, inputs, marshal_with, reqparse  # type: ignore
+from flask_restful import Resource, marshal_with, reqparse
 from werkzeug.exceptions import Forbidden, InternalServerError, NotFound
 
 import services
-from configs import dify_config
 from controllers.console import api
 from controllers.console.app.error import ConversationCompletedError, DraftWorkflowNotExist, DraftWorkflowNotSync
 from controllers.console.app.wraps import get_app_model

@@ -14,13 +13,12 @@ from controllers.console.wraps import account_initialization_required, setup_req
 from core.app.apps.base_app_queue_manager import AppQueueManager
 from core.app.entities.app_invoke_entities import InvokeFrom
 from factories import variable_factory
-from fields.workflow_fields import workflow_fields, workflow_pagination_fields
+from fields.workflow_fields import workflow_fields
 from fields.workflow_run_fields import workflow_run_node_execution_fields
 from libs import helper
 from libs.helper import TimestampField, uuid_value
 from libs.login import current_user, login_required
 from models import App
-from models.account import Account
 from models.model import AppMode
 from services.app_generate_service import AppGenerateService
 from services.errors.app import WorkflowHashNotEqualError

@@ -97,19 +95,16 @@ class DraftWorkflowApi(Resource):
         else:
             abort(415)
 
-        if not isinstance(current_user, Account):
-            raise Forbidden()
-
         workflow_service = WorkflowService()
 
         try:
             environment_variables_list = args.get("environment_variables") or []
             environment_variables = [
-                variable_factory.build_environment_variable_from_mapping(obj) for obj in environment_variables_list
+                variable_factory.build_variable_from_mapping(obj) for obj in environment_variables_list
             ]
             conversation_variables_list = args.get("conversation_variables") or []
             conversation_variables = [
-                variable_factory.build_conversation_variable_from_mapping(obj) for obj in conversation_variables_list
+                variable_factory.build_variable_from_mapping(obj) for obj in conversation_variables_list
             ]
             workflow = workflow_service.sync_draft_workflow(
                 app_model=app_model,
@@ -143,9 +138,6 @@ class AdvancedChatDraftWorkflowRunApi(Resource):
         if not current_user.is_editor:
             raise Forbidden()
 
-        if not isinstance(current_user, Account):
-            raise Forbidden()
-
         parser = reqparse.RequestParser()
         parser.add_argument("inputs", type=dict, location="json")
         parser.add_argument("query", type=str, required=True, location="json", default="")

@@ -167,7 +159,7 @@ class AdvancedChatDraftWorkflowRunApi(Resource):
             raise ConversationCompletedError()
         except ValueError as e:
             raise e
-        except Exception:
+        except Exception as e:
             logging.exception("internal server error.")
             raise InternalServerError()
 

@@ -185,9 +177,6 @@ class AdvancedChatDraftRunIterationNodeApi(Resource):
         if not current_user.is_editor:
             raise Forbidden()
 
-        if not isinstance(current_user, Account):
-            raise Forbidden()
-
         parser = reqparse.RequestParser()
         parser.add_argument("inputs", type=dict, location="json")
         args = parser.parse_args()

@@ -204,7 +193,7 @@ class AdvancedChatDraftRunIterationNodeApi(Resource):
             raise ConversationCompletedError()
         except ValueError as e:
             raise e
-        except Exception:
+        except Exception as e:
             logging.exception("internal server error.")
             raise InternalServerError()
 

@@ -222,9 +211,6 @@ class WorkflowDraftRunIterationNodeApi(Resource):
         if not current_user.is_editor:
             raise Forbidden()
 
-        if not isinstance(current_user, Account):
-            raise Forbidden()
-
         parser = reqparse.RequestParser()
         parser.add_argument("inputs", type=dict, location="json")
         args = parser.parse_args()

@@ -241,7 +227,7 @@ class WorkflowDraftRunIterationNodeApi(Resource):
             raise ConversationCompletedError()
         except ValueError as e:
             raise e
-        except Exception:
+        except Exception as e:
             logging.exception("internal server error.")
             raise InternalServerError()
 

@@ -259,9 +245,6 @@ class DraftWorkflowRunApi(Resource):
         if not current_user.is_editor:
             raise Forbidden()
 
-        if not isinstance(current_user, Account):
-            raise Forbidden()
-
         parser = reqparse.RequestParser()
         parser.add_argument("inputs", type=dict, required=True, nullable=False, location="json")
         parser.add_argument("files", type=list, required=False, location="json")
@@ -310,20 +293,13 @@ class DraftWorkflowNodeRunApi(Resource):
         if not current_user.is_editor:
             raise Forbidden()
 
-        if not isinstance(current_user, Account):
-            raise Forbidden()
-
         parser = reqparse.RequestParser()
         parser.add_argument("inputs", type=dict, required=True, nullable=False, location="json")
         args = parser.parse_args()
 
-        inputs = args.get("inputs")
-        if inputs == None:
-            raise ValueError("missing inputs")
-
         workflow_service = WorkflowService()
         workflow_node_execution = workflow_service.run_draft_workflow_node(
-            app_model=app_model, node_id=node_id, user_inputs=inputs, account=current_user
+            app_model=app_model, node_id=node_id, user_inputs=args.get("inputs"), account=current_user
         )
 
         return workflow_node_execution
@@ -362,9 +338,6 @@ class PublishedWorkflowApi(Resource):
         if not current_user.is_editor:
             raise Forbidden()
 
-        if not isinstance(current_user, Account):
-            raise Forbidden()
-
         workflow_service = WorkflowService()
         workflow = workflow_service.publish_workflow(app_model=app_model, account=current_user)
 

@@ -402,19 +375,14 @@ class DefaultBlockConfigApi(Resource):
         if not current_user.is_editor:
             raise Forbidden()
 
-        if not isinstance(current_user, Account):
-            raise Forbidden()
-
         parser = reqparse.RequestParser()
         parser.add_argument("q", type=str, location="args")
         args = parser.parse_args()
 
-        q = args.get("q")
-
         filters = None
-        if q:
+        if args.get("q"):
             try:
-                filters = json.loads(args.get("q", ""))
+                filters = json.loads(args.get("q"))
             except json.JSONDecodeError:
                 raise ValueError("Invalid filters")
 

@@ -438,9 +406,6 @@ class ConvertToWorkflowApi(Resource):
         if not current_user.is_editor:
             raise Forbidden()
 
-        if not isinstance(current_user, Account):
-            raise Forbidden()
-
         if request.data:
             parser = reqparse.RequestParser()
             parser.add_argument("name", type=str, required=False, nullable=True, location="json")

@@ -461,46 +426,7 @@ class ConvertToWorkflowApi(Resource):
         }
 
 
-class WorkflowConfigApi(Resource):
-    """Resource for workflow configuration."""
-
-    @setup_required
-    @login_required
-    @account_initialization_required
-    @get_app_model(mode=[AppMode.ADVANCED_CHAT, AppMode.WORKFLOW])
-    def get(self, app_model: App):
-        return {
-            "parallel_depth_limit": dify_config.WORKFLOW_PARALLEL_DEPTH_LIMIT,
-        }
-
-
-class PublishedAllWorkflowApi(Resource):
-    @setup_required
-    @login_required
-    @account_initialization_required
-    @get_app_model(mode=[AppMode.ADVANCED_CHAT, AppMode.WORKFLOW])
-    @marshal_with(workflow_pagination_fields)
-    def get(self, app_model: App):
-        """
-        Get published workflows
-        """
-        if not current_user.is_editor:
-            raise Forbidden()
-
-        parser = reqparse.RequestParser()
-        parser.add_argument("page", type=inputs.int_range(1, 99999), required=False, default=1, location="args")
-        parser.add_argument("limit", type=inputs.int_range(1, 100), required=False, default=20, location="args")
-        args = parser.parse_args()
-        page = args.get("page")
-        limit = args.get("limit")
-        workflow_service = WorkflowService()
-        workflows, has_more = workflow_service.get_all_published_workflow(app_model=app_model, page=page, limit=limit)
-
-        return {"items": workflows, "page": page, "limit": limit, "has_more": has_more}
-
-
 api.add_resource(DraftWorkflowApi, "/apps/<uuid:app_id>/workflows/draft")
-api.add_resource(WorkflowConfigApi, "/apps/<uuid:app_id>/workflows/draft/config")
 api.add_resource(AdvancedChatDraftWorkflowRunApi, "/apps/<uuid:app_id>/advanced-chat/workflows/draft/run")
 api.add_resource(DraftWorkflowRunApi, "/apps/<uuid:app_id>/workflows/draft/run")
 api.add_resource(WorkflowTaskStopApi, "/apps/<uuid:app_id>/workflow-runs/tasks/<string:task_id>/stop")

@@ -513,7 +439,6 @@ api.add_resource(
     WorkflowDraftRunIterationNodeApi, "/apps/<uuid:app_id>/workflows/draft/iteration/nodes/<string:node_id>/run"
 )
 api.add_resource(PublishedWorkflowApi, "/apps/<uuid:app_id>/workflows/publish")
-api.add_resource(PublishedAllWorkflowApi, "/apps/<uuid:app_id>/workflows")
 api.add_resource(DefaultBlockConfigsApi, "/apps/<uuid:app_id>/workflows/default-workflow-block-configs")
 api.add_resource(
     DefaultBlockConfigApi, "/apps/<uuid:app_id>/workflows/default-workflow-block-configs/<string:block_type>"
@@ -1,5 +1,5 @@
-from flask_restful import Resource, marshal_with, reqparse  # type: ignore
-from flask_restful.inputs import int_range  # type: ignore
+from flask_restful import Resource, marshal_with, reqparse
+from flask_restful.inputs import int_range
 
 from controllers.console import api
 from controllers.console.app.wraps import get_app_model

@@ -1,5 +1,5 @@
-from flask_restful import Resource, marshal_with, reqparse  # type: ignore
-from flask_restful.inputs import int_range  # type: ignore
+from flask_restful import Resource, marshal_with, reqparse
+from flask_restful.inputs import int_range
 
 from controllers.console import api
 from controllers.console.app.wraps import get_app_model

@@ -3,8 +3,8 @@ from decimal import Decimal
 
 import pytz
 from flask import jsonify
-from flask_login import current_user  # type: ignore
-from flask_restful import Resource, reqparse  # type: ignore
+from flask_login import current_user
+from flask_restful import Resource, reqparse
 
 from controllers.console import api
 from controllers.console.app.wraps import get_app_model

@@ -5,10 +5,11 @@ from typing import Optional, Union
 from controllers.console.app.error import AppNotFoundError
 from extensions.ext_database import db
 from libs.login import current_user
-from models import App, AppMode
+from models import App
+from models.model import AppMode
 
 
-def get_app_model(view: Optional[Callable] = None, *, mode: Union[AppMode, list[AppMode], None] = None):
+def get_app_model(view: Optional[Callable] = None, *, mode: Union[AppMode, list[AppMode]] = None):
     def decorator(view_func):
         @wraps(view_func)
         def decorated_view(*args, **kwargs):
@@ -1,14 +1,14 @@
 import datetime
 
 from flask import request
-from flask_restful import Resource, reqparse  # type: ignore
+from flask_restful import Resource, reqparse
 
 from constants.languages import supported_language
 from controllers.console import api
 from controllers.console.error import AlreadyActivateError
 from extensions.ext_database import db
 from libs.helper import StrLen, email, extract_remote_ip, timezone
-from models.account import AccountStatus
+from models.account import AccountStatus, Tenant
 from services.account_service import AccountService, RegisterService
 
 

@@ -27,7 +27,7 @@ class ActivateCheckApi(Resource):
         invitation = RegisterService.get_invitation_if_token_valid(workspaceId, reg_email, token)
         if invitation:
             data = invitation.get("data", {})
-            tenant = invitation.get("tenant", None)
+            tenant: Tenant = invitation.get("tenant", None)
             workspace_name = tenant.name if tenant else None
             workspace_id = tenant.id if tenant else None
             invitee_email = data.get("email") if data else None
@@ -1,5 +1,5 @@
-from flask_login import current_user  # type: ignore
-from flask_restful import Resource, reqparse  # type: ignore
+from flask_login import current_user
+from flask_restful import Resource, reqparse
 from werkzeug.exceptions import Forbidden
 
 from controllers.console import api

@@ -2,8 +2,8 @@ import logging
 
 import requests
 from flask import current_app, redirect, request
-from flask_login import current_user  # type: ignore
-from flask_restful import Resource  # type: ignore
+from flask_login import current_user
+from flask_restful import Resource
 from werkzeug.exceptions import Forbidden
 
 from configs import dify_config

@@ -17,8 +17,8 @@ from ..wraps import account_initialization_required, setup_required
 def get_oauth_providers():
     with current_app.app_context():
         notion_oauth = NotionOAuth(
-            client_id=dify_config.NOTION_CLIENT_ID or "",
-            client_secret=dify_config.NOTION_CLIENT_SECRET or "",
+            client_id=dify_config.NOTION_CLIENT_ID,
+            client_secret=dify_config.NOTION_CLIENT_SECRET,
             redirect_uri=dify_config.CONSOLE_API_URL + "/console/api/oauth/data-source/callback/notion",
         )
 
@@ -53,15 +53,3 @@ class EmailCodeLoginRateLimitExceededError(BaseHTTPException):
     error_code = "email_code_login_rate_limit_exceeded"
     description = "Too many login emails have been sent. Please try again in 5 minutes."
     code = 429
-
-
-class EmailCodeAccountDeletionRateLimitExceededError(BaseHTTPException):
-    error_code = "email_code_account_deletion_rate_limit_exceeded"
-    description = "Too many account deletion emails have been sent. Please try again in 5 minutes."
-    code = 429
-
-
-class EmailPasswordResetLimitError(BaseHTTPException):
-    error_code = "email_password_reset_limit"
-    description = "Too many failed password reset attempts. Please try again in 24 hours."
-    code = 429
@@ -2,20 +2,17 @@ import base64
 import secrets
 
 from flask import request
-from flask_restful import Resource, reqparse  # type: ignore
-from sqlalchemy import select
-from sqlalchemy.orm import Session
+from flask_restful import Resource, reqparse
 
 from constants.languages import languages
 from controllers.console import api
 from controllers.console.auth.error import (
     EmailCodeError,
-    EmailPasswordResetLimitError,
     InvalidEmailError,
     InvalidTokenError,
     PasswordMismatchError,
 )
-from controllers.console.error import AccountInFreezeError, AccountNotFound, EmailSendIpLimitError
+from controllers.console.error import AccountNotFound, EmailSendIpLimitError
 from controllers.console.wraps import setup_required
 from events.tenant_event import tenant_was_created
 from extensions.ext_database import db

@@ -23,7 +20,6 @@ from libs.helper import email, extract_remote_ip
 from libs.password import hash_password, valid_password
 from models.account import Account
 from services.account_service import AccountService, TenantService
-from services.errors.account import AccountRegisterError
 from services.errors.workspace import WorkSpaceNotAllowedCreateError
 from services.feature_service import FeatureService
 

@@ -45,8 +41,7 @@ class ForgotPasswordSendEmailApi(Resource):
         else:
             language = "en-US"
 
-        with Session(db.engine) as session:
-            account = session.execute(select(Account).filter_by(email=args["email"])).scalar_one_or_none()
+        account = Account.query.filter_by(email=args["email"]).first()
         token = None
         if account is None:
             if FeatureService.get_system_features().is_allow_register:

@@ -71,10 +66,6 @@ class ForgotPasswordCheckApi(Resource):
 
         user_email = args["email"]
 
-        is_forgot_password_error_rate_limit = AccountService.is_forgot_password_error_rate_limit(args["email"])
-        if is_forgot_password_error_rate_limit:
-            raise EmailPasswordResetLimitError()
-
         token_data = AccountService.get_reset_password_data(args["token"])
         if token_data is None:
             raise InvalidTokenError()

@@ -83,10 +74,8 @@ class ForgotPasswordCheckApi(Resource):
             raise InvalidEmailError()
 
         if args["code"] != token_data.get("code"):
-            AccountService.add_forgot_password_error_rate_limit(args["email"])
             raise EmailCodeError()
 
-        AccountService.reset_forgot_password_error_rate_limit(args["email"])
         return {"is_valid": True, "email": token_data.get("email")}
 
 

@@ -119,8 +108,7 @@ class ForgotPasswordResetApi(Resource):
         password_hashed = hash_password(new_password, salt)
         base64_password_hashed = base64.b64encode(password_hashed).decode()
 
-        with Session(db.engine) as session:
-            account = session.execute(select(Account).filter_by(email=reset_data.get("email"))).scalar_one_or_none()
+        account = Account.query.filter_by(email=reset_data.get("email")).first()
         if account:
             account.password = base64_password_hashed
             account.password_salt = base64_salt

@@ -134,15 +122,13 @@ class ForgotPasswordResetApi(Resource):
         else:
             try:
                 account = AccountService.create_account_and_tenant(
-                    email=reset_data.get("email", ""),
-                    name=reset_data.get("email", ""),
+                    email=reset_data.get("email"),
+                    name=reset_data.get("email"),
                     password=password_confirm,
                     interface_language=languages[0],
                 )
             except WorkSpaceNotAllowedCreateError:
                 pass
-            except AccountRegisterError:
-                raise AccountInFreezeError()
 
         return {"result": "success"}
 
@@ -1,11 +1,10 @@
 from typing import cast
 
-import flask_login  # type: ignore
+import flask_login
 from flask import request
-from flask_restful import Resource, reqparse  # type: ignore
+from flask_restful import Resource, reqparse
 
 import services
-from configs import dify_config
 from constants.languages import languages
 from controllers.console import api
 from controllers.console.auth.error import (

@@ -17,7 +16,6 @@ from controllers.console.auth.error import (
 )
 from controllers.console.error import (
     AccountBannedError,
-    AccountInFreezeError,
     AccountNotFound,
     EmailSendIpLimitError,
     NotAllowedCreateWorkspace,

@@ -28,8 +26,6 @@ from libs.helper import email, extract_remote_ip
 from libs.password import valid_password
 from models.account import Account
 from services.account_service import AccountService, RegisterService, TenantService
-from services.billing_service import BillingService
-from services.errors.account import AccountRegisterError
 from services.errors.workspace import WorkSpaceNotAllowedCreateError
 from services.feature_service import FeatureService
 

@@ -48,9 +44,6 @@ class LoginApi(Resource):
         parser.add_argument("language", type=str, required=False, default="en-US", location="json")
         args = parser.parse_args()
 
-        if dify_config.BILLING_ENABLED and BillingService.is_email_in_freeze(args["email"]):
-            raise AccountInFreezeError()
-
         is_login_error_rate_limit = AccountService.is_login_error_rate_limit(args["email"])
         if is_login_error_rate_limit:
             raise EmailPasswordLoginLimitError()

@@ -120,10 +113,8 @@ class ResetPasswordSendEmailApi(Resource):
             language = "zh-Hans"
         else:
             language = "en-US"
-        try:
-            account = AccountService.get_user_through_email(args["email"])
-        except AccountRegisterError as are:
-            raise AccountInFreezeError()
+        account = AccountService.get_user_through_email(args["email"])
         if account is None:
             if FeatureService.get_system_features().is_allow_register:
                 token = AccountService.send_reset_password_email(email=args["email"], language=language)
@@ -151,11 +142,8 @@ class EmailCodeLoginSendEmailApi(Resource):
|
|||||||
language = "zh-Hans"
|
language = "zh-Hans"
|
||||||
else:
|
else:
|
||||||
language = "en-US"
|
language = "en-US"
|
||||||
try:
|
|
||||||
account = AccountService.get_user_through_email(args["email"])
|
|
||||||
except AccountRegisterError as are:
|
|
||||||
raise AccountInFreezeError()
|
|
||||||
|
|
||||||
|
account = AccountService.get_user_through_email(args["email"])
|
||||||
if account is None:
|
if account is None:
|
||||||
if FeatureService.get_system_features().is_allow_register:
|
if FeatureService.get_system_features().is_allow_register:
|
||||||
token = AccountService.send_email_code_login_email(email=args["email"], language=language)
|
token = AccountService.send_email_code_login_email(email=args["email"], language=language)
|
||||||
@@ -189,10 +177,7 @@ class EmailCodeLoginApi(Resource):
|
|||||||
raise EmailCodeError()
|
raise EmailCodeError()
|
||||||
|
|
||||||
AccountService.revoke_email_code_login_token(args["token"])
|
AccountService.revoke_email_code_login_token(args["token"])
|
||||||
try:
|
account = AccountService.get_user_through_email(user_email)
|
||||||
account = AccountService.get_user_through_email(user_email)
|
|
||||||
except AccountRegisterError as are:
|
|
||||||
raise AccountInFreezeError()
|
|
||||||
if account:
|
if account:
|
||||||
tenant = TenantService.get_join_tenants(account)
|
tenant = TenantService.get_join_tenants(account)
|
||||||
if not tenant:
|
if not tenant:
|
||||||
@@ -211,8 +196,6 @@ class EmailCodeLoginApi(Resource):
                 )
             except WorkSpaceNotAllowedCreateError:
                 return NotAllowedCreateWorkspace()
-            except AccountRegisterError as are:
-                raise AccountInFreezeError()
         token_pair = AccountService.login(account, ip_address=extract_remote_ip(request))
         AccountService.reset_login_error_rate_limit(args["email"])
         return {"result": "success", "data": token_pair.model_dump()}

@@ -4,9 +4,7 @@ from typing import Optional
 
 import requests
 from flask import current_app, redirect, request
-from flask_restful import Resource  # type: ignore
-from sqlalchemy import select
-from sqlalchemy.orm import Session
+from flask_restful import Resource
 from werkzeug.exceptions import Unauthorized
 
 from configs import dify_config
@@ -18,7 +16,7 @@ from libs.oauth import GitHubOAuth, GoogleOAuth, OAuthUserInfo
 from models import Account
 from models.account import AccountStatus
 from services.account_service import AccountService, RegisterService, TenantService
-from services.errors.account import AccountNotFoundError, AccountRegisterError
+from services.errors.account import AccountNotFoundError
 from services.errors.workspace import WorkSpaceNotAllowedCreateError, WorkSpaceNotFoundError
 from services.feature_service import FeatureService
 
@@ -78,9 +76,8 @@ class OAuthCallback(Resource):
         try:
             token = oauth_provider.get_access_token(code)
             user_info = oauth_provider.get_user_info(token)
-        except requests.exceptions.RequestException as e:
-            error_text = e.response.text if e.response else str(e)
-            logging.exception(f"An error occurred during the OAuth process with {provider}: {error_text}")
+        except requests.exceptions.HTTPError as e:
+            logging.exception(f"An error occurred during the OAuth process with {provider}: {e.response.text}")
             return {"error": "OAuth process failed"}, 400
 
         if invite_token and RegisterService.is_valid_invite_token(invite_token):
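The difference between the two handlers in this hunk is substantive: `HTTPError` is typically raised only by `raise_for_status()` on a non-2xx reply, while timeouts and connection failures raise other `RequestException` subclasses whose `response` attribute is `None`, so dereferencing `e.response.text` unconditionally can itself crash the handler. A self-contained sketch of the wider, guarded shape seen on the `-` lines:

    import logging

    import requests

    def fetch_json(url: str) -> dict:
        try:
            resp = requests.get(url, timeout=10)
            resp.raise_for_status()  # raises HTTPError on 4xx/5xx
            return resp.json()
        except requests.exceptions.RequestException as e:
            # Timeout and ConnectionError carry no response object, so
            # fall back to str(e) instead of assuming e.response exists.
            error_text = e.response.text if e.response is not None else str(e)
            logging.exception("Request to %s failed: %s", url, error_text)
            raise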
@@ -101,8 +98,6 @@ class OAuthCallback(Resource):
                     f"{dify_config.CONSOLE_WEB_URL}/signin"
                     "?message=Workspace not found, please contact system admin to invite you to join in a workspace."
                 )
-        except AccountRegisterError as e:
-            return redirect(f"{dify_config.CONSOLE_WEB_URL}/signin?message={e.description}")
 
         # Check account status
         if account.status == AccountStatus.BANNED.value:
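The `-` lines above drop a fallback that turns a registration failure during the OAuth callback into a redirect back to the signin page, carrying the error text in the query string, rather than an unhandled exception. One detail left implicit in the diff: the message should be URL-encoded before being embedded. A small sketch (the `quote` call is an addition for illustration, not in the diff):

    from urllib.parse import quote

    def signin_error_redirect(console_url: str, description: str) -> str:
        # URL-encode the human-readable message before placing it in the
        # query string; the code in the diff interpolates it raw.
        return f"{console_url}/signin?message={quote(description)}"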
@@ -134,11 +129,10 @@
 
 
 def _get_account_by_openid_or_email(provider: str, user_info: OAuthUserInfo) -> Optional[Account]:
-    account: Optional[Account] = Account.get_by_openid(provider, user_info.id)
+    account = Account.get_by_openid(provider, user_info.id)
 
     if not account:
-        with Session(db.engine) as session:
-            account = session.execute(select(Account).filter_by(email=user_info.email)).scalar_one_or_none()
+        account = Account.query.filter_by(email=user_info.email).first()
 
     return account
 
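The `-` side of this hunk performs the email fallback through an explicit `Session` and a SQLAlchemy 2.0-style `select()` instead of the legacy `Model.query` API; `scalar_one_or_none()` returns the single match or `None` and raises `MultipleResultsFound` when the filter is ambiguous. A self-contained sketch against a hypothetical `User` model (a stand-in, not the real `Account`):

    from sqlalchemy import create_engine, select
    from sqlalchemy.orm import DeclarativeBase, Mapped, Session, mapped_column

    class Base(DeclarativeBase):
        pass

    class User(Base):  # hypothetical stand-in for the Account model
        __tablename__ = "users"
        id: Mapped[int] = mapped_column(primary_key=True)
        email: Mapped[str]

    engine = create_engine("sqlite:///:memory:")
    Base.metadata.create_all(engine)

    with Session(engine) as session:
        # 2.0-style lookup: returns the matching User or None, and raises
        # MultipleResultsFound if more than one row matches the filter.
        user = session.execute(
            select(User).filter_by(email="someone@example.com")
        ).scalar_one_or_none()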

@@ -1,5 +1,5 @@
-from flask_login import current_user  # type: ignore
-from flask_restful import Resource, reqparse  # type: ignore
+from flask_login import current_user
+from flask_restful import Resource, reqparse
 
 from controllers.console import api
 from controllers.console.wraps import account_initialization_required, only_edition_cloud, setup_required
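The `# type: ignore` markers on the `-` lines are presumably there because flask_login and flask_restful ship without bundled stubs or a py.typed marker, so a default mypy run reports the imports as missing library stubs. A two-line illustration of the marker (the mypy behavior described is the usual default, not something this diff itself demonstrates):

    # Without stubs installed, mypy flags these imports; the trailing
    # marker suppresses the error on just these lines.
    from flask_login import current_user  # type: ignore
    from flask_restful import Resource, reqparse  # type: ignore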
Some files were not shown because too many files have changed in this diff