Mirror of https://github.com/langgenius/dify.git (synced 2026-02-11 02:44:05 +00:00)

Compare commits: dev/plugin...feat/node-

8 Commits
| Author | SHA1 | Date |
|---|---|---|
|  | fde3fe0ab6 |  |
|  | 07528f82b9 |  |
|  | 127291a90f |  |
|  | 9e0c28791d |  |
|  | b411087bb7 |  |
|  | 357769c72e |  |
|  | 853b9af09c |  |
|  | b99f1a09f4 |  |
```diff
@@ -1,12 +1,11 @@
 #!/bin/bash
 
-npm add -g pnpm@9.12.2
-cd web && pnpm install
+cd web && npm install
 pipx install poetry
 
 echo 'alias start-api="cd /workspaces/dify/api && poetry run python -m flask run --host 0.0.0.0 --port=5001 --debug"' >> ~/.bashrc
 echo 'alias start-worker="cd /workspaces/dify/api && poetry run python -m celery -A app.celery worker -P gevent -c 1 --loglevel INFO -Q dataset,generation,mail,ops_trace,app_deletion"' >> ~/.bashrc
-echo 'alias start-web="cd /workspaces/dify/web && pnpm dev"' >> ~/.bashrc
+echo 'alias start-web="cd /workspaces/dify/web && npm run dev"' >> ~/.bashrc
 echo 'alias start-containers="cd /workspaces/dify/docker && docker-compose -f docker-compose.middleware.yaml -p dify up -d"' >> ~/.bashrc
 echo 'alias stop-containers="cd /workspaces/dify/docker && docker-compose -f docker-compose.middleware.yaml -p dify down"' >> ~/.bashrc
```
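For orientation: this post-create script only registers shell aliases, so a dev-container session drives the stack by hand. A minimal sketch of that flow, assuming the middleware compose file brings up the usual backing services:

```bash
# run in order once the container is created; each alias was appended to ~/.bashrc above
start-containers   # docker-compose up -d on docker-compose.middleware.yaml
start-api          # Flask API on 0.0.0.0:5001 in debug mode
start-worker       # Celery worker on the dataset/generation/mail/ops_trace/app_deletion queues
start-web          # web dev server (npm run dev on the + side, pnpm dev on the - side)
```

`stop-containers` tears the middleware stack back down.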
**.github/actions/setup-poetry/action.yml** (vendored, 2 lines changed)

```diff
@@ -8,7 +8,7 @@ inputs:
   poetry-version:
     description: Poetry version to set up
     required: true
-    default: '2.0.1'
+    default: '1.8.4'
   poetry-lockfile:
     description: Path to the Poetry lockfile to restore cache from
     required: true
```
**.github/workflows/api-tests.yml** (vendored, 19 lines changed)

```diff
@@ -26,9 +26,6 @@ jobs:
     steps:
       - name: Checkout code
         uses: actions/checkout@v4
-        with:
-          fetch-depth: 0
-          persist-credentials: false
 
       - name: Setup Poetry and Python ${{ matrix.python-version }}
         uses: ./.github/actions/setup-poetry
@@ -45,17 +42,19 @@ jobs:
         run: poetry install -C api --with dev
 
       - name: Check dependencies in pyproject.toml
-        run: poetry run -P api bash dev/pytest/pytest_artifacts.sh
+        run: poetry run -C api bash dev/pytest/pytest_artifacts.sh
 
       - name: Run Unit tests
-        run: poetry run -P api bash dev/pytest/pytest_unit_tests.sh
+        run: poetry run -C api bash dev/pytest/pytest_unit_tests.sh
+
+      - name: Run ModelRuntime
+        run: poetry run -C api bash dev/pytest/pytest_model_runtime.sh
 
       - name: Run dify config tests
-        run: poetry run -P api python dev/pytest/pytest_config_tests.py
+        run: poetry run -C api python dev/pytest/pytest_config_tests.py
 
-      - name: Run mypy
-        run: |
-          poetry run -C api python -m mypy --install-types --non-interactive .
+      - name: Run Tool
+        run: poetry run -C api bash dev/pytest/pytest_tools.sh
 
       - name: Set up dotenvs
         run: |
@@ -75,4 +74,4 @@ jobs:
           ssrf_proxy
 
       - name: Run Workflow
-        run: poetry run -P api bash dev/pytest/pytest_workflow.sh
+        run: poetry run -C api bash dev/pytest/pytest_workflow.sh
```
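The recurring `-P api` to `-C api` swap in the `poetry run` steps tracks the `poetry-version` default changed above: Poetry 2.x added `-P/--project` and narrowed `-C/--directory` to only setting the working directory, while on Poetry 1.x `-C` is the way to point at a project in a subdirectory. Roughly:

```bash
# Poetry 1.x (the + side pins 1.8.4): -C selects the project directory
poetry run -C api bash dev/pytest/pytest_unit_tests.sh

# Poetry 2.x (the - side pins 2.0.1): -P/--project is the equivalent flag
poetry run -P api bash dev/pytest/pytest_unit_tests.sh
```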
**.github/workflows/build-push.yml** (vendored, 16 lines changed)

```diff
@@ -5,7 +5,6 @@ on:
     branches:
       - "main"
       - "deploy/dev"
-      - "dev/plugin-deploy"
   release:
     types: [published]
 
@@ -80,12 +79,10 @@ jobs:
           cache-to: type=gha,mode=max,scope=${{ matrix.service_name }}
 
       - name: Export digest
-        env:
-          DIGEST: ${{ steps.build.outputs.digest }}
         run: |
           mkdir -p /tmp/digests
-          sanitized_digest=${DIGEST#sha256:}
-          touch "/tmp/digests/${sanitized_digest}"
+          digest="${{ steps.build.outputs.digest }}"
+          touch "/tmp/digests/${digest#sha256:}"
 
       - name: Upload digest
         uses: actions/upload-artifact@v4
@@ -135,15 +132,10 @@ jobs:
 
       - name: Create manifest list and push
         working-directory: /tmp/digests
-        env:
-          IMAGE_NAME: ${{ env[matrix.image_name_env] }}
         run: |
           docker buildx imagetools create $(jq -cr '.tags | map("-t " + .) | join(" ")' <<< "$DOCKER_METADATA_OUTPUT_JSON") \
-            $(printf "$IMAGE_NAME@sha256:%s " *)
+            $(printf '${{ env[matrix.image_name_env] }}@sha256:%s ' *)
 
       - name: Inspect image
-        env:
-          IMAGE_NAME: ${{ env[matrix.image_name_env] }}
-          IMAGE_VERSION: ${{ steps.meta.outputs.version }}
         run: |
-          docker buildx imagetools inspect "$IMAGE_NAME:$IMAGE_VERSION"
+          docker buildx imagetools inspect ${{ env[matrix.image_name_env] }}:${{ steps.meta.outputs.version }}
```
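A note on the digest steps: the `-` side routes `${{ steps.build.outputs.digest }}` through `env:`, which keeps the expression out of the generated shell script (GitHub's recommended guard against script injection), while the `+` side splices it straight into the script text. The `#sha256:` part is ordinary Bash prefix stripping either way:

```bash
# ${var#pattern} removes the shortest matching prefix; the digest value below is made up for illustration
digest="sha256:4f53cda18c2baa0c0354bb5f9a3ecbe5ed12ab4d8e11ba873c2f11161202b945"
echo "${digest#sha256:}"   # prints the bare hex digest
```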
**.github/workflows/db-migration-test.yml** (vendored, 4 lines changed)

```diff
@@ -4,7 +4,6 @@ on:
   pull_request:
     branches:
       - main
-      - plugins/beta
     paths:
       - api/migrations/**
       - .github/workflows/db-migration-test.yml
@@ -20,9 +19,6 @@ jobs:
     steps:
       - name: Checkout code
         uses: actions/checkout@v4
-        with:
-          fetch-depth: 0
-          persist-credentials: false
 
       - name: Setup Poetry and Python
         uses: ./.github/actions/setup-poetry
```
**.github/workflows/docker-build.yml** (vendored, 47 lines, file deleted)

```diff
@@ -1,47 +0,0 @@
-name: Build docker image
-
-on:
-  pull_request:
-    branches:
-      - "main"
-    paths:
-      - api/Dockerfile
-      - web/Dockerfile
-
-concurrency:
-  group: docker-build-${{ github.head_ref || github.run_id }}
-  cancel-in-progress: true
-
-jobs:
-  build-docker:
-    runs-on: ubuntu-latest
-    strategy:
-      matrix:
-        include:
-          - service_name: "api-amd64"
-            platform: linux/amd64
-            context: "api"
-          - service_name: "api-arm64"
-            platform: linux/arm64
-            context: "api"
-          - service_name: "web-amd64"
-            platform: linux/amd64
-            context: "web"
-          - service_name: "web-arm64"
-            platform: linux/arm64
-            context: "web"
-    steps:
-      - name: Set up QEMU
-        uses: docker/setup-qemu-action@v3
-
-      - name: Set up Docker Buildx
-        uses: docker/setup-buildx-action@v3
-
-      - name: Build Docker Image
-        uses: docker/build-push-action@v6
-        with:
-          push: false
-          context: "{{defaultContext}}:${{ matrix.context }}"
-          platforms: ${{ matrix.platform }}
-          cache-from: type=gha
-          cache-to: type=gha,mode=max
```
**.github/workflows/expose_service_ports.sh** (vendored, 2 lines changed)

```diff
@@ -9,6 +9,6 @@ yq eval '.services["pgvecto-rs"].ports += ["5431:5432"]' -i docker/docker-compos
 yq eval '.services["elasticsearch"].ports += ["9200:9200"]' -i docker/docker-compose.yaml
 yq eval '.services.couchbase-server.ports += ["8091-8096:8091-8096"]' -i docker/docker-compose.yaml
 yq eval '.services.couchbase-server.ports += ["11210:11210"]' -i docker/docker-compose.yaml
-yq eval '.services.tidb.ports += ["4000:4000"]' -i docker/tidb/docker-compose.yaml
+yq eval '.services.tidb.ports += ["4000:4000"]' -i docker/docker-compose.yaml
 
 echo "Ports exposed for sandbox, weaviate, tidb, qdrant, chroma, milvus, pgvector, pgvecto-rs, elasticsearch, couchbase"
```
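Both sides add the same TiDB port mapping and disagree only on where the TiDB compose file lives (`docker/tidb/` on the `-` side, `docker/` on the `+` side). `yq eval '<expr>' -i <file>` edits the YAML in place, so the result can be checked immediately; a quick sketch using the `-` side's path:

```bash
# append the host:container mapping, then read it back to confirm
yq eval '.services.tidb.ports += ["4000:4000"]' -i docker/tidb/docker-compose.yaml
yq eval '.services.tidb.ports' docker/tidb/docker-compose.yaml   # should now include "4000:4000"
```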
**.github/workflows/style.yml** (vendored, 57 lines changed)

```diff
@@ -17,9 +17,6 @@ jobs:
     steps:
       - name: Checkout code
         uses: actions/checkout@v4
-        with:
-          fetch-depth: 0
-          persist-credentials: false
 
       - name: Check changed files
         id: changed-files
@@ -41,12 +38,12 @@ jobs:
         if: steps.changed-files.outputs.any_changed == 'true'
         run: |
           poetry run -C api ruff --version
-          poetry run -C api ruff check ./
-          poetry run -C api ruff format --check ./
+          poetry run -C api ruff check ./api
+          poetry run -C api ruff format --check ./api
 
       - name: Dotenv check
         if: steps.changed-files.outputs.any_changed == 'true'
-        run: poetry run -P api dotenv-linter ./api/.env.example ./web/.env.example
+        run: poetry run -C api dotenv-linter ./api/.env.example ./web/.env.example
 
       - name: Lint hints
         if: failure()
@@ -62,9 +59,6 @@ jobs:
     steps:
       - name: Checkout code
         uses: actions/checkout@v4
-        with:
-          fetch-depth: 0
-          persist-credentials: false
 
       - name: Check changed files
         id: changed-files
@@ -72,58 +66,22 @@ jobs:
         with:
           files: web/**
 
-      - name: Install pnpm
-        uses: pnpm/action-setup@v4
-        with:
-          version: 10
-          run_install: false
-
       - name: Setup NodeJS
         uses: actions/setup-node@v4
         if: steps.changed-files.outputs.any_changed == 'true'
         with:
           node-version: 20
-          cache: pnpm
+          cache: yarn
           cache-dependency-path: ./web/package.json
 
       - name: Web dependencies
         if: steps.changed-files.outputs.any_changed == 'true'
-        run: pnpm install --frozen-lockfile
+        run: yarn install --frozen-lockfile
 
       - name: Web style check
         if: steps.changed-files.outputs.any_changed == 'true'
-        run: pnpm run lint
+        run: yarn run lint
 
-  docker-compose-template:
-    name: Docker Compose Template
-    runs-on: ubuntu-latest
-
-    steps:
-      - name: Checkout code
-        uses: actions/checkout@v4
-        with:
-          fetch-depth: 0
-          persist-credentials: false
-
-      - name: Check changed files
-        id: changed-files
-        uses: tj-actions/changed-files@v45
-        with:
-          files: |
-            docker/generate_docker_compose
-            docker/.env.example
-            docker/docker-compose-template.yaml
-            docker/docker-compose.yaml
-
-      - name: Generate Docker Compose
-        if: steps.changed-files.outputs.any_changed == 'true'
-        run: |
-          cd docker
-          ./generate_docker_compose
-
-      - name: Check for changes
-        if: steps.changed-files.outputs.any_changed == 'true'
-        run: git diff --exit-code
-
   superlinter:
     name: SuperLinter
@@ -132,9 +90,6 @@ jobs:
     steps:
       - name: Checkout code
         uses: actions/checkout@v4
-        with:
-          fetch-depth: 0
-          persist-credentials: false
 
       - name: Check changed files
         id: changed-files
```
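The web-lint differences are a plain yarn-to-pnpm swap plus the dedicated `pnpm/action-setup` step the `-` side needs, since `actions/setup-node` can cache pnpm but does not install it. The commands map one-to-one, assuming the matching lockfile is checked in:

```bash
# + side (yarn)                    # - side (pnpm)
yarn install --frozen-lockfile     # pnpm install --frozen-lockfile
yarn run lint                      # pnpm run lint
```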
**.github/workflows/tool-test-sdks.yaml** (vendored, 9 lines changed)

```diff
@@ -26,19 +26,16 @@ jobs:
 
     steps:
       - uses: actions/checkout@v4
-        with:
-          fetch-depth: 0
-          persist-credentials: false
 
       - name: Use Node.js ${{ matrix.node-version }}
         uses: actions/setup-node@v4
         with:
           node-version: ${{ matrix.node-version }}
           cache: ''
-          cache-dependency-path: 'pnpm-lock.yaml'
+          cache-dependency-path: 'yarn.lock'
 
       - name: Install Dependencies
-        run: pnpm install --frozen-lockfile
+        run: yarn install
 
       - name: Test
-        run: pnpm test
+        run: yarn test
```
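As an aside, `pnpm/action-setup` is not the only way to get pnpm onto a runner: Node 20 ships Corepack, so a workflow or a local checkout can pin a pnpm version without extra actions. A sketch, not something either branch does:

```bash
corepack enable                           # activate the package-manager shims bundled with Node
corepack prepare pnpm@9.12.2 --activate   # pin the version the devcontainer script expects
pnpm --version
```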
```diff
@@ -16,7 +16,6 @@ jobs:
       - uses: actions/checkout@v4
         with:
           fetch-depth: 2 # last 2 commits
-          persist-credentials: false
 
       - name: Check for file changes in i18n/en-US
         id: check_files
@@ -39,11 +38,11 @@ jobs:
 
       - name: Install dependencies
         if: env.FILES_CHANGED == 'true'
-        run: pnpm install --frozen-lockfile
+        run: yarn install --frozen-lockfile
 
       - name: Run npm script
         if: env.FILES_CHANGED == 'true'
-        run: pnpm run auto-gen-i18n
+        run: npm run auto-gen-i18n
 
       - name: Create Pull Request
         if: env.FILES_CHANGED == 'true'
```
**.github/workflows/vdb-tests.yml** (vendored, 19 lines changed)

```diff
@@ -28,9 +28,6 @@ jobs:
     steps:
       - name: Checkout code
         uses: actions/checkout@v4
-        with:
-          fetch-depth: 0
-          persist-credentials: false
 
       - name: Setup Poetry and Python ${{ matrix.python-version }}
         uses: ./.github/actions/setup-poetry
@@ -54,15 +51,7 @@ jobs:
       - name: Expose Service Ports
         run: sh .github/workflows/expose_service_ports.sh
 
-      - name: Set up Vector Store (TiDB)
-        uses: hoverkraft-tech/compose-action@v2.0.2
-        with:
-          compose-file: docker/tidb/docker-compose.yaml
-          services: |
-            tidb
-            tiflash
-
-      - name: Set up Vector Stores (Weaviate, Qdrant, PGVector, Milvus, PgVecto-RS, Chroma, MyScale, ElasticSearch, Couchbase)
+      - name: Set up Vector Stores (TiDB, Weaviate, Qdrant, PGVector, Milvus, PgVecto-RS, Chroma, MyScale, ElasticSearch, Couchbase)
         uses: hoverkraft-tech/compose-action@v2.0.2
         with:
           compose-file: |
@@ -78,9 +67,7 @@ jobs:
             pgvector
             chroma
             elasticsearch
+            tidb
 
-      - name: Check TiDB Ready
-        run: poetry run -P api python api/tests/integration_tests/vdb/tidb_vector/check_tiflash_ready.py
-
       - name: Test Vector Stores
-        run: poetry run -P api bash dev/pytest/pytest_vdb.sh
+        run: poetry run -C api bash dev/pytest/pytest_vdb.sh
```
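On the `-` side TiDB moves into its own compose file and gains an explicit readiness gate (`check_tiflash_ready.py`) before the suite runs, presumably because TiFlash becomes available well after the TiDB SQL port does. A rough shell equivalent of such a gate, assuming TiDB's MySQL-protocol listener on localhost:4000:

```bash
# poll until the SQL port answers; the real script additionally waits for TiFlash replicas
until mysql -h 127.0.0.1 -P 4000 -u root -e 'SELECT 1' >/dev/null 2>&1; do
  echo "waiting for tidb..."
  sleep 2
done
```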
**.github/workflows/web-tests.yml** (vendored, 35 lines changed)

```diff
@@ -22,34 +22,25 @@ jobs:
     steps:
       - name: Checkout code
         uses: actions/checkout@v4
-        with:
-          fetch-depth: 0
-          persist-credentials: false
 
       - name: Check changed files
         id: changed-files
         uses: tj-actions/changed-files@v45
         with:
           files: web/**
 
-      # to run pnpm, should install package canvas, but it always install failed on amd64 under ubuntu-latest
-      # - name: Install pnpm
-      #   uses: pnpm/action-setup@v4
-      #   with:
-      #     version: 10
-      #     run_install: false
-
-      # - name: Setup Node.js
-      #   uses: actions/setup-node@v4
-      #   if: steps.changed-files.outputs.any_changed == 'true'
-      #   with:
-      #     node-version: 20
-      #     cache: pnpm
-      #     cache-dependency-path: ./web/package.json
+      - name: Setup Node.js
+        uses: actions/setup-node@v4
+        if: steps.changed-files.outputs.any_changed == 'true'
+        with:
+          node-version: 20
+          cache: yarn
+          cache-dependency-path: ./web/package.json
 
-      # - name: Install dependencies
-      #   if: steps.changed-files.outputs.any_changed == 'true'
-      #   run: pnpm install --frozen-lockfile
+      - name: Install dependencies
+        if: steps.changed-files.outputs.any_changed == 'true'
+        run: yarn install --frozen-lockfile
 
-      # - name: Run tests
-      #   if: steps.changed-files.outputs.any_changed == 'true'
-      #   run: pnpm test
+      - name: Run tests
+        if: steps.changed-files.outputs.any_changed == 'true'
+        run: yarn test
```
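The commented-out block records why the pnpm migration stalled here: `canvas` compiles native code at install time, and that build kept failing on the amd64 ubuntu-latest runner. If missing system libraries were the cause, node-canvas documents these Debian/Ubuntu prerequisites (an assumption about the failure mode, not something this workflow does):

```bash
# node-canvas build prerequisites on Debian/Ubuntu, per its install docs
sudo apt-get update
sudo apt-get install -y build-essential libcairo2-dev libpango1.0-dev \
  libjpeg-dev libgif-dev librsvg2-dev
```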
**.gitignore** (vendored, 8 lines changed)

```diff
@@ -163,7 +163,6 @@ docker/volumes/db/data/*
 docker/volumes/redis/data/*
 docker/volumes/weaviate/*
 docker/volumes/qdrant/*
-docker/tidb/volumes/*
 docker/volumes/etcd/*
 docker/volumes/minio/*
 docker/volumes/milvus/*
@@ -176,7 +175,6 @@ docker/volumes/pgvector/data/*
 docker/volumes/pgvecto_rs/data/*
 docker/volumes/couchbase/*
 docker/volumes/oceanbase/*
-docker/volumes/plugin_daemon/*
 !docker/volumes/oceanbase/init.d
 
 docker/nginx/conf.d/default.conf
@@ -195,9 +193,3 @@ api/.vscode
 
 .idea/
 .vscode
-
-# pnpm
-/.pnpm-store
-
-# plugin migrate
-plugins.jsonl
```
```diff
@@ -73,7 +73,7 @@ Dify requires the following dependencies to build, make sure they're installed o
 * [Docker](https://www.docker.com/)
 * [Docker Compose](https://docs.docker.com/compose/install/)
 * [Node.js v18.x (LTS)](http://nodejs.org)
-* [pnpm](https://pnpm.io/)
+* [npm](https://www.npmjs.com/) version 8.x.x or [Yarn](https://yarnpkg.com/)
 * [Python](https://www.python.org/) version 3.11.x or 3.12.x
 
 ### 4. Installations
```
|
|||||||
@@ -70,7 +70,7 @@ Dify 依赖以下工具和库:
|
|||||||
- [Docker](https://www.docker.com/)
|
- [Docker](https://www.docker.com/)
|
||||||
- [Docker Compose](https://docs.docker.com/compose/install/)
|
- [Docker Compose](https://docs.docker.com/compose/install/)
|
||||||
- [Node.js v18.x (LTS)](http://nodejs.org)
|
- [Node.js v18.x (LTS)](http://nodejs.org)
|
||||||
- [pnpm](https://pnpm.io/)
|
- [npm](https://www.npmjs.com/) version 8.x.x or [Yarn](https://yarnpkg.com/)
|
||||||
- [Python](https://www.python.org/) version 3.11.x or 3.12.x
|
- [Python](https://www.python.org/) version 3.11.x or 3.12.x
|
||||||
|
|
||||||
### 4. 安装
|
### 4. 安装
|
||||||
|
```diff
@@ -73,7 +73,7 @@ Dify を構築するには次の依存関係が必要です。それらがシス
 - [Docker](https://www.docker.com/)
 - [Docker Compose](https://docs.docker.com/compose/install/)
 - [Node.js v18.x (LTS)](http://nodejs.org)
-- [pnpm](https://pnpm.io/)
+- [npm](https://www.npmjs.com/) version 8.x.x or [Yarn](https://yarnpkg.com/)
 - [Python](https://www.python.org/) version 3.11.x or 3.12.x
 
 ### 4. インストール
```
```diff
@@ -72,7 +72,7 @@ Dify yêu cầu các phụ thuộc sau để build, hãy đảm bảo chúng đ
 - [Docker](https://www.docker.com/)
 - [Docker Compose](https://docs.docker.com/compose/install/)
 - [Node.js v18.x (LTS)](http://nodejs.org)
-- [pnpm](https://pnpm.io/)
+- [npm](https://www.npmjs.com/) phiên bản 8.x.x hoặc [Yarn](https://yarnpkg.com/)
 - [Python](https://www.python.org/) phiên bản 3.11.x hoặc 3.12.x
 
 ### 4. Cài đặt
```
**LICENSE** (23 lines changed)

```diff
@@ -1,12 +1,12 @@
 # Open Source License
 
-Dify is licensed under a modified version of the Apache License 2.0, with the following additional conditions:
+Dify is licensed under the Apache License 2.0, with the following additional conditions:
 
 1. Dify may be utilized commercially, including as a backend service for other applications or as an application development platform for enterprises. Should the conditions below be met, a commercial license must be obtained from the producer:
 
 a. Multi-tenant service: Unless explicitly authorized by Dify in writing, you may not use the Dify source code to operate a multi-tenant environment.
 - Tenant Definition: Within the context of Dify, one tenant corresponds to one workspace. The workspace provides a separated area for each tenant's data and configurations.
 
 b. LOGO and copyright information: In the process of using Dify's frontend, you may not remove or modify the LOGO or copyright information in the Dify console or applications. This restriction is inapplicable to uses of Dify that do not involve its frontend.
 - Frontend Definition: For the purposes of this license, the "frontend" of Dify includes all components located in the `web/` directory when running Dify from the raw source code, or the "web" image when running Dify with Docker.
 
@@ -21,4 +21,19 @@ Apart from the specific conditions mentioned above, all other rights and restric
 
 The interactive design of this product is protected by appearance patent.
 
-© 2025 LangGenius, Inc.
+© 2024 LangGenius, Inc.
+
+----------
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
```
**README.md** (69 lines changed)

```diff
@@ -25,9 +25,6 @@
     <a href="https://twitter.com/intent/follow?screen_name=dify_ai" target="_blank">
         <img src="https://img.shields.io/twitter/follow/dify_ai?logo=X&color=%20%23f5f5f5"
             alt="follow on X(Twitter)"></a>
-    <a href="https://www.linkedin.com/company/langgenius/" target="_blank">
-        <img src="https://custom-icon-badges.demolab.com/badge/LinkedIn-0A66C2?logo=linkedin-white&logoColor=fff"
-            alt="follow on LinkedIn"></a>
     <a href="https://hub.docker.com/u/langgenius" target="_blank">
         <img alt="Docker Pulls" src="https://img.shields.io/docker/pulls/langgenius/dify-web?labelColor=%20%23FDB062&color=%20%23f79009"></a>
     <a href="https://github.com/langgenius/dify/graphs/commit-activity" target="_blank">
@@ -108,72 +105,6 @@ Please refer to our [FAQ](https://docs.dify.ai/getting-started/install-self-host
 **7. Backend-as-a-Service**:
 All of Dify's offerings come with corresponding APIs, so you could effortlessly integrate Dify into your own business logic.
 
-## Feature Comparison
-<table style="width: 100%;">
-  <tr>
-    <th align="center">Feature</th>
-    <th align="center">Dify.AI</th>
-    <th align="center">LangChain</th>
-    <th align="center">Flowise</th>
-    <th align="center">OpenAI Assistants API</th>
-  </tr>
-  <tr>
-    <td align="center">Programming Approach</td>
-    <td align="center">API + App-oriented</td>
-    <td align="center">Python Code</td>
-    <td align="center">App-oriented</td>
-    <td align="center">API-oriented</td>
-  </tr>
-  <tr>
-    <td align="center">Supported LLMs</td>
-    <td align="center">Rich Variety</td>
-    <td align="center">Rich Variety</td>
-    <td align="center">Rich Variety</td>
-    <td align="center">OpenAI-only</td>
-  </tr>
-  <tr>
-    <td align="center">RAG Engine</td>
-    <td align="center">✅</td>
-    <td align="center">✅</td>
-    <td align="center">✅</td>
-    <td align="center">✅</td>
-  </tr>
-  <tr>
-    <td align="center">Agent</td>
-    <td align="center">✅</td>
-    <td align="center">✅</td>
-    <td align="center">❌</td>
-    <td align="center">✅</td>
-  </tr>
-  <tr>
-    <td align="center">Workflow</td>
-    <td align="center">✅</td>
-    <td align="center">❌</td>
-    <td align="center">✅</td>
-    <td align="center">❌</td>
-  </tr>
-  <tr>
-    <td align="center">Observability</td>
-    <td align="center">✅</td>
-    <td align="center">✅</td>
-    <td align="center">❌</td>
-    <td align="center">❌</td>
-  </tr>
-  <tr>
-    <td align="center">Enterprise Feature (SSO/Access control)</td>
-    <td align="center">✅</td>
-    <td align="center">❌</td>
-    <td align="center">❌</td>
-    <td align="center">❌</td>
-  </tr>
-  <tr>
-    <td align="center">Local Deployment</td>
-    <td align="center">✅</td>
-    <td align="center">✅</td>
-    <td align="center">✅</td>
-    <td align="center">❌</td>
-  </tr>
-</table>
-
 ## Using Dify
 
```
```diff
@@ -21,9 +21,6 @@
     <a href="https://twitter.com/intent/follow?screen_name=dify_ai" target="_blank">
         <img src="https://img.shields.io/twitter/follow/dify_ai?logo=X&color=%20%23f5f5f5"
             alt="follow on X(Twitter)"></a>
-    <a href="https://www.linkedin.com/company/langgenius/" target="_blank">
-        <img src="https://custom-icon-badges.demolab.com/badge/LinkedIn-0A66C2?logo=linkedin-white&logoColor=fff"
-            alt="follow on LinkedIn"></a>
     <a href="https://hub.docker.com/u/langgenius" target="_blank">
         <img alt="Docker Pulls" src="https://img.shields.io/docker/pulls/langgenius/dify-web?labelColor=%20%23FDB062&color=%20%23f79009"></a>
     <a href="https://github.com/langgenius/dify/graphs/commit-activity" target="_blank">
```
```diff
@@ -21,9 +21,6 @@
     <a href="https://twitter.com/intent/follow?screen_name=dify_ai" target="_blank">
         <img src="https://img.shields.io/twitter/follow/dify_ai?logo=X&color=%20%23f5f5f5"
             alt="follow on X(Twitter)"></a>
-    <a href="https://www.linkedin.com/company/langgenius/" target="_blank">
-        <img src="https://custom-icon-badges.demolab.com/badge/LinkedIn-0A66C2?logo=linkedin-white&logoColor=fff"
-            alt="follow on LinkedIn"></a>
     <a href="https://hub.docker.com/u/langgenius" target="_blank">
         <img alt="Docker Pulls" src="https://img.shields.io/docker/pulls/langgenius/dify-web?labelColor=%20%23FDB062&color=%20%23f79009"></a>
     <a href="https://github.com/langgenius/dify/graphs/commit-activity" target="_blank">
```
```diff
@@ -21,9 +21,6 @@
     <a href="https://twitter.com/intent/follow?screen_name=dify_ai" target="_blank">
         <img src="https://img.shields.io/twitter/follow/dify_ai?logo=X&color=%20%23f5f5f5"
             alt="seguir en X(Twitter)"></a>
-    <a href="https://www.linkedin.com/company/langgenius/" target="_blank">
-        <img src="https://custom-icon-badges.demolab.com/badge/LinkedIn-0A66C2?logo=linkedin-white&logoColor=fff"
-            alt="seguir en LinkedIn"></a>
     <a href="https://hub.docker.com/u/langgenius" target="_blank">
         <img alt="Descargas de Docker" src="https://img.shields.io/docker/pulls/langgenius/dify-web?labelColor=%20%23FDB062&color=%20%23f79009"></a>
     <a href="https://github.com/langgenius/dify/graphs/commit-activity" target="_blank">
```
**README_FR.md** (19 lines changed)

```diff
@@ -21,9 +21,6 @@
     <a href="https://twitter.com/intent/follow?screen_name=dify_ai" target="_blank">
         <img src="https://img.shields.io/twitter/follow/dify_ai?logo=X&color=%20%23f5f5f5"
             alt="suivre sur X(Twitter)"></a>
-    <a href="https://www.linkedin.com/company/langgenius/" target="_blank">
-        <img src="https://custom-icon-badges.demolab.com/badge/LinkedIn-0A66C2?logo=linkedin-white&logoColor=fff"
-            alt="suivre sur LinkedIn"></a>
     <a href="https://hub.docker.com/u/langgenius" target="_blank">
         <img alt="Tirages Docker" src="https://img.shields.io/docker/pulls/langgenius/dify-web?labelColor=%20%23FDB062&color=%20%23f79009"></a>
     <a href="https://github.com/langgenius/dify/graphs/commit-activity" target="_blank">
@@ -55,7 +52,7 @@
 Dify est une plateforme de développement d'applications LLM open source. Son interface intuitive combine un flux de travail d'IA, un pipeline RAG, des capacités d'agent, une gestion de modèles, des fonctionnalités d'observabilité, et plus encore, vous permettant de passer rapidement du prototype à la production. Voici une liste des fonctionnalités principales:
 </br> </br>
 
-**1. Flux de travail** :
+**1. Flux de travail**:
 Construisez et testez des flux de travail d'IA puissants sur un canevas visuel, en utilisant toutes les fonctionnalités suivantes et plus encore.
 
 
@@ -63,25 +60,27 @@ Dify est une plateforme de développement d'applications LLM open source. Son in
 
 
 
-**2. Prise en charge complète des modèles** :
+**2. Prise en charge complète des modèles**:
 Intégration transparente avec des centaines de LLM propriétaires / open source provenant de dizaines de fournisseurs d'inférence et de solutions auto-hébergées, couvrant GPT, Mistral, Llama3, et tous les modèles compatibles avec l'API OpenAI. Une liste complète des fournisseurs de modèles pris en charge se trouve [ici](https://docs.dify.ai/getting-started/readme/model-providers).
 
 
 
-**3. IDE de prompt** :
+**3. IDE de prompt**:
 Interface intuitive pour créer des prompts, comparer les performances des modèles et ajouter des fonctionnalités supplémentaires telles que la synthèse vocale à une application basée sur des chats.
 
-**4. Pipeline RAG** :
+**4. Pipeline RAG**:
 Des capacités RAG étendues qui couvrent tout, de l'ingestion de documents à la récupération, avec un support prêt à l'emploi pour l'extraction de texte à partir de PDF, PPT et autres formats de document courants.
 
-**5. Capacités d'agent** :
+**5. Capac
+
+ités d'agent**:
 Vous pouvez définir des agents basés sur l'appel de fonction LLM ou ReAct, et ajouter des outils pré-construits ou personnalisés pour l'agent. Dify fournit plus de 50 outils intégrés pour les agents d'IA, tels que la recherche Google, DALL·E, Stable Diffusion et WolframAlpha.
 
-**6. LLMOps** :
+**6. LLMOps**:
 Surveillez et analysez les journaux d'application et les performances au fil du temps. Vous pouvez continuellement améliorer les prompts, les ensembles de données et les modèles en fonction des données de production et des annotations.
 
-**7. Backend-as-a-Service** :
+**7. Backend-as-a-Service**:
 Toutes les offres de Dify sont accompagnées d'API correspondantes, vous permettant d'intégrer facilement Dify dans votre propre logique métier.
 
 
```
```diff
@@ -21,9 +21,6 @@
     <a href="https://twitter.com/intent/follow?screen_name=dify_ai" target="_blank">
         <img src="https://img.shields.io/twitter/follow/dify_ai?logo=X&color=%20%23f5f5f5"
             alt="X(Twitter)でフォロー"></a>
-    <a href="https://www.linkedin.com/company/langgenius/" target="_blank">
-        <img src="https://custom-icon-badges.demolab.com/badge/LinkedIn-0A66C2?logo=linkedin-white&logoColor=fff"
-            alt="LinkedInでフォロー"></a>
     <a href="https://hub.docker.com/u/langgenius" target="_blank">
         <img alt="Docker Pulls" src="https://img.shields.io/docker/pulls/langgenius/dify-web?labelColor=%20%23FDB062&color=%20%23f79009"></a>
     <a href="https://github.com/langgenius/dify/graphs/commit-activity" target="_blank">
@@ -164,7 +161,7 @@ DifyはオープンソースのLLMアプリケーション開発プラットフ
 
 - **企業/組織向けのDify</br>**
 企業中心の機能を提供しています。[メールを送信](mailto:business@dify.ai?subject=[GitHub]Business%20License%20Inquiry)して企業のニーズについて相談してください。 </br>
-> AWSを使用しているスタートアップ企業や中小企業の場合は、[AWS Marketplace](https://aws.amazon.com/marketplace/pp/prodview-t23mebxzwjhu6)のDify Premiumをチェックして、ワンクリックで自分のAWS VPCにデプロイできます。さらに、手頃な価格のAMIオファリングとして、ロゴやブランディングをカスタマイズしてアプリケーションを作成するオプションがあります。
+> AWSを使用しているスタートアップ企業や中小企業の場合は、[AWS Marketplace](https://aws.amazon.com/marketplace/pp/prodview-t22mebxzwjhu6)のDify Premiumをチェックして、ワンクリックで自分のAWS VPCにデプロイできます。さらに、手頃な価格のAMIオファリングどして、ロゴやブランディングをカスタマイズしてアプリケーションを作成するオプションがあります。
 
 
 ## 最新の情報を入手
```
```diff
@@ -21,9 +21,6 @@
     <a href="https://twitter.com/intent/follow?screen_name=dify_ai" target="_blank">
         <img src="https://img.shields.io/twitter/follow/dify_ai?logo=X&color=%20%23f5f5f5"
             alt="follow on X(Twitter)"></a>
-    <a href="https://www.linkedin.com/company/langgenius/" target="_blank">
-        <img src="https://custom-icon-badges.demolab.com/badge/LinkedIn-0A66C2?logo=linkedin-white&logoColor=fff"
-            alt="follow on LinkedIn"></a>
     <a href="https://hub.docker.com/u/langgenius" target="_blank">
         <img alt="Docker Pulls" src="https://img.shields.io/docker/pulls/langgenius/dify-web?labelColor=%20%23FDB062&color=%20%23f79009"></a>
     <a href="https://github.com/langgenius/dify/graphs/commit-activity" target="_blank">
@@ -87,7 +84,9 @@ Dify is an open-source LLM app development platform. Its intuitive interface com
 
 ## Feature Comparison
 <table style="width: 100%;">
-  <tr>
+  <tr
+
+>
     <th align="center">Feature</th>
     <th align="center">Dify.AI</th>
     <th align="center">LangChain</th>
```
```diff
@@ -21,9 +21,6 @@
     <a href="https://twitter.com/intent/follow?screen_name=dify_ai" target="_blank">
         <img src="https://img.shields.io/twitter/follow/dify_ai?logo=X&color=%20%23f5f5f5"
             alt="follow on X(Twitter)"></a>
-    <a href="https://www.linkedin.com/company/langgenius/" target="_blank">
-        <img src="https://custom-icon-badges.demolab.com/badge/LinkedIn-0A66C2?logo=linkedin-white&logoColor=fff"
-            alt="follow on LinkedIn"></a>
     <a href="https://hub.docker.com/u/langgenius" target="_blank">
         <img alt="Docker Pulls" src="https://img.shields.io/docker/pulls/langgenius/dify-web?labelColor=%20%23FDB062&color=%20%23f79009"></a>
     <a href="https://github.com/langgenius/dify/graphs/commit-activity" target="_blank">
```
```diff
@@ -25,9 +25,6 @@
     <a href="https://twitter.com/intent/follow?screen_name=dify_ai" target="_blank">
         <img src="https://img.shields.io/twitter/follow/dify_ai?logo=X&color=%20%23f5f5f5"
             alt="follow on X(Twitter)"></a>
-    <a href="https://www.linkedin.com/company/langgenius/" target="_blank">
-        <img src="https://custom-icon-badges.demolab.com/badge/LinkedIn-0A66C2?logo=linkedin-white&logoColor=fff"
-            alt="follow on LinkedIn"></a>
     <a href="https://hub.docker.com/u/langgenius" target="_blank">
         <img alt="Docker Pulls" src="https://img.shields.io/docker/pulls/langgenius/dify-web?labelColor=%20%23FDB062&color=%20%23f79009"></a>
     <a href="https://github.com/langgenius/dify/graphs/commit-activity" target="_blank">
```
**README_SI.md** (72 lines changed)

```diff
@@ -22,9 +22,6 @@
     <a href="https://twitter.com/intent/follow?screen_name=dify_ai" target="_blank">
         <img src="https://img.shields.io/twitter/follow/dify_ai?logo=X&color=%20%23f5f5f5"
             alt="follow on X(Twitter)"></a>
-    <a href="https://www.linkedin.com/company/langgenius/" target="_blank">
-        <img src="https://custom-icon-badges.demolab.com/badge/LinkedIn-0A66C2?logo=linkedin-white&logoColor=fff"
-            alt="follow on LinkedIn"></a>
     <a href="https://hub.docker.com/u/langgenius" target="_blank">
         <img alt="Docker Pulls" src="https://img.shields.io/docker/pulls/langgenius/dify-web?labelColor=%20%23FDB062&color=%20%23f79009"></a>
     <a href="https://github.com/langgenius/dify/graphs/commit-activity" target="_blank">
@@ -106,73 +103,6 @@ Prosimo, glejte naša pogosta vprašanja [FAQ](https://docs.dify.ai/getting-star
 **7. Backend-as-a-Service**:
 AVse ponudbe Difyja so opremljene z ustreznimi API-ji, tako da lahko Dify brez težav integrirate v svojo poslovno logiko.
 
-## Primerjava Funkcij
-
-<table style="width: 100%;">
-  <tr>
-    <th align="center">Funkcija</th>
-    <th align="center">Dify.AI</th>
-    <th align="center">LangChain</th>
-    <th align="center">Flowise</th>
-    <th align="center">OpenAI Assistants API</th>
-  </tr>
-  <tr>
-    <td align="center">Programski pristop</td>
-    <td align="center">API + usmerjeno v aplikacije</td>
-    <td align="center">Python koda</td>
-    <td align="center">Usmerjeno v aplikacije</td>
-    <td align="center">Usmerjeno v API</td>
-  </tr>
-  <tr>
-    <td align="center">Podprti LLM-ji</td>
-    <td align="center">Bogata izbira</td>
-    <td align="center">Bogata izbira</td>
-    <td align="center">Bogata izbira</td>
-    <td align="center">Samo OpenAI</td>
-  </tr>
-  <tr>
-    <td align="center">RAG pogon</td>
-    <td align="center">✅</td>
-    <td align="center">✅</td>
-    <td align="center">✅</td>
-    <td align="center">✅</td>
-  </tr>
-  <tr>
-    <td align="center">Agent</td>
-    <td align="center">✅</td>
-    <td align="center">✅</td>
-    <td align="center">❌</td>
-    <td align="center">✅</td>
-  </tr>
-  <tr>
-    <td align="center">Potek dela</td>
-    <td align="center">✅</td>
-    <td align="center">❌</td>
-    <td align="center">✅</td>
-    <td align="center">❌</td>
-  </tr>
-  <tr>
-    <td align="center">Spremljanje</td>
-    <td align="center">✅</td>
-    <td align="center">✅</td>
-    <td align="center">❌</td>
-    <td align="center">❌</td>
-  </tr>
-  <tr>
-    <td align="center">Funkcija za podjetja (SSO/nadzor dostopa)</td>
-    <td align="center">✅</td>
-    <td align="center">❌</td>
-    <td align="center">❌</td>
-    <td align="center">❌</td>
-  </tr>
-  <tr>
-    <td align="center">Lokalna namestitev</td>
-    <td align="center">✅</td>
-    <td align="center">✅</td>
-    <td align="center">✅</td>
-    <td align="center">❌</td>
-  </tr>
-</table>
-
 ## Uporaba Dify
 
@@ -254,4 +184,4 @@ Zaradi zaščite vaše zasebnosti se izogibajte objavljanju varnostnih vprašanj
 
 ## Licenca
 
 To skladišče je na voljo pod [odprtokodno licenco Dify](LICENSE) , ki je v bistvu Apache 2.0 z nekaj dodatnimi omejitvami.
```
````diff
@@ -21,9 +21,6 @@
     <a href="https://twitter.com/intent/follow?screen_name=dify_ai" target="_blank">
         <img src="https://img.shields.io/twitter/follow/dify_ai?logo=X&color=%20%23f5f5f5"
             alt="X(Twitter)'da takip et"></a>
-    <a href="https://www.linkedin.com/company/langgenius/" target="_blank">
-        <img src="https://custom-icon-badges.demolab.com/badge/LinkedIn-0A66C2?logo=linkedin-white&logoColor=fff"
-            alt="LinkedIn'da takip et"></a>
     <a href="https://hub.docker.com/u/langgenius" target="_blank">
         <img alt="Docker Çekmeleri" src="https://img.shields.io/docker/pulls/langgenius/dify-web?labelColor=%20%23FDB062&color=%20%23f79009"></a>
     <a href="https://github.com/langgenius/dify/graphs/commit-activity" target="_blank">
@@ -65,6 +62,8 @@ Görsel bir arayüz üzerinde güçlü AI iş akışları oluşturun ve test edi
 
 
+
+Özür dilerim, haklısınız. Daha anlamlı ve akıcı bir çeviri yapmaya çalışayım. İşte güncellenmiş çeviri:
 
 **3. Prompt IDE**:
 Komut istemlerini oluşturmak, model performansını karşılaştırmak ve sohbet tabanlı uygulamalara metin-konuşma gibi ek özellikler eklemek için kullanıcı dostu bir arayüz.
@@ -151,6 +150,8 @@ Görsel bir arayüz üzerinde güçlü AI iş akışları oluşturun ve test edi
 ## Dify'ı Kullanma
 
 - **Cloud </br>**
+İşte verdiğiniz metnin Türkçe çevirisi, kod bloğu içinde:
+-
 Herkesin sıfır kurulumla denemesi için bir [Dify Cloud](https://dify.ai) hizmeti sunuyoruz. Bu hizmet, kendi kendine dağıtılan versiyonun tüm yeteneklerini sağlar ve sandbox planında 200 ücretsiz GPT-4 çağrısı içerir.
 
 - **Dify Topluluk Sürümünü Kendi Sunucunuzda Barındırma</br>**
@@ -176,6 +177,8 @@ GitHub'da Dify'a yıldız verin ve yeni sürümlerden anında haberdar olun.
 >- RAM >= 4GB
 
 </br>
+İşte verdiğiniz metnin Türkçe çevirisi, kod bloğu içinde:
+
 Dify sunucusunu başlatmanın en kolay yolu, [docker-compose.yml](docker/docker-compose.yaml) dosyamızı çalıştırmaktır. Kurulum komutunu çalıştırmadan önce, makinenizde [Docker](https://docs.docker.com/get-docker/) ve [Docker Compose](https://docs.docker.com/compose/install/)'un kurulu olduğundan emin olun:
 
 ```bash
````
|||||||
@@ -21,9 +21,6 @@
   <a href="https://twitter.com/intent/follow?screen_name=dify_ai" target="_blank">
     <img src="https://img.shields.io/twitter/follow/dify_ai?logo=X&color=%20%23f5f5f5"
       alt="theo dõi trên X(Twitter)"></a>
-  <a href="https://www.linkedin.com/company/langgenius/" target="_blank">
-    <img src="https://custom-icon-badges.demolab.com/badge/LinkedIn-0A66C2?logo=linkedin-white&logoColor=fff"
-      alt="theo dõi trên LinkedIn"></a>
   <a href="https://hub.docker.com/u/langgenius" target="_blank">
     <img alt="Docker Pulls" src="https://img.shields.io/docker/pulls/langgenius/dify-web?labelColor=%20%23FDB062&color=%20%23f79009"></a>
   <a href="https://github.com/langgenius/dify/graphs/commit-activity" target="_blank">
@@ -1,10 +1,7 @@
 .env
 *.env.*

-storage/generate_files/*
 storage/privkeys/*
-storage/tools/*
-storage/upload_files/*

 # Logs
 logs
@@ -12,8 +9,6 @@ logs

 # jetbrains
 .idea
-.mypy_cache
-.ruff_cache

 # venv
 .venv
@@ -23,9 +23,6 @@ FILES_ACCESS_TIMEOUT=300
 # Access token expiration time in minutes
 ACCESS_TOKEN_EXPIRE_MINUTES=60

-# Refresh token expiration time in days
-REFRESH_TOKEN_EXPIRE_DAYS=30

 # celery configuration
 CELERY_BROKER_URL=redis://:difyai123456@localhost:6379/1

@@ -68,7 +65,7 @@ OPENDAL_FS_ROOT=storage

 # S3 Storage configuration
 S3_USE_AWS_MANAGED_IAM=false
-S3_ENDPOINT=https://your-bucket-name.storage.s3.cloudflare.com
+S3_ENDPOINT=https://your-bucket-name.storage.s3.clooudflare.com
 S3_BUCKET_NAME=your-bucket-name
 S3_ACCESS_KEY=your-access-key
 S3_SECRET_KEY=your-secret-key
@@ -77,7 +74,7 @@ S3_REGION=your-region
 # Azure Blob Storage configuration
 AZURE_BLOB_ACCOUNT_NAME=your-account-name
 AZURE_BLOB_ACCOUNT_KEY=your-account-key
-AZURE_BLOB_CONTAINER_NAME=your-container-name
+AZURE_BLOB_CONTAINER_NAME=yout-container-name
 AZURE_BLOB_ACCOUNT_URL=https://<your_account_name>.blob.core.windows.net

 # Aliyun oss Storage configuration
@@ -91,7 +88,7 @@ ALIYUN_OSS_REGION=your-region
 ALIYUN_OSS_PATH=your-path

 # Google Storage configuration
-GOOGLE_STORAGE_BUCKET_NAME=your-bucket-name
+GOOGLE_STORAGE_BUCKET_NAME=yout-bucket-name
 GOOGLE_STORAGE_SERVICE_ACCOUNT_JSON_BASE64=your-google-service-account-json-base64-string

 # Tencent COS Storage configuration
@@ -402,13 +399,13 @@ INDEXING_MAX_SEGMENTATION_TOKENS_LENGTH=4000
 WORKFLOW_MAX_EXECUTION_STEPS=500
 WORKFLOW_MAX_EXECUTION_TIME=1200
 WORKFLOW_CALL_MAX_DEPTH=5
-WORKFLOW_PARALLEL_DEPTH_LIMIT=3
 MAX_VARIABLE_SIZE=204800

 # App configuration
 APP_MAX_EXECUTION_TIME=1200
 APP_MAX_ACTIVE_REQUESTS=0

+
 # Celery beat configuration
 CELERY_BEAT_SCHEDULER_TIME=1

@@ -421,22 +418,6 @@ POSITION_PROVIDER_PINS=
 POSITION_PROVIDER_INCLUDES=
 POSITION_PROVIDER_EXCLUDES=

-# Plugin configuration
-PLUGIN_DAEMON_KEY=lYkiYYT6owG+71oLerGzA7GXCgOT++6ovaezWAjpCjf+Sjc3ZtU+qUEi
-PLUGIN_DAEMON_URL=http://127.0.0.1:5002
-PLUGIN_REMOTE_INSTALL_PORT=5003
-PLUGIN_REMOTE_INSTALL_HOST=localhost
-PLUGIN_MAX_PACKAGE_SIZE=15728640
-INNER_API_KEY=QaHbTe77CtuXmsfyhR7+vRjI/+XbV1AaFy691iy+kGDv2Jvy0/eAh8Y1
-INNER_API_KEY_FOR_PLUGIN=QaHbTe77CtuXmsfyhR7+vRjI/+XbV1AaFy691iy+kGDv2Jvy0/eAh8Y1
-
-# Marketplace configuration
-MARKETPLACE_ENABLED=true
-MARKETPLACE_API_URL=https://marketplace.dify.ai
-
-# Endpoint configuration
-ENDPOINT_URL_TEMPLATE=http://localhost:5002/e/{hook_id}
-
 # Reset password token expiry minutes
 RESET_PASSWORD_TOKEN_EXPIRY_MINUTES=5

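As an aside, the removed `ENDPOINT_URL_TEMPLATE` above is a plain format string; a tiny Python sketch of how such a template is expanded (the `hook_id` value is invented for illustration):

```python
# Hypothetical expansion of the removed ENDPOINT_URL_TEMPLATE; the hook_id
# value "abc123" is made up for illustration.
template = "http://localhost:5002/e/{hook_id}"
print(template.format(hook_id="abc123"))  # -> http://localhost:5002/e/abc123
```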
@@ -53,12 +53,10 @@ ignore = [
     "FURB152", # math-constant
     "UP007", # non-pep604-annotation
     "UP032", # f-string
-    "UP045", # non-pep604-annotation-optional
     "B005", # strip-with-multi-characters
     "B006", # mutable-argument-default
     "B007", # unused-loop-control-variable
     "B026", # star-arg-unpacking-after-keyword-arg
-    "B903", # class-as-data-structure
     "B904", # raise-without-from-inside-except
     "B905", # zip-without-explicit-strict
     "N806", # non-lowercase-variable-in-function
@@ -69,7 +67,7 @@ ignore = [
     "SIM105", # suppressible-exception
     "SIM107", # return-in-try-except-finally
     "SIM108", # if-else-block-instead-of-if-exp
-    "SIM113", # enumerate-for-loop
+    "SIM113", # eumerate-for-loop
     "SIM117", # multiple-with-statements
     "SIM210", # if-expr-with-true-false
 ]
@@ -87,11 +85,11 @@ ignore = [
 ]
 "tests/*" = [
     "F811", # redefined-while-unused
+    "F401", # unused-import
 ]

 [lint.pyflakes]
-allowed-unused-imports = [
+extend-generics = [
     "_pytest.monkeypatch",
     "tests.integration_tests",
-    "tests.unit_tests",
 ]
@@ -4,7 +4,7 @@ FROM python:3.12-slim-bookworm AS base
 WORKDIR /app/api

 # Install Poetry
-ENV POETRY_VERSION=2.0.1
+ENV POETRY_VERSION=1.8.4

 # if you located in China, you can use aliyun mirror to speed up
 # RUN pip install --no-cache-dir poetry==${POETRY_VERSION} -i https://mirrors.aliyun.com/pypi/simple/
@@ -48,20 +48,16 @@ ENV TZ=UTC

 WORKDIR /app/api

-RUN \
-    apt-get update \
-    # Install dependencies
-    && apt-get install -y --no-install-recommends \
-    # basic environment
-    curl nodejs libgmp-dev libmpfr-dev libmpc-dev \
+RUN apt-get update \
+    && apt-get install -y --no-install-recommends curl nodejs libgmp-dev libmpfr-dev libmpc-dev \
+    # if you located in China, you can use aliyun mirror to speed up
+    # && echo "deb http://mirrors.aliyun.com/debian testing main" > /etc/apt/sources.list \
+    && echo "deb http://deb.debian.org/debian testing main" > /etc/apt/sources.list \
+    && apt-get update \
     # For Security
-    expat libldap-2.5-0 perl libsqlite3-0 zlib1g \
+    && apt-get install -y --no-install-recommends expat=2.6.4-1 libldap-2.5-0=2.5.18+dfsg-3+b1 perl=5.40.0-8 libsqlite3-0=3.46.1-1 zlib1g=1:1.3.dfsg+really1.3.1-1+b1 \
     # install a chinese font to support the use of tools like matplotlib
-    fonts-noto-cjk \
-    # install a package to improve the accuracy of guessing mime type and file extension
-    media-types \
-    # install libmagic to support the use of python-magic guess MIMETYPE
-    libmagic1 \
+    && apt-get install -y fonts-noto-cjk \
     && apt-get autoremove -y \
     && rm -rf /var/lib/apt/lists/*
@@ -73,10 +69,6 @@ ENV PATH="${VIRTUAL_ENV}/bin:${PATH}"
 # Download nltk data
 RUN python -c "import nltk; nltk.download('punkt'); nltk.download('averaged_perceptron_tagger')"

-ENV TIKTOKEN_CACHE_DIR=/app/api/.tiktoken_cache
-
-RUN python -c "import tiktoken; tiktoken.encoding_for_model('gpt2')"
-
 # Copy source code
 COPY . /app/api/

@@ -84,6 +76,7 @@ COPY . /app/api/
 COPY docker/entrypoint.sh /entrypoint.sh
 RUN chmod +x /entrypoint.sh

+
 ARG COMMIT_SHA
 ENV COMMIT_SHA=${COMMIT_SHA}

@@ -37,13 +37,7 @@

 4. Create environment.

-   Dify API service uses [Poetry](https://python-poetry.org/docs/) to manage dependencies. First, you need to add the poetry shell plugin, if you don't have it already, in order to run in a virtual environment. [Note: Poetry shell is no longer a native command so you need to install the poetry plugin beforehand]
+   Dify API service uses [Poetry](https://python-poetry.org/docs/) to manage dependencies. You can execute `poetry shell` to activate the environment.

-   ```bash
-   poetry self add poetry-plugin-shell
-   ```
-
-   Then, You can execute `poetry shell` to activate the environment.

 5. Install dependencies

@@ -85,5 +79,5 @@
 2. Run the tests locally with mocked system environment variables in `tool.pytest_env` section in `pyproject.toml`

    ```bash
-   poetry run -P api bash dev/pytest/pytest_all_tests.sh
+   poetry run -C api bash dev/pytest/pytest_all_tests.sh
    ```
api/app.py
@@ -1,8 +1,12 @@
-import os
-import sys
+from libs import version_utils
+
+# preparation before creating app
+version_utils.check_supported_python_version()


 def is_db_command():
+    import sys
+
     if len(sys.argv) > 1 and sys.argv[0].endswith("flask") and sys.argv[1] == "db":
         return True
     return False
@@ -14,25 +18,10 @@ if is_db_command():

     app = create_migrations_app()
 else:
-    # It seems that JetBrains Python debugger does not work well with gevent,
-    # so we need to disable gevent in debug mode.
-    # If you are using debugpy and set GEVENT_SUPPORT=True, you can debug with gevent.
-    if (flask_debug := os.environ.get("FLASK_DEBUG", "0")) and flask_debug.lower() in {"false", "0", "no"}:
-        from gevent import monkey  # type: ignore
-
-        # gevent
-        monkey.patch_all()
-
-        from grpc.experimental import gevent as grpc_gevent  # type: ignore
-
-        # grpc gevent
-        grpc_gevent.init_gevent()
-
-        import psycogreen.gevent  # type: ignore
-
-        psycogreen.gevent.patch_psycopg()
-
     from app_factory import create_app
+    from libs import threadings_utils
+
+    threadings_utils.apply_gevent_threading_patch()

     app = create_app()
     celery = app.extensions["celery"]
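The branch removed above spells out the gevent patching that the added `threadings_utils.apply_gevent_threading_patch()` call replaces. A minimal sketch of what such a helper could look like, reconstructed from the inline code in the hunk; the real `libs/threadings_utils.py` is not shown in this diff and may differ:

```python
# Sketch of a gevent patch helper, assuming it wraps the inline code removed
# above; the actual libs/threadings_utils.py may differ.
import os


def apply_gevent_threading_patch() -> None:
    """Monkey-patch stdlib, grpc and psycopg2 for gevent, except in debug runs."""
    flask_debug = os.environ.get("FLASK_DEBUG", "0")
    if flask_debug.lower() in {"false", "0", "no"}:
        from gevent import monkey

        monkey.patch_all()  # make sockets, threads, sleeps cooperative

        from grpc.experimental import gevent as grpc_gevent

        grpc_gevent.init_gevent()  # make grpc channels gevent-aware

        import psycogreen.gevent

        psycogreen.gevent.patch_psycopg()  # make psycopg2 calls cooperative
```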
@@ -2,7 +2,6 @@ import logging
 import time

 from configs import dify_config
-from contexts.wrapper import RecyclableContextVar
 from dify_app import DifyApp


@@ -17,12 +16,6 @@ def create_flask_app_with_configs() -> DifyApp:
     dify_app = DifyApp(__name__)
     dify_app.config.from_mapping(dify_config.model_dump())

-    # add before request hook
-    @dify_app.before_request
-    def before_request():
-        # add an unique identifier to each request
-        RecyclableContextVar.increment_thread_recycles()
-
     return dify_app

api/commands.py
@@ -25,8 +25,6 @@ from models.dataset import Document as DatasetDocument
 from models.model import Account, App, AppAnnotationSetting, AppMode, Conversation, MessageAnnotation
 from models.provider import Provider, ProviderModel
 from services.account_service import RegisterService, TenantService
-from services.plugin.data_migration import PluginDataMigration
-from services.plugin.plugin_migration import PluginMigration


 @click.command("reset-password", help="Reset the account password.")
@@ -161,7 +159,8 @@ def migrate_annotation_vector_database():
         try:
             # get apps info
             apps = (
-                App.query.filter(App.status == "normal")
+                db.session.query(App)
+                .filter(App.status == "normal")
                 .order_by(App.created_at.desc())
                 .paginate(page=page, per_page=50)
             )
@@ -286,7 +285,8 @@ def migrate_knowledge_vector_database():
     while True:
         try:
             datasets = (
-                Dataset.query.filter(Dataset.indexing_technique == "high_quality")
+                db.session.query(Dataset)
+                .filter(Dataset.indexing_technique == "high_quality")
                 .order_by(Dataset.created_at.desc())
                 .paginate(page=page, per_page=50)
             )
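Both hunks above swap Flask-SQLAlchemy's `Model.query` shortcut for an explicit `db.session.query(...)` chain. A self-contained sketch of the two equivalent styles, using a hypothetical model and an in-memory SQLite database:

```python
# Hypothetical model and in-memory database; only the two query styles from
# the hunks above are being illustrated.
from flask import Flask
from flask_sqlalchemy import SQLAlchemy

flask_app = Flask(__name__)
flask_app.config["SQLALCHEMY_DATABASE_URI"] = "sqlite:///:memory:"
db = SQLAlchemy(flask_app)


class AppModel(db.Model):
    id = db.Column(db.Integer, primary_key=True)
    status = db.Column(db.String(16), default="normal")


with flask_app.app_context():
    db.create_all()
    db.session.add(AppModel(status="normal"))
    db.session.commit()

    # Shortcut form, as on the removed side of the hunks:
    via_query = AppModel.query.filter(AppModel.status == "normal").all()
    # Explicit session form, as on the added side; returns the same rows:
    via_session = db.session.query(AppModel).filter(AppModel.status == "normal").all()
    assert via_query == via_session
```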
@@ -450,8 +450,7 @@ def convert_to_agent_apps():
             if app_id not in proceeded_app_ids:
                 proceeded_app_ids.append(app_id)
                 app = db.session.query(App).filter(App.id == app_id).first()
-                if app is not None:
-                    apps.append(app)
+                apps.append(app)

         if len(apps) == 0:
             break
@@ -526,7 +525,7 @@ def add_qdrant_doc_id_index(field: str):
                 )
             )

-    except Exception:
+    except Exception as e:
         click.echo(click.style("Failed to create Qdrant client.", fg="red"))

     click.echo(click.style(f"Index creation complete. Created {create_count} collection indexes.", fg="green"))
@@ -556,20 +555,14 @@ def create_tenant(email: str, language: Optional[str] = None, name: Optional[str
     if language not in languages:
         language = "en-US"

-    # Validates name encoding for non-Latin characters.
-    name = name.strip().encode("utf-8").decode("utf-8") if name else None
+    name = name.strip()

     # generate random password
     new_password = secrets.token_urlsafe(16)

     # register account
-    account = RegisterService.register(
-        email=email,
-        name=account_name,
-        password=new_password,
-        language=language,
-        create_workspace_required=False,
-    )
+    account = RegisterService.register(email=email, name=account_name, password=new_password, language=language)
     TenantService.create_owner_tenant_if_not_exist(account, name)

     click.echo(
@@ -589,13 +582,13 @@ def upgrade_db():
         click.echo(click.style("Starting database migration.", fg="green"))

         # run db migration
-        import flask_migrate  # type: ignore
+        import flask_migrate

         flask_migrate.upgrade()

         click.echo(click.style("Database migration successful!", fg="green"))

-    except Exception:
+    except Exception as e:
         logging.exception("Failed to execute database migration")
     finally:
         lock.release()
@@ -627,10 +620,6 @@ where sites.id is null limit 1000"""

         try:
             app = db.session.query(App).filter(App.id == app_id).first()
-            if not app:
-                print(f"App {app_id} not found")
-                continue
-
             tenant = app.tenant
             if tenant:
                 accounts = tenant.get_accounts()
@@ -641,7 +630,7 @@ where sites.id is null limit 1000"""
                 account = accounts[0]
                 print("Fixing missing site for app {}".format(app.id))
                 app_was_created.send(app, account=account)
-        except Exception:
+        except Exception as e:
             failed_app_ids.append(app_id)
             click.echo(click.style("Failed to fix missing site for app {}".format(app_id), fg="red"))
             logging.exception(f"Failed to fix app related site missing issue, app_id: {app_id}")
@@ -651,69 +640,3 @@ where sites.id is null limit 1000"""
             break

     click.echo(click.style("Fix for missing app-related sites completed successfully!", fg="green"))
-
-
-@click.command("migrate-data-for-plugin", help="Migrate data for plugin.")
-def migrate_data_for_plugin():
-    """
-    Migrate data for plugin.
-    """
-    click.echo(click.style("Starting migrate data for plugin.", fg="white"))
-
-    PluginDataMigration.migrate()
-
-    click.echo(click.style("Migrate data for plugin completed.", fg="green"))
-
-
-@click.command("extract-plugins", help="Extract plugins.")
-@click.option("--output_file", prompt=True, help="The file to store the extracted plugins.", default="plugins.jsonl")
-@click.option("--workers", prompt=True, help="The number of workers to extract plugins.", default=10)
-def extract_plugins(output_file: str, workers: int):
-    """
-    Extract plugins.
-    """
-    click.echo(click.style("Starting extract plugins.", fg="white"))
-
-    PluginMigration.extract_plugins(output_file, workers)
-
-    click.echo(click.style("Extract plugins completed.", fg="green"))
-
-
-@click.command("extract-unique-identifiers", help="Extract unique identifiers.")
-@click.option(
-    "--output_file",
-    prompt=True,
-    help="The file to store the extracted unique identifiers.",
-    default="unique_identifiers.json",
-)
-@click.option(
-    "--input_file", prompt=True, help="The file to store the extracted unique identifiers.", default="plugins.jsonl"
-)
-def extract_unique_plugins(output_file: str, input_file: str):
-    """
-    Extract unique plugins.
-    """
-    click.echo(click.style("Starting extract unique plugins.", fg="white"))
-
-    PluginMigration.extract_unique_plugins_to_file(input_file, output_file)
-
-    click.echo(click.style("Extract unique plugins completed.", fg="green"))
-
-
-@click.command("install-plugins", help="Install plugins.")
-@click.option(
-    "--input_file", prompt=True, help="The file to store the extracted unique identifiers.", default="plugins.jsonl"
-)
-@click.option(
-    "--output_file", prompt=True, help="The file to store the installed plugins.", default="installed_plugins.jsonl"
-)
-@click.option("--workers", prompt=True, help="The number of workers to install plugins.", default=100)
-def install_plugins(input_file: str, output_file: str, workers: int):
-    """
-    Install plugins.
-    """
-    click.echo(click.style("Starting install plugins.", fg="white"))
-
-    PluginMigration.install_plugins(input_file, output_file, workers)
-
-    click.echo(click.style("Install plugins completed.", fg="green"))
@@ -134,60 +134,6 @@ class CodeExecutionSandboxConfig(BaseSettings):
     )


-class PluginConfig(BaseSettings):
-    """
-    Plugin configs
-    """
-
-    PLUGIN_DAEMON_URL: HttpUrl = Field(
-        description="Plugin API URL",
-        default="http://localhost:5002",
-    )
-
-    PLUGIN_DAEMON_KEY: str = Field(
-        description="Plugin API key",
-        default="plugin-api-key",
-    )
-
-    INNER_API_KEY_FOR_PLUGIN: str = Field(description="Inner api key for plugin", default="inner-api-key")
-
-    PLUGIN_REMOTE_INSTALL_HOST: str = Field(
-        description="Plugin Remote Install Host",
-        default="localhost",
-    )
-
-    PLUGIN_REMOTE_INSTALL_PORT: PositiveInt = Field(
-        description="Plugin Remote Install Port",
-        default=5003,
-    )
-
-    PLUGIN_MAX_PACKAGE_SIZE: PositiveInt = Field(
-        description="Maximum allowed size for plugin packages in bytes",
-        default=15728640,
-    )
-
-    PLUGIN_MAX_BUNDLE_SIZE: PositiveInt = Field(
-        description="Maximum allowed size for plugin bundles in bytes",
-        default=15728640 * 12,
-    )
-
-
-class MarketplaceConfig(BaseSettings):
-    """
-    Configuration for marketplace
-    """
-
-    MARKETPLACE_ENABLED: bool = Field(
-        description="Enable or disable marketplace",
-        default=True,
-    )
-
-    MARKETPLACE_API_URL: HttpUrl = Field(
-        description="Marketplace API URL",
-        default="https://marketplace.dify.ai",
-    )
-
-
 class EndpointConfig(BaseSettings):
     """
     Configuration for various application endpoints and URLs
@@ -200,7 +146,7 @@ class EndpointConfig(BaseSettings):
     )

     CONSOLE_WEB_URL: str = Field(
-        description="Base URL for the console web interface,used for frontend references and CORS configuration",
+        description="Base URL for the console web interface," "used for frontend references and CORS configuration",
         default="",
     )

@@ -214,10 +160,6 @@ class EndpointConfig(BaseSettings):
         default="",
     )

-    ENDPOINT_URL_TEMPLATE: str = Field(
-        description="Template url for endpoint plugin", default="http://localhost:5002/e/{hook_id}"
-    )
-

 class FileAccessConfig(BaseSettings):
     """
@@ -297,6 +239,7 @@ class HttpConfig(BaseSettings):
     )

     @computed_field
+    @property
     def CONSOLE_CORS_ALLOW_ORIGINS(self) -> list[str]:
         return self.inner_CONSOLE_CORS_ALLOW_ORIGINS.split(",")

@@ -307,6 +250,7 @@ class HttpConfig(BaseSettings):
     )

     @computed_field
+    @property
     def WEB_API_CORS_ALLOW_ORIGINS(self) -> list[str]:
         return self.inner_WEB_API_CORS_ALLOW_ORIGINS.split(",")

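The two small hunks above only add `@property` underneath `@computed_field`. In pydantic v2 a computed field is normally declared on a property, so the stacked decorators keep type checkers happy while the serialized output is unchanged. A self-contained sketch with a hypothetical `CorsConfig` class:

```python
# Hypothetical settings class showing the @computed_field + @property stacking
# from the hunks above (pydantic v2 / pydantic-settings).
from pydantic import computed_field
from pydantic_settings import BaseSettings


class CorsConfig(BaseSettings):
    inner_CONSOLE_CORS_ALLOW_ORIGINS: str = ""

    @computed_field  # included in model_dump() like a declared field
    @property
    def CONSOLE_CORS_ALLOW_ORIGINS(self) -> list[str]:
        return self.inner_CONSOLE_CORS_ALLOW_ORIGINS.split(",")


cfg = CorsConfig(inner_CONSOLE_CORS_ALLOW_ORIGINS="http://a.test,http://b.test")
print(cfg.CONSOLE_CORS_ALLOW_ORIGINS)  # ['http://a.test', 'http://b.test']
```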
@@ -373,8 +317,8 @@
     )

     RESPECT_XFORWARD_HEADERS_ENABLED: bool = Field(
-        description="Enable handling of X-Forwarded-For, X-Forwarded-Proto, and X-Forwarded-Port headers"
-        " when the app is behind a single trusted reverse proxy.",
+        description="Enable or disable the X-Forwarded-For Proxy Fix middleware from Werkzeug"
+        " to respect X-* headers to redirect clients",
         default=False,
     )

@@ -489,11 +433,6 @@ class WorkflowConfig(BaseSettings):
         default=5,
     )

-    WORKFLOW_PARALLEL_DEPTH_LIMIT: PositiveInt = Field(
-        description="Maximum allowed depth for nested parallel executions",
-        default=3,
-    )
-
     MAX_VARIABLE_SIZE: PositiveInt = Field(
         description="Maximum size in bytes for a single variable in workflows. Default to 200 KB.",
         default=200 * 1024,
@@ -546,21 +485,11 @@ class AuthConfig(BaseSettings):
         default=60,
     )

-    REFRESH_TOKEN_EXPIRE_DAYS: PositiveFloat = Field(
-        description="Expiration time for refresh tokens in days",
-        default=30,
-    )
-
     LOGIN_LOCKOUT_DURATION: PositiveInt = Field(
         description="Time (in seconds) a user must wait before retrying login after exceeding the rate limit.",
         default=86400,
     )

-    FORGOT_PASSWORD_LOCKOUT_DURATION: PositiveInt = Field(
-        description="Time (in seconds) a user must wait before retrying password reset after exceeding the rate limit.",
-        default=86400,
-    )
-

 class ModerationConfig(BaseSettings):
     """
@@ -669,7 +598,7 @@ class RagEtlConfig(BaseSettings):

     UNSTRUCTURED_API_KEY: Optional[str] = Field(
         description="API key for Unstructured.io service",
-        default="",
+        default=None,
     )

     SCARF_NO_ANALYTICS: Optional[str] = Field(
@@ -735,11 +664,6 @@ class IndexingConfig(BaseSettings):
         default=4000,
     )

-    CHILD_CHUNKS_PREVIEW_NUMBER: PositiveInt = Field(
-        description="Maximum number of child chunks to preview",
-        default=50,
-    )
-

 class MultiModalTransferConfig(BaseSettings):
     MULTIMODAL_SEND_FORMAT: Literal["base64", "url"] = Field(
@@ -786,27 +710,27 @@ class PositionConfig(BaseSettings):
         default="",
     )

-    @property
+    @computed_field
     def POSITION_PROVIDER_PINS_LIST(self) -> list[str]:
         return [item.strip() for item in self.POSITION_PROVIDER_PINS.split(",") if item.strip() != ""]

-    @property
+    @computed_field
     def POSITION_PROVIDER_INCLUDES_SET(self) -> set[str]:
         return {item.strip() for item in self.POSITION_PROVIDER_INCLUDES.split(",") if item.strip() != ""}

-    @property
+    @computed_field
     def POSITION_PROVIDER_EXCLUDES_SET(self) -> set[str]:
         return {item.strip() for item in self.POSITION_PROVIDER_EXCLUDES.split(",") if item.strip() != ""}

-    @property
+    @computed_field
     def POSITION_TOOL_PINS_LIST(self) -> list[str]:
         return [item.strip() for item in self.POSITION_TOOL_PINS.split(",") if item.strip() != ""]

-    @property
+    @computed_field
     def POSITION_TOOL_INCLUDES_SET(self) -> set[str]:
         return {item.strip() for item in self.POSITION_TOOL_INCLUDES.split(",") if item.strip() != ""}

-    @property
+    @computed_field
     def POSITION_TOOL_EXCLUDES_SET(self) -> set[str]:
         return {item.strip() for item in self.POSITION_TOOL_EXCLUDES.split(",") if item.strip() != ""}

@@ -838,21 +762,12 @@ class LoginConfig(BaseSettings):
     )


-class AccountConfig(BaseSettings):
-    ACCOUNT_DELETION_TOKEN_EXPIRY_MINUTES: PositiveInt = Field(
-        description="Duration in minutes for which a account deletion token remains valid",
-        default=5,
-    )
-
-
 class FeatureConfig(
     # place the configs in alphabet order
     AppExecutionConfig,
     AuthConfig,  # Changed from OAuthConfig to AuthConfig
     BillingConfig,
     CodeExecutionSandboxConfig,
-    PluginConfig,
-    MarketplaceConfig,
     DataSetConfig,
     EndpointConfig,
     FileAccessConfig,
@@ -874,7 +789,6 @@ class FeatureConfig(
     WorkflowNodeExecutionConfig,
     WorkspaceConfig,
     LoginConfig,
-    AccountConfig,
     # hosted services config
     HostedServiceConfig,
     CeleryBeatConfig,
@@ -1,40 +1,9 @@
 from typing import Optional

-from pydantic import Field, NonNegativeInt, computed_field
+from pydantic import Field, NonNegativeInt
 from pydantic_settings import BaseSettings


-class HostedCreditConfig(BaseSettings):
-    HOSTED_MODEL_CREDIT_CONFIG: str = Field(
-        description="Model credit configuration in format 'model:credits,model:credits', e.g., 'gpt-4:20,gpt-4o:10'",
-        default="",
-    )
-
-    def get_model_credits(self, model_name: str) -> int:
-        """
-        Get credit value for a specific model name.
-        Returns 1 if model is not found in configuration (default credit).
-
-        :param model_name: The name of the model to search for
-        :return: The credit value for the model
-        """
-        if not self.HOSTED_MODEL_CREDIT_CONFIG:
-            return 1
-
-        try:
-            credit_map = dict(
-                item.strip().split(":", 1) for item in self.HOSTED_MODEL_CREDIT_CONFIG.split(",") if ":" in item
-            )
-
-            # Search for matching model pattern
-            for pattern, credit in credit_map.items():
-                if pattern.strip() == model_name:
-                    return int(credit)
-            return 1  # Default quota if no match found
-        except (ValueError, AttributeError):
-            return 1  # Return default quota if parsing fails
-
-
 class HostedOpenAiConfig(BaseSettings):
     """
     Configuration for hosted OpenAI service
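For reference, a short usage sketch of the credit parser deleted above, assuming the `HostedCreditConfig` class exactly as defined in the hunk is in scope; the model names mirror the example in its own field description:

```python
# Assumes HostedCreditConfig from the removed hunk above is importable.
config = HostedCreditConfig(HOSTED_MODEL_CREDIT_CONFIG="gpt-4:20,gpt-4o:10")
assert config.get_model_credits("gpt-4") == 20
assert config.get_model_credits("gpt-4o") == 10
assert config.get_model_credits("some-other-model") == 1  # default credit
```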
@@ -212,7 +181,7 @@ class HostedFetchAppTemplateConfig(BaseSettings):
     """

     HOSTED_FETCH_APP_TEMPLATES_MODE: str = Field(
-        description="Mode for fetching app templates: remote, db, or builtin default to remote,",
+        description="Mode for fetching app templates: remote, db, or builtin" " default to remote,",
         default="remote",
     )

@@ -233,7 +202,5 @@ class HostedServiceConfig(
     HostedZhipuAIConfig,
     # moderation
     HostedModerationConfig,
-    # credit config
-    HostedCreditConfig,
 ):
     pass
@@ -1,4 +1,3 @@
-import os
 from typing import Any, Literal, Optional
 from urllib.parse import quote_plus

@@ -131,6 +130,7 @@ class DatabaseConfig(BaseSettings):
     )

     @computed_field
+    @property
     def SQLALCHEMY_DATABASE_URI(self) -> str:
         db_extras = (
             f"{self.DB_EXTRAS}&client_encoding={self.DB_CHARSET}" if self.DB_CHARSET else self.DB_EXTRAS
@@ -167,12 +167,8 @@ class DatabaseConfig(BaseSettings):
         default=False,
     )

-    RETRIEVAL_SERVICE_EXECUTORS: NonNegativeInt = Field(
-        description="Number of processes for the retrieval service, default to CPU cores.",
-        default=os.cpu_count(),
-    )
-
     @computed_field
+    @property
     def SQLALCHEMY_ENGINE_OPTIONS(self) -> dict[str, Any]:
         return {
             "pool_size": self.SQLALCHEMY_POOL_SIZE,
@@ -210,6 +206,7 @@ class CeleryConfig(DatabaseConfig):
     )

     @computed_field
+    @property
     def CELERY_RESULT_BACKEND(self) -> str | None:
         return (
             "db+{}".format(self.SQLALCHEMY_DATABASE_URI)
@@ -217,6 +214,7 @@ class CeleryConfig(DatabaseConfig):
             else self.CELERY_BROKER_URL
         )

+    @computed_field
     @property
     def BROKER_USE_SSL(self) -> bool:
         return self.CELERY_BROKER_URL.startswith("rediss://") if self.CELERY_BROKER_URL else False
@@ -33,9 +33,3 @@ class MilvusConfig(BaseSettings):
         description="Name of the Milvus database to connect to (default is 'default')",
         default="default",
     )
-
-    MILVUS_ENABLE_HYBRID_SEARCH: bool = Field(
-        description="Enable hybrid search features (requires Milvus >= 2.5.0). Set to false for compatibility with "
-        "older versions",
-        default=True,
-    )
@@ -9,7 +9,7 @@ class PackagingInfo(BaseSettings):

     CURRENT_VERSION: str = Field(
         description="Dify version",
-        default="1.0.0",
+        default="0.14.1",
     )

     COMMIT_SHA: str = Field(
@@ -4,7 +4,6 @@ import logging
 import os
 import threading
 import time
-from collections.abc import Mapping
 from pathlib import Path

 from .python_3x import http_request, makedirs_wrapper
@@ -256,8 +255,8 @@ class ApolloClient:
         logger.info("stopped, long_poll")

     # add the need for endorsement to the header
-    def _sign_headers(self, url: str) -> Mapping[str, str]:
-        headers: dict[str, str] = {}
+    def _sign_headers(self, url):
+        headers = {}
         if self.secret == "":
             return headers
         uri = url[len(self.config_url) : len(url)]
@@ -15,7 +15,7 @@ AUDIO_EXTENSIONS.extend([ext.upper() for ext in AUDIO_EXTENSIONS])

 if dify_config.ETL_TYPE == "Unstructured":
     DOCUMENT_EXTENSIONS = ["txt", "markdown", "md", "mdx", "pdf", "html", "htm", "xlsx", "xls"]
-    DOCUMENT_EXTENSIONS.extend(("doc", "docx", "csv", "eml", "msg", "pptx", "xml", "epub"))
+    DOCUMENT_EXTENSIONS.extend(("docx", "csv", "eml", "msg", "pptx", "xml", "epub"))
     if dify_config.UNSTRUCTURED_API_URL:
         DOCUMENT_EXTENSIONS.append("ppt")
     DOCUMENT_EXTENSIONS.extend([ext.upper() for ext in DOCUMENT_EXTENSIONS])
@@ -1,9 +1,8 @@
 import json
-from collections.abc import Mapping

 from models.model import AppMode

-default_app_templates: Mapping[AppMode, Mapping] = {
+default_app_templates = {
     # workflow default mode
     AppMode.WORKFLOW: {
         "app": {
@@ -1,30 +1,9 @@
 from contextvars import ContextVar
-from threading import Lock
 from typing import TYPE_CHECKING

-from contexts.wrapper import RecyclableContextVar
-
 if TYPE_CHECKING:
-    from core.plugin.entities.plugin_daemon import PluginModelProviderEntity
-    from core.tools.plugin_tool.provider import PluginToolProviderController
     from core.workflow.entities.variable_pool import VariablePool


 tenant_id: ContextVar[str] = ContextVar("tenant_id")

 workflow_variable_pool: ContextVar["VariablePool"] = ContextVar("workflow_variable_pool")

-"""
-To avoid race-conditions caused by gunicorn thread recycling, using RecyclableContextVar to replace with
-"""
-plugin_tool_providers: RecyclableContextVar[dict[str, "PluginToolProviderController"]] = RecyclableContextVar(
-    ContextVar("plugin_tool_providers")
-)
-plugin_tool_providers_lock: RecyclableContextVar[Lock] = RecyclableContextVar(ContextVar("plugin_tool_providers_lock"))
-
-plugin_model_providers: RecyclableContextVar[list["PluginModelProviderEntity"] | None] = RecyclableContextVar(
-    ContextVar("plugin_model_providers")
-)
-plugin_model_providers_lock: RecyclableContextVar[Lock] = RecyclableContextVar(
-    ContextVar("plugin_model_providers_lock")
-)
@@ -1,65 +0,0 @@
-from contextvars import ContextVar
-from typing import Generic, TypeVar
-
-T = TypeVar("T")
-
-
-class HiddenValue:
-    pass
-
-
-_default = HiddenValue()
-
-
-class RecyclableContextVar(Generic[T]):
-    """
-    RecyclableContextVar is a wrapper around ContextVar
-    It's safe to use in gunicorn with thread recycling, but features like `reset` are not available for now
-
-    NOTE: you need to call `increment_thread_recycles` before requests
-    """
-
-    _thread_recycles: ContextVar[int] = ContextVar("thread_recycles")
-
-    @classmethod
-    def increment_thread_recycles(cls):
-        try:
-            recycles = cls._thread_recycles.get()
-            cls._thread_recycles.set(recycles + 1)
-        except LookupError:
-            cls._thread_recycles.set(0)
-
-    def __init__(self, context_var: ContextVar[T]):
-        self._context_var = context_var
-        self._updates = ContextVar[int](context_var.name + "_updates", default=0)
-
-    def get(self, default: T | HiddenValue = _default) -> T:
-        thread_recycles = self._thread_recycles.get(0)
-        self_updates = self._updates.get()
-        if thread_recycles > self_updates:
-            self._updates.set(thread_recycles)
-
-        # check if thread is recycled and should be updated
-        if thread_recycles < self_updates:
-            return self._context_var.get()
-        else:
-            # thread_recycles >= self_updates, means current context is invalid
-            if isinstance(default, HiddenValue) or default is _default:
-                raise LookupError
-            else:
-                return default
-
-    def set(self, value: T):
-        # it leads to a situation that self.updates is less than cls.thread_recycles if `set` was never called before
-        # increase it manually
-        thread_recycles = self._thread_recycles.get(0)
-        self_updates = self._updates.get()
-        if thread_recycles > self_updates:
-            self._updates.set(thread_recycles)
-
-        if self._updates.get() == self._thread_recycles.get(0):
-            # after increment,
-            self._updates.set(self._updates.get() + 1)
-
-        # set the context
-        self._context_var.set(value)
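A small usage sketch for the deleted wrapper, assuming the `RecyclableContextVar` class above is in scope; the variable name is illustrative. The point of the counter is that any value set before the last `increment_thread_recycles()` call is treated as stale:

```python
# Assumes RecyclableContextVar from the deleted file above is in scope.
from contextvars import ContextVar

current_user = RecyclableContextVar(ContextVar("current_user"))

# Call once at the start of each request (gunicorn recycles threads between
# requests, so anything set earlier must be invalidated).
RecyclableContextVar.increment_thread_recycles()
current_user.set("alice")
assert current_user.get() == "alice"

# A new request on a recycled thread no longer sees the old value:
RecyclableContextVar.increment_thread_recycles()
assert current_user.get("anonymous") == "anonymous"
```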
@@ -4,8 +4,3 @@ from werkzeug.exceptions import HTTPException
 class FilenameNotExistsError(HTTPException):
     code = 400
     description = "The specified filename does not exist."
-
-
-class RemoteFileUploadError(HTTPException):
-    code = 400
-    description = "Error uploading remote file."
@@ -1,4 +1,4 @@
-from flask_restful import fields  # type: ignore
+from flask_restful import fields

 parameters__system_parameters = {
     "image_file_size_limit": fields.Integer,
@@ -1,32 +1,12 @@
 import mimetypes
 import os
-import platform
 import re
 import urllib.parse
-import warnings
 from collections.abc import Mapping
 from typing import Any
 from uuid import uuid4

 import httpx

-try:
-    import magic
-except ImportError:
-    if platform.system() == "Windows":
-        warnings.warn(
-            "To use python-magic guess MIMETYPE, you need to run `pip install python-magic-bin`", stacklevel=2
-        )
-    elif platform.system() == "Darwin":
-        warnings.warn("To use python-magic guess MIMETYPE, you need to run `brew install libmagic`", stacklevel=2)
-    elif platform.system() == "Linux":
-        warnings.warn(
-            "To use python-magic guess MIMETYPE, you need to run `sudo apt-get install libmagic1`", stacklevel=2
-        )
-    else:
-        warnings.warn("To use python-magic guess MIMETYPE, you need to install `libmagic`", stacklevel=2)
-    magic = None  # type: ignore
-
 from pydantic import BaseModel

 from configs import dify_config
@@ -67,13 +47,6 @@ def guess_file_info_from_response(response: httpx.Response):
     # If guessing fails, use Content-Type from response headers
     mimetype = response.headers.get("Content-Type", "application/octet-stream")

-    # Use python-magic to guess MIME type if still unknown or generic
-    if mimetype == "application/octet-stream" and magic is not None:
-        try:
-            mimetype = magic.from_buffer(response.content[:1024], mime=True)
-        except magic.MagicException:
-            pass
-
     extension = os.path.splitext(filename)[1]

     # Ensure filename has an extension
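The fallback deleted above sniffs the payload with python-magic when the server only returns a generic `Content-Type`. A hedged standalone sketch of the same idea (requires the `python-magic` package plus the `libmagic` system library, as the removed warnings spell out); the function name is ours:

```python
# Standalone restatement of the removed fallback; sniff_mimetype is our name.
import magic


def sniff_mimetype(content: bytes, header_mimetype: str) -> str:
    """Prefer libmagic sniffing when the header is the generic octet-stream."""
    if header_mimetype == "application/octet-stream":
        try:
            return magic.from_buffer(content[:1024], mime=True)
        except magic.MagicException:
            pass
    return header_mimetype


print(sniff_mimetype(b"%PDF-1.7 sample", "application/octet-stream"))  # application/pdf
```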
@@ -2,26 +2,7 @@ from flask import Blueprint
|
|||||||
|
|
||||||
from libs.external_api import ExternalApi
|
from libs.external_api import ExternalApi
|
||||||
|
|
||||||
from .app.app_import import AppImportApi, AppImportCheckDependenciesApi, AppImportConfirmApi
|
from .app.app_import import AppImportApi, AppImportConfirmApi
|
||||||
from .explore.audio import ChatAudioApi, ChatTextApi
|
|
||||||
from .explore.completion import ChatApi, ChatStopApi, CompletionApi, CompletionStopApi
|
|
||||||
from .explore.conversation import (
|
|
||||||
ConversationApi,
|
|
||||||
ConversationListApi,
|
|
||||||
ConversationPinApi,
|
|
||||||
ConversationRenameApi,
|
|
||||||
ConversationUnPinApi,
|
|
||||||
)
|
|
||||||
from .explore.message import (
|
|
||||||
MessageFeedbackApi,
|
|
||||||
MessageListApi,
|
|
||||||
MessageMoreLikeThisApi,
|
|
||||||
MessageSuggestedQuestionApi,
|
|
||||||
)
|
|
||||||
from .explore.workflow import (
|
|
||||||
InstalledAppWorkflowRunApi,
|
|
||||||
InstalledAppWorkflowTaskStopApi,
|
|
||||||
)
|
|
||||||
from .files import FileApi, FilePreviewApi, FileSupportTypeApi
|
from .files import FileApi, FilePreviewApi, FileSupportTypeApi
|
||||||
from .remote_files import RemoteFileInfoApi, RemoteFileUploadApi
|
from .remote_files import RemoteFileInfoApi, RemoteFileUploadApi
|
||||||
|
|
||||||
@@ -40,7 +21,6 @@ api.add_resource(RemoteFileUploadApi, "/remote-files/upload")
|
|||||||
# Import App
|
# Import App
|
||||||
api.add_resource(AppImportApi, "/apps/imports")
|
api.add_resource(AppImportApi, "/apps/imports")
|
||||||
api.add_resource(AppImportConfirmApi, "/apps/imports/<string:import_id>/confirm")
|
api.add_resource(AppImportConfirmApi, "/apps/imports/<string:import_id>/confirm")
|
||||||
api.add_resource(AppImportCheckDependenciesApi, "/apps/imports/<string:app_id>/check-dependencies")
|
|
||||||
|
|
||||||
# Import other controllers
|
# Import other controllers
|
||||||
 from . import admin, apikey, extension, feature, ping, setup, version
@@ -86,96 +66,19 @@ from .datasets import (
 
 # Import explore controllers
 from .explore import (
+    audio,
+    completion,
+    conversation,
     installed_app,
+    message,
     parameter,
     recommended_app,
     saved_message,
-)
-
-# Explore Audio
-api.add_resource(ChatAudioApi, "/installed-apps/<uuid:installed_app_id>/audio-to-text", endpoint="installed_app_audio")
-api.add_resource(ChatTextApi, "/installed-apps/<uuid:installed_app_id>/text-to-audio", endpoint="installed_app_text")
-
-# Explore Completion
-api.add_resource(
-    CompletionApi, "/installed-apps/<uuid:installed_app_id>/completion-messages", endpoint="installed_app_completion"
-)
-api.add_resource(
-    CompletionStopApi,
-    "/installed-apps/<uuid:installed_app_id>/completion-messages/<string:task_id>/stop",
-    endpoint="installed_app_stop_completion",
-)
-api.add_resource(
-    ChatApi, "/installed-apps/<uuid:installed_app_id>/chat-messages", endpoint="installed_app_chat_completion"
-)
-api.add_resource(
-    ChatStopApi,
-    "/installed-apps/<uuid:installed_app_id>/chat-messages/<string:task_id>/stop",
-    endpoint="installed_app_stop_chat_completion",
-)
-
-# Explore Conversation
-api.add_resource(
-    ConversationRenameApi,
-    "/installed-apps/<uuid:installed_app_id>/conversations/<uuid:c_id>/name",
-    endpoint="installed_app_conversation_rename",
-)
-api.add_resource(
-    ConversationListApi, "/installed-apps/<uuid:installed_app_id>/conversations", endpoint="installed_app_conversations"
-)
-api.add_resource(
-    ConversationApi,
-    "/installed-apps/<uuid:installed_app_id>/conversations/<uuid:c_id>",
-    endpoint="installed_app_conversation",
-)
-api.add_resource(
-    ConversationPinApi,
-    "/installed-apps/<uuid:installed_app_id>/conversations/<uuid:c_id>/pin",
-    endpoint="installed_app_conversation_pin",
-)
-api.add_resource(
-    ConversationUnPinApi,
-    "/installed-apps/<uuid:installed_app_id>/conversations/<uuid:c_id>/unpin",
-    endpoint="installed_app_conversation_unpin",
-)
-
-
-# Explore Message
-api.add_resource(MessageListApi, "/installed-apps/<uuid:installed_app_id>/messages", endpoint="installed_app_messages")
-api.add_resource(
-    MessageFeedbackApi,
-    "/installed-apps/<uuid:installed_app_id>/messages/<uuid:message_id>/feedbacks",
-    endpoint="installed_app_message_feedback",
-)
-api.add_resource(
-    MessageMoreLikeThisApi,
-    "/installed-apps/<uuid:installed_app_id>/messages/<uuid:message_id>/more-like-this",
-    endpoint="installed_app_more_like_this",
-)
-api.add_resource(
-    MessageSuggestedQuestionApi,
-    "/installed-apps/<uuid:installed_app_id>/messages/<uuid:message_id>/suggested-questions",
-    endpoint="installed_app_suggested_question",
-)
-# Explore Workflow
-api.add_resource(InstalledAppWorkflowRunApi, "/installed-apps/<uuid:installed_app_id>/workflows/run")
-api.add_resource(
-    InstalledAppWorkflowTaskStopApi, "/installed-apps/<uuid:installed_app_id>/workflows/tasks/<string:task_id>/stop"
+    workflow,
 )
 
 # Import tag controllers
 from .tag import tags
 
 # Import workspace controllers
-from .workspace import (
-    account,
-    agent_providers,
-    endpoint,
-    load_balancing_config,
-    members,
-    model_providers,
-    models,
-    plugin,
-    tool_providers,
-    workspace,
-)
+from .workspace import account, load_balancing_config, members, model_providers, models, tool_providers, workspace
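Note on the hunk above: the left side registers every explore route centrally in the console package, while the right side only imports the explore modules and lets each one register its own routes on import. Below is a minimal, runnable sketch of the flask_restful registration pattern both variants rely on; the app, resource class, and route are illustrative stand-ins, not Dify's actual wiring.

from flask import Flask
from flask_restful import Api, Resource

app = Flask(__name__)
api = Api(app)


class ChatAudioApi(Resource):
    def post(self, installed_app_id: str):
        # Echo the path parameter back, standing in for real audio-to-text work.
        return {"installed_app_id": installed_app_id}


# One Resource class bound to one URL rule, under an explicit endpoint name.
api.add_resource(
    ChatAudioApi,
    "/installed-apps/<string:installed_app_id>/audio-to-text",
    endpoint="installed_app_audio",
)

with app.test_client() as client:
    print(client.post("/installed-apps/demo/audio-to-text").get_json())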
@@ -1,9 +1,7 @@
 from functools import wraps
 
 from flask import request
-from flask_restful import Resource, reqparse  # type: ignore
-from sqlalchemy import select
-from sqlalchemy.orm import Session
+from flask_restful import Resource, reqparse
 from werkzeug.exceptions import NotFound, Unauthorized
 
 from configs import dify_config
@@ -56,10 +54,9 @@ class InsertExploreAppListApi(Resource):
         parser.add_argument("position", type=int, required=True, nullable=False, location="json")
         args = parser.parse_args()
 
-        with Session(db.engine) as session:
-            app = session.execute(select(App).filter(App.id == args["app_id"])).scalar_one_or_none()
+        app = App.query.filter(App.id == args["app_id"]).first()
         if not app:
-            raise NotFound(f"App '{args['app_id']}' is not found")
+            raise NotFound(f'App \'{args["app_id"]}\' is not found')
 
         site = app.site
         if not site:
@@ -73,10 +70,7 @@ class InsertExploreAppListApi(Resource):
         privacy_policy = site.privacy_policy or args["privacy_policy"] or ""
         custom_disclaimer = site.custom_disclaimer or args["custom_disclaimer"] or ""
 
-        with Session(db.engine) as session:
-            recommended_app = session.execute(
-                select(RecommendedApp).filter(RecommendedApp.app_id == args["app_id"])
-            ).scalar_one_or_none()
+        recommended_app = RecommendedApp.query.filter(RecommendedApp.app_id == args["app_id"]).first()
 
         if not recommended_app:
             recommended_app = RecommendedApp(
@@ -116,27 +110,17 @@ class InsertExploreAppApi(Resource):
    @only_edition_cloud
    @admin_required
    def delete(self, app_id):
-        with Session(db.engine) as session:
-            recommended_app = session.execute(
-                select(RecommendedApp).filter(RecommendedApp.app_id == str(app_id))
-            ).scalar_one_or_none()
-
+        recommended_app = RecommendedApp.query.filter(RecommendedApp.app_id == str(app_id)).first()
         if not recommended_app:
             return {"result": "success"}, 204
 
-        with Session(db.engine) as session:
-            app = session.execute(select(App).filter(App.id == recommended_app.app_id)).scalar_one_or_none()
-
+        app = App.query.filter(App.id == recommended_app.app_id).first()
         if app:
             app.is_public = False
 
-        with Session(db.engine) as session:
-            installed_apps = session.execute(
-                select(InstalledApp).filter(
-                    InstalledApp.app_id == recommended_app.app_id,
-                    InstalledApp.tenant_id != InstalledApp.app_owner_tenant_id,
-                )
-            ).all()
+        installed_apps = InstalledApp.query.filter(
+            InstalledApp.app_id == recommended_app.app_id, InstalledApp.tenant_id != InstalledApp.app_owner_tenant_id
+        ).all()
 
         for installed_app in installed_apps:
             db.session.delete(installed_app)
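Note: the admin.py hunks above flip between the legacy Flask-SQLAlchemy `Model.query.filter(...).first()` style (right) and the SQLAlchemy 2.0 `session.execute(select(...))` style (left). A self-contained sketch of the 2.0 form, using a throwaway SQLite model that is not Dify's:

from sqlalchemy import Integer, String, create_engine, select
from sqlalchemy.orm import DeclarativeBase, Mapped, Session, mapped_column


class Base(DeclarativeBase):
    pass


class App(Base):
    __tablename__ = "apps"
    id: Mapped[int] = mapped_column(Integer, primary_key=True)
    name: Mapped[str] = mapped_column(String(255))


engine = create_engine("sqlite://")
Base.metadata.create_all(engine)

with Session(engine) as session:
    session.add(App(id=1, name="demo"))
    session.commit()

    # 2.0 style: an explicit Session plus a select() statement;
    # scalar_one_or_none() yields the object or None instead of raising.
    app = session.execute(select(App).where(App.id == 1)).scalar_one_or_none()
    print(app.name if app else "not found")

The legacy form reads shorter but hides the session; the explicit form is the one SQLAlchemy 2.0 documents going forward.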
@@ -1,10 +1,6 @@
-from typing import Any
-
-import flask_restful  # type: ignore
-from flask_login import current_user  # type: ignore
+import flask_restful
+from flask_login import current_user
 from flask_restful import Resource, fields, marshal_with
-from sqlalchemy import select
-from sqlalchemy.orm import Session
 from werkzeug.exceptions import Forbidden
 
 from extensions.ext_database import db
@@ -28,16 +24,7 @@ api_key_list = {"data": fields.List(fields.Nested(api_key_fields), attribute="it
 
 
 def _get_resource(resource_id, tenant_id, resource_model):
-    if resource_model == App:
-        with Session(db.engine) as session:
-            resource = session.execute(
-                select(resource_model).filter_by(id=resource_id, tenant_id=tenant_id)
-            ).scalar_one_or_none()
-    else:
-        with Session(db.engine) as session:
-            resource = session.execute(
-                select(resource_model).filter_by(id=resource_id, tenant_id=tenant_id)
-            ).scalar_one_or_none()
+    resource = resource_model.query.filter_by(id=resource_id, tenant_id=tenant_id).first()
 
     if resource is None:
         flask_restful.abort(404, message=f"{resource_model.__name__} not found.")
@@ -48,15 +35,14 @@ def _get_resource(resource_id, tenant_id, resource_model):
 class BaseApiKeyListResource(Resource):
     method_decorators = [account_initialization_required, login_required, setup_required]
 
-    resource_type: str | None = None
-    resource_model: Any = None
-    resource_id_field: str | None = None
-    token_prefix: str | None = None
+    resource_type = None
+    resource_model = None
+    resource_id_field = None
+    token_prefix = None
     max_keys = 10
 
     @marshal_with(api_key_list)
     def get(self, resource_id):
-        assert self.resource_id_field is not None, "resource_id_field must be set"
         resource_id = str(resource_id)
         _get_resource(resource_id, current_user.current_tenant_id, self.resource_model)
         keys = (
@@ -68,7 +54,6 @@ class BaseApiKeyListResource(Resource):
 
     @marshal_with(api_key_fields)
     def post(self, resource_id):
-        assert self.resource_id_field is not None, "resource_id_field must be set"
         resource_id = str(resource_id)
         _get_resource(resource_id, current_user.current_tenant_id, self.resource_model)
         if not current_user.is_editor:
@@ -101,12 +86,11 @@ class BaseApiKeyListResource(Resource):
 class BaseApiKeyResource(Resource):
     method_decorators = [account_initialization_required, login_required, setup_required]
 
-    resource_type: str | None = None
-    resource_model: Any = None
-    resource_id_field: str | None = None
+    resource_type = None
+    resource_model = None
+    resource_id_field = None
 
     def delete(self, resource_id, api_key_id):
-        assert self.resource_id_field is not None, "resource_id_field must be set"
         resource_id = str(resource_id)
         api_key_id = str(api_key_id)
         _get_resource(resource_id, current_user.current_tenant_id, self.resource_model)
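Note: the apikey.py hunks drop `str | None` annotations and the guarding asserts (left) in favor of bare `None` defaults (right). A self-contained sketch — class names here are hypothetical — of why the annotated form plus an assert is friendlier to a type checker:

from typing import Any


class BaseApiKeyListResource:
    # Declared Optional so subclasses may leave them unset; a checker such as
    # mypy then insists on a None check before each use.
    resource_type: str | None = None
    resource_id_field: str | None = None
    resource_model: Any = None

    def get(self, resource_id: str) -> str:
        assert self.resource_id_field is not None, "resource_id_field must be set"
        # After the assert, resource_id_field is narrowed from str | None to str.
        return f"{self.resource_id_field}={resource_id}"


class AppApiKeyListResource(BaseApiKeyListResource):
    resource_type = "app"
    resource_id_field = "app_id"


print(AppApiKeyListResource().get("123"))  # app_id=123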
@@ -1,4 +1,4 @@
-from flask_restful import Resource, reqparse  # type: ignore
+from flask_restful import Resource, reqparse
 
 from controllers.console import api
 from controllers.console.wraps import account_initialization_required, setup_required
@@ -1,4 +1,4 @@
-from flask_restful import Resource, reqparse  # type: ignore
+from flask_restful import Resource, reqparse
 
 from controllers.console import api
 from controllers.console.app.wraps import get_app_model
@@ -1,6 +1,6 @@
 from flask import request
-from flask_login import current_user  # type: ignore
-from flask_restful import Resource, marshal, marshal_with, reqparse  # type: ignore
+from flask_login import current_user
+from flask_restful import Resource, marshal, marshal_with, reqparse
 from werkzeug.exceptions import Forbidden
 
 from controllers.console import api
@@ -110,7 +110,7 @@ class AnnotationListApi(Resource):
 
         page = request.args.get("page", default=1, type=int)
         limit = request.args.get("limit", default=20, type=int)
-        keyword = request.args.get("keyword", default="", type=str)
+        keyword = request.args.get("keyword", default=None, type=str)
 
         app_id = str(app_id)
         annotation_list, total = AppAnnotationService.get_annotation_list_by_app_id(app_id, page, limit, keyword)
@@ -1,8 +1,8 @@
 import uuid
 from typing import cast
 
-from flask_login import current_user  # type: ignore
-from flask_restful import Resource, inputs, marshal, marshal_with, reqparse  # type: ignore
+from flask_login import current_user
+from flask_restful import Resource, inputs, marshal, marshal_with, reqparse
 from sqlalchemy import select
 from sqlalchemy.orm import Session
 from werkzeug.exceptions import BadRequest, Forbidden, abort
@@ -57,13 +57,12 @@ class AppListApi(Resource):
         )
         parser.add_argument("name", type=str, location="args", required=False)
         parser.add_argument("tag_ids", type=uuid_list, location="args", required=False)
-        parser.add_argument("is_created_by_me", type=inputs.boolean, location="args", required=False)
 
         args = parser.parse_args()
 
         # get app list
         app_service = AppService()
-        app_pagination = app_service.get_paginate_apps(current_user.id, current_user.current_tenant_id, args)
+        app_pagination = app_service.get_paginate_apps(current_user.current_tenant_id, args)
         if not app_pagination:
             return {"data": [], "total": 0, "page": 1, "limit": 20, "has_more": False}
 
@@ -1,20 +1,18 @@
 from typing import cast
 
-from flask_login import current_user  # type: ignore
-from flask_restful import Resource, marshal_with, reqparse  # type: ignore
+from flask_login import current_user
+from flask_restful import Resource, marshal_with, reqparse
 from sqlalchemy.orm import Session
 from werkzeug.exceptions import Forbidden
 
-from controllers.console.app.wraps import get_app_model
 from controllers.console.wraps import (
     account_initialization_required,
     setup_required,
 )
 from extensions.ext_database import db
-from fields.app_fields import app_import_check_dependencies_fields, app_import_fields
+from fields.app_fields import app_import_fields
 from libs.login import login_required
 from models import Account
-from models.model import App
 from services.app_dsl_service import AppDslService, ImportStatus
 
 
@@ -90,20 +88,3 @@ class AppImportConfirmApi(Resource):
         if result.status == ImportStatus.FAILED.value:
             return result.model_dump(mode="json"), 400
         return result.model_dump(mode="json"), 200
-
-
-class AppImportCheckDependenciesApi(Resource):
-    @setup_required
-    @login_required
-    @get_app_model
-    @account_initialization_required
-    @marshal_with(app_import_check_dependencies_fields)
-    def get(self, app_model: App):
-        if not current_user.is_editor:
-            raise Forbidden()
-
-        with Session(db.engine) as session:
-            import_service = AppDslService(session)
-            result = import_service.check_dependencies(app_model=app_model)
-
-        return result.model_dump(mode="json"), 200
@@ -1,7 +1,7 @@
 import logging
 
 from flask import request
-from flask_restful import Resource, reqparse  # type: ignore
+from flask_restful import Resource, reqparse
 from werkzeug.exceptions import InternalServerError
 
 import services
@@ -22,7 +22,7 @@ from controllers.console.wraps import account_initialization_required, setup_req
 from core.errors.error import ModelCurrentlyNotSupportError, ProviderTokenNotInitError, QuotaExceededError
 from core.model_runtime.errors.invoke import InvokeError
 from libs.login import login_required
-from models import App, AppMode
+from models.model import AppMode
 from services.audio_service import AudioService
 from services.errors.audio import (
     AudioTooLargeServiceError,
@@ -79,7 +79,7 @@ class ChatMessageTextApi(Resource):
     @login_required
     @account_initialization_required
     @get_app_model
-    def post(self, app_model: App):
+    def post(self, app_model):
         from werkzeug.exceptions import InternalServerError
 
         try:
@@ -98,13 +98,9 @@ class ChatMessageTextApi(Resource):
                 and app_model.workflow.features_dict
             ):
                 text_to_speech = app_model.workflow.features_dict.get("text_to_speech")
-                if text_to_speech is None:
-                    raise ValueError("TTS is not enabled")
                 voice = args.get("voice") or text_to_speech.get("voice")
             else:
                 try:
-                    if app_model.app_model_config is None:
-                        raise ValueError("AppModelConfig not found")
                     voice = args.get("voice") or app_model.app_model_config.text_to_speech_dict.get("voice")
                 except Exception:
                     voice = None
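Note: the left side of the ChatMessageTextApi hunk fails fast when TTS is not configured instead of letting a None slip through. A standalone sketch of that voice-resolution logic, with hypothetical dict shapes rather than Dify's real feature config:

def resolve_voice(requested: str | None, features: dict) -> str | None:
    """Prefer the caller's voice, else the app's TTS config; fail fast if TTS is off."""
    text_to_speech = features.get("text_to_speech")
    if text_to_speech is None:
        raise ValueError("TTS is not enabled")
    return requested or text_to_speech.get("voice")


print(resolve_voice(None, {"text_to_speech": {"voice": "alloy"}}))  # alloy
print(resolve_voice("echo", {"text_to_speech": {}}))                # echo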
@@ -1,7 +1,7 @@
 import logging
 
-import flask_login  # type: ignore
-from flask_restful import Resource, reqparse  # type: ignore
+import flask_login
+from flask_restful import Resource, reqparse
 from werkzeug.exceptions import InternalServerError, NotFound
 
 import services
@@ -20,6 +20,7 @@ from controllers.web.error import InvokeRateLimitError as InvokeRateLimitHttpErr
 from core.app.apps.base_app_queue_manager import AppQueueManager
 from core.app.entities.app_invoke_entities import InvokeFrom
 from core.errors.error import (
+    AppInvokeQuotaExceededError,
     ModelCurrentlyNotSupportError,
     ProviderTokenNotInitError,
     QuotaExceededError,
@@ -75,7 +76,7 @@ class CompletionMessageApi(Resource):
             raise ProviderModelCurrentlyNotSupportError()
         except InvokeError as e:
             raise CompletionRequestError(e.description)
-        except ValueError as e:
+        except (ValueError, AppInvokeQuotaExceededError) as e:
            raise e
         except Exception as e:
             logging.exception("internal server error.")
@@ -140,7 +141,7 @@ class ChatMessageApi(Resource):
             raise InvokeRateLimitHttpError(ex.description)
         except InvokeError as e:
             raise CompletionRequestError(e.description)
-        except ValueError as e:
+        except (ValueError, AppInvokeQuotaExceededError) as e:
            raise e
         except Exception as e:
             logging.exception("internal server error.")
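Note: both completion.py hunks only widen one `except` clause; the surrounding error-mapping idiom is the same on both sides. A minimal sketch of that idiom — the error class here is a local stand-in, not Dify's HTTP error type:

import logging


class CompletionRequestError(Exception):
    """Local stand-in for the controller's HTTP 400 error class."""


def invoke(fn):
    try:
        return fn()
    except ValueError:
        raise  # well-formed input errors propagate unchanged
    except Exception:
        logging.exception("internal server error.")
        raise CompletionRequestError("internal server error")


print(invoke(lambda: "ok"))  # ok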
@@ -1,9 +1,9 @@
 from datetime import UTC, datetime
 
-import pytz  # pip install pytz
-from flask_login import current_user  # type: ignore
-from flask_restful import Resource, marshal_with, reqparse  # type: ignore
-from flask_restful.inputs import int_range  # type: ignore
+import pytz
+from flask_login import current_user
+from flask_restful import Resource, marshal_with, reqparse
+from flask_restful.inputs import int_range
 from sqlalchemy import func, or_
 from sqlalchemy.orm import joinedload
 from werkzeug.exceptions import Forbidden, NotFound
@@ -77,9 +77,8 @@ class CompletionConversationApi(Resource):
 
             query = query.where(Conversation.created_at < end_datetime_utc)
 
-        # FIXME, the type ignore in this file
         if args["annotation_status"] == "annotated":
-            query = query.options(joinedload(Conversation.message_annotations)).join(  # type: ignore
+            query = query.options(joinedload(Conversation.message_annotations)).join(
                 MessageAnnotation, MessageAnnotation.conversation_id == Conversation.id
             )
         elif args["annotation_status"] == "not_annotated":
@@ -223,7 +222,7 @@ class ChatConversationApi(Resource):
             query = query.where(Conversation.created_at <= end_datetime_utc)
 
         if args["annotation_status"] == "annotated":
-            query = query.options(joinedload(Conversation.message_annotations)).join(  # type: ignore
+            query = query.options(joinedload(Conversation.message_annotations)).join(
                 MessageAnnotation, MessageAnnotation.conversation_id == Conversation.id
             )
         elif args["annotation_status"] == "not_annotated":
@@ -235,7 +234,7 @@ class ChatConversationApi(Resource):
 
         if args["message_count_gte"] and args["message_count_gte"] >= 1:
             query = (
-                query.options(joinedload(Conversation.messages))  # type: ignore
+                query.options(joinedload(Conversation.messages))
                 .join(Message, Message.conversation_id == Conversation.id)
                 .group_by(Conversation.id)
                 .having(func.count(Message.id) >= args["message_count_gte"])
@@ -1,4 +1,4 @@
-from flask_restful import Resource, marshal_with, reqparse  # type: ignore
+from flask_restful import Resource, marshal_with, reqparse
 from sqlalchemy import select
 from sqlalchemy.orm import Session
 
@@ -1,7 +1,7 @@
 import os
 
-from flask_login import current_user  # type: ignore
-from flask_restful import Resource, reqparse  # type: ignore
+from flask_login import current_user
+from flask_restful import Resource, reqparse
 
 from controllers.console import api
 from controllers.console.app.error import (
@@ -1,8 +1,8 @@
 import logging
 
-from flask_login import current_user  # type: ignore
-from flask_restful import Resource, fields, marshal_with, reqparse  # type: ignore
-from flask_restful.inputs import int_range  # type: ignore
+from flask_login import current_user
+from flask_restful import Resource, fields, marshal_with, reqparse
+from flask_restful.inputs import int_range
 from werkzeug.exceptions import Forbidden, InternalServerError, NotFound
 
 from controllers.console import api
@@ -1,9 +1,8 @@
 import json
-from typing import cast
 
 from flask import request
-from flask_login import current_user  # type: ignore
-from flask_restful import Resource  # type: ignore
+from flask_login import current_user
+from flask_restful import Resource
 
 from controllers.console import api
 from controllers.console.app.wraps import get_app_model
@@ -27,9 +26,7 @@ class ModelConfigResource(Resource):
         """Modify app model config"""
         # validate config
         model_configuration = AppModelConfigService.validate_configuration(
-            tenant_id=current_user.current_tenant_id,
-            config=cast(dict, request.json),
-            app_mode=AppMode.value_of(app_model.mode),
+            tenant_id=current_user.current_tenant_id, config=request.json, app_mode=AppMode.value_of(app_model.mode)
         )
 
         new_app_model_config = AppModelConfig(
@@ -41,11 +38,9 @@ class ModelConfigResource(Resource):
 
         if app_model.mode == AppMode.AGENT_CHAT.value or app_model.is_agent:
             # get original app model config
-            original_app_model_config = (
+            original_app_model_config: AppModelConfig = (
                 db.session.query(AppModelConfig).filter(AppModelConfig.id == app_model.app_model_config_id).first()
             )
-            if original_app_model_config is None:
-                raise ValueError("Original app model config not found")
             agent_mode = original_app_model_config.agent_mode_dict
             # decrypt agent tool parameters if it's secret-input
             parameter_map = {}
@@ -1,4 +1,4 @@
-from flask_restful import Resource, reqparse  # type: ignore
+from flask_restful import Resource, reqparse
 from werkzeug.exceptions import BadRequest
 
 from controllers.console import api
@@ -1,8 +1,7 @@
 from datetime import UTC, datetime
 
-from flask_login import current_user  # type: ignore
-from flask_restful import Resource, marshal_with, reqparse  # type: ignore
-from sqlalchemy.orm import Session
+from flask_login import current_user
+from flask_restful import Resource, marshal_with, reqparse
 from werkzeug.exceptions import Forbidden, NotFound
 
 from constants.languages import supported_language
@@ -51,37 +50,33 @@ class AppSite(Resource):
         if not current_user.is_editor:
             raise Forbidden()
 
-        with Session(db.engine) as session:
-            site = session.query(Site).filter(Site.app_id == app_model.id).first()
-
-            if not site:
-                raise NotFound
-
-            for attr_name in [
-                "title",
-                "icon_type",
-                "icon",
-                "icon_background",
-                "description",
-                "default_language",
-                "chat_color_theme",
-                "chat_color_theme_inverted",
-                "customize_domain",
-                "copyright",
-                "privacy_policy",
-                "custom_disclaimer",
-                "customize_token_strategy",
-                "prompt_public",
-                "show_workflow_steps",
-                "use_icon_as_answer_icon",
-            ]:
-                value = args.get(attr_name)
-                if value is not None:
-                    setattr(site, attr_name, value)
-
-            site.updated_by = current_user.id
-            site.updated_at = datetime.now(UTC).replace(tzinfo=None)
-            session.commit()
+        site = db.session.query(Site).filter(Site.app_id == app_model.id).one_or_404()
+
+        for attr_name in [
+            "title",
+            "icon_type",
+            "icon",
+            "icon_background",
+            "description",
+            "default_language",
+            "chat_color_theme",
+            "chat_color_theme_inverted",
+            "customize_domain",
+            "copyright",
+            "privacy_policy",
+            "custom_disclaimer",
+            "customize_token_strategy",
+            "prompt_public",
+            "show_workflow_steps",
+            "use_icon_as_answer_icon",
+        ]:
+            value = args.get(attr_name)
+            if value is not None:
+                setattr(site, attr_name, value)
+
+        site.updated_by = current_user.id
+        site.updated_at = datetime.now(UTC).replace(tzinfo=None)
+        db.session.commit()
 
         return site
 
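Note: both sides of the AppSite hunk share the same attribute-copy loop; only the session handling around it differs. A tiny runnable sketch of the loop, with Site and args as simplified stand-ins:

class Site:
    """Simplified stand-in for the Site model."""

    title = "old title"
    icon = None


site = Site()
args = {"title": "new title", "icon": None}  # icon was not provided

for attr_name in ["title", "icon"]:
    value = args.get(attr_name)
    if value is not None:  # leave fields the request did not set untouched
        setattr(site, attr_name, value)

print(site.title, site.icon)  # new title None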
@@ -3,8 +3,8 @@ from decimal import Decimal
 
 import pytz
 from flask import jsonify
-from flask_login import current_user  # type: ignore
-from flask_restful import Resource, reqparse  # type: ignore
+from flask_login import current_user
+from flask_restful import Resource, reqparse
 
 from controllers.console import api
 from controllers.console.app.wraps import get_app_model
@@ -273,7 +273,8 @@ FROM
     messages m
     ON c.id = m.conversation_id
 WHERE
-    c.app_id = :app_id"""
+    c.override_model_configs IS NULL
+    AND c.app_id = :app_id"""
         arg_dict = {"tz": account.timezone, "app_id": app_model.id}
 
         timezone = pytz.timezone(account.timezone)
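Note: the statistics hunk edits a raw SQL string that is executed with named bound parameters (`:tz`, `:app_id`). A self-contained sketch of that parameter style — SQLite stands in for the Postgres database the real query targets:

from sqlalchemy import create_engine, text

engine = create_engine("sqlite://")
with engine.connect() as conn:
    conn.execute(text("CREATE TABLE conversations (app_id TEXT)"))
    conn.execute(text("INSERT INTO conversations VALUES ('a1'), ('a2')"))

    # Named parameters are passed as a dict, never formatted into the string.
    arg_dict = {"app_id": "a1"}
    count = conn.execute(
        text("SELECT COUNT(*) FROM conversations WHERE app_id = :app_id"),
        arg_dict,
    ).scalar()
    print(count)  # 1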
@@ -2,11 +2,10 @@ import json
 import logging
 
 from flask import abort, request
-from flask_restful import Resource, inputs, marshal_with, reqparse  # type: ignore
+from flask_restful import Resource, marshal_with, reqparse
 from werkzeug.exceptions import Forbidden, InternalServerError, NotFound
 
 import services
-from configs import dify_config
 from controllers.console import api
 from controllers.console.app.error import ConversationCompletedError, DraftWorkflowNotExist, DraftWorkflowNotSync
 from controllers.console.app.wraps import get_app_model
@@ -14,13 +13,12 @@ from controllers.console.wraps import account_initialization_required, setup_req
 from core.app.apps.base_app_queue_manager import AppQueueManager
 from core.app.entities.app_invoke_entities import InvokeFrom
 from factories import variable_factory
-from fields.workflow_fields import workflow_fields, workflow_pagination_fields
+from fields.workflow_fields import workflow_fields
 from fields.workflow_run_fields import workflow_run_node_execution_fields
 from libs import helper
 from libs.helper import TimestampField, uuid_value
 from libs.login import current_user, login_required
 from models import App
-from models.account import Account
 from models.model import AppMode
 from services.app_generate_service import AppGenerateService
 from services.errors.app import WorkflowHashNotEqualError
@@ -97,9 +95,6 @@ class DraftWorkflowApi(Resource):
         else:
             abort(415)
 
-        if not isinstance(current_user, Account):
-            raise Forbidden()
-
         workflow_service = WorkflowService()
 
         try:
@@ -143,9 +138,6 @@ class AdvancedChatDraftWorkflowRunApi(Resource):
         if not current_user.is_editor:
             raise Forbidden()
 
-        if not isinstance(current_user, Account):
-            raise Forbidden()
-
         parser = reqparse.RequestParser()
         parser.add_argument("inputs", type=dict, location="json")
         parser.add_argument("query", type=str, required=True, location="json", default="")
@@ -167,7 +159,7 @@
             raise ConversationCompletedError()
         except ValueError as e:
             raise e
-        except Exception:
+        except Exception as e:
             logging.exception("internal server error.")
             raise InternalServerError()
 
@@ -185,9 +177,6 @@ class AdvancedChatDraftRunIterationNodeApi(Resource):
         if not current_user.is_editor:
             raise Forbidden()
 
-        if not isinstance(current_user, Account):
-            raise Forbidden()
-
         parser = reqparse.RequestParser()
         parser.add_argument("inputs", type=dict, location="json")
         args = parser.parse_args()
@@ -204,7 +193,7 @@
             raise ConversationCompletedError()
         except ValueError as e:
             raise e
-        except Exception:
+        except Exception as e:
             logging.exception("internal server error.")
             raise InternalServerError()
 
@@ -222,9 +211,6 @@ class WorkflowDraftRunIterationNodeApi(Resource):
         if not current_user.is_editor:
             raise Forbidden()
 
-        if not isinstance(current_user, Account):
-            raise Forbidden()
-
         parser = reqparse.RequestParser()
         parser.add_argument("inputs", type=dict, location="json")
         args = parser.parse_args()
@@ -241,7 +227,7 @@
             raise ConversationCompletedError()
         except ValueError as e:
             raise e
-        except Exception:
+        except Exception as e:
             logging.exception("internal server error.")
             raise InternalServerError()
 
@@ -259,9 +245,6 @@ class DraftWorkflowRunApi(Resource):
         if not current_user.is_editor:
             raise Forbidden()
 
-        if not isinstance(current_user, Account):
-            raise Forbidden()
-
         parser = reqparse.RequestParser()
         parser.add_argument("inputs", type=dict, required=True, nullable=False, location="json")
         parser.add_argument("files", type=list, required=False, location="json")
@@ -310,20 +293,13 @@ class DraftWorkflowNodeRunApi(Resource):
         if not current_user.is_editor:
             raise Forbidden()
 
-        if not isinstance(current_user, Account):
-            raise Forbidden()
-
         parser = reqparse.RequestParser()
         parser.add_argument("inputs", type=dict, required=True, nullable=False, location="json")
         args = parser.parse_args()
 
-        inputs = args.get("inputs")
-        if inputs == None:
-            raise ValueError("missing inputs")
-
         workflow_service = WorkflowService()
         workflow_node_execution = workflow_service.run_draft_workflow_node(
-            app_model=app_model, node_id=node_id, user_inputs=inputs, account=current_user
+            app_model=app_model, node_id=node_id, user_inputs=args.get("inputs"), account=current_user
         )
 
         return workflow_node_execution
@@ -362,9 +338,6 @@ class PublishedWorkflowApi(Resource):
         if not current_user.is_editor:
             raise Forbidden()
 
-        if not isinstance(current_user, Account):
-            raise Forbidden()
-
         workflow_service = WorkflowService()
         workflow = workflow_service.publish_workflow(app_model=app_model, account=current_user)
 
@@ -402,17 +375,12 @@ class DefaultBlockConfigApi(Resource):
         if not current_user.is_editor:
             raise Forbidden()
 
-        if not isinstance(current_user, Account):
-            raise Forbidden()
-
         parser = reqparse.RequestParser()
         parser.add_argument("q", type=str, location="args")
         args = parser.parse_args()
 
-        q = args.get("q")
-
         filters = None
-        if q:
+        if args.get("q"):
             try:
                 filters = json.loads(args.get("q", ""))
             except json.JSONDecodeError:
@@ -438,9 +406,6 @@ class ConvertToWorkflowApi(Resource):
         if not current_user.is_editor:
             raise Forbidden()
 
-        if not isinstance(current_user, Account):
-            raise Forbidden()
-
         if request.data:
             parser = reqparse.RequestParser()
             parser.add_argument("name", type=str, required=False, nullable=True, location="json")
@@ -461,46 +426,7 @@
         }
 
 
-class WorkflowConfigApi(Resource):
-    """Resource for workflow configuration."""
-
-    @setup_required
-    @login_required
-    @account_initialization_required
-    @get_app_model(mode=[AppMode.ADVANCED_CHAT, AppMode.WORKFLOW])
-    def get(self, app_model: App):
-        return {
-            "parallel_depth_limit": dify_config.WORKFLOW_PARALLEL_DEPTH_LIMIT,
-        }
-
-
-class PublishedAllWorkflowApi(Resource):
-    @setup_required
-    @login_required
-    @account_initialization_required
-    @get_app_model(mode=[AppMode.ADVANCED_CHAT, AppMode.WORKFLOW])
-    @marshal_with(workflow_pagination_fields)
-    def get(self, app_model: App):
-        """
-        Get published workflows
-        """
-        if not current_user.is_editor:
-            raise Forbidden()
-
-        parser = reqparse.RequestParser()
-        parser.add_argument("page", type=inputs.int_range(1, 99999), required=False, default=1, location="args")
-        parser.add_argument("limit", type=inputs.int_range(1, 100), required=False, default=20, location="args")
-        args = parser.parse_args()
-        page = args.get("page")
-        limit = args.get("limit")
-        workflow_service = WorkflowService()
-        workflows, has_more = workflow_service.get_all_published_workflow(app_model=app_model, page=page, limit=limit)
-
-        return {"items": workflows, "page": page, "limit": limit, "has_more": has_more}
-
-
 api.add_resource(DraftWorkflowApi, "/apps/<uuid:app_id>/workflows/draft")
-api.add_resource(WorkflowConfigApi, "/apps/<uuid:app_id>/workflows/draft/config")
 api.add_resource(AdvancedChatDraftWorkflowRunApi, "/apps/<uuid:app_id>/advanced-chat/workflows/draft/run")
 api.add_resource(DraftWorkflowRunApi, "/apps/<uuid:app_id>/workflows/draft/run")
 api.add_resource(WorkflowTaskStopApi, "/apps/<uuid:app_id>/workflow-runs/tasks/<string:task_id>/stop")
@@ -513,7 +439,6 @@ api.add_resource(
     WorkflowDraftRunIterationNodeApi, "/apps/<uuid:app_id>/workflows/draft/iteration/nodes/<string:node_id>/run"
 )
 api.add_resource(PublishedWorkflowApi, "/apps/<uuid:app_id>/workflows/publish")
-api.add_resource(PublishedAllWorkflowApi, "/apps/<uuid:app_id>/workflows")
 api.add_resource(DefaultBlockConfigsApi, "/apps/<uuid:app_id>/workflows/default-workflow-block-configs")
 api.add_resource(
     DefaultBlockConfigApi, "/apps/<uuid:app_id>/workflows/default-workflow-block-configs/<string:block_type>"
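Note: in the DefaultBlockConfigApi hunk, the left side hoists `args.get("q")` into a local before JSON-decoding it. A standalone sketch of that optional-filter parsing, with a hypothetical helper name:

import json


def parse_filters(q: str | None):
    """Hypothetical helper mirroring the controller's optional-filter parsing."""
    if not q:
        return None
    try:
        return json.loads(q)
    except json.JSONDecodeError:
        raise ValueError("Invalid filters")


print(parse_filters(None))           # None
print(parse_filters('{"mode": 1}'))  # {'mode': 1}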
@@ -1,5 +1,5 @@
-from flask_restful import Resource, marshal_with, reqparse  # type: ignore
-from flask_restful.inputs import int_range  # type: ignore
+from flask_restful import Resource, marshal_with, reqparse
+from flask_restful.inputs import int_range
 
 from controllers.console import api
 from controllers.console.app.wraps import get_app_model
@@ -1,5 +1,5 @@
-from flask_restful import Resource, marshal_with, reqparse  # type: ignore
-from flask_restful.inputs import int_range  # type: ignore
+from flask_restful import Resource, marshal_with, reqparse
+from flask_restful.inputs import int_range
 
 from controllers.console import api
 from controllers.console.app.wraps import get_app_model
@@ -3,8 +3,8 @@ from decimal import Decimal
 
 import pytz
 from flask import jsonify
-from flask_login import current_user  # type: ignore
-from flask_restful import Resource, reqparse  # type: ignore
+from flask_login import current_user
+from flask_restful import Resource, reqparse
 
 from controllers.console import api
 from controllers.console.app.wraps import get_app_model
@@ -5,10 +5,11 @@ from typing import Optional, Union
 from controllers.console.app.error import AppNotFoundError
 from extensions.ext_database import db
 from libs.login import current_user
-from models import App, AppMode
+from models import App
+from models.model import AppMode
 
 
-def get_app_model(view: Optional[Callable] = None, *, mode: Union[AppMode, list[AppMode], None] = None):
+def get_app_model(view: Optional[Callable] = None, *, mode: Union[AppMode, list[AppMode]] = None):
     def decorator(view_func):
         @wraps(view_func)
         def decorated_view(*args, **kwargs):
@@ -1,14 +1,14 @@
 import datetime
 
 from flask import request
-from flask_restful import Resource, reqparse  # type: ignore
+from flask_restful import Resource, reqparse
 
 from constants.languages import supported_language
 from controllers.console import api
 from controllers.console.error import AlreadyActivateError
 from extensions.ext_database import db
 from libs.helper import StrLen, email, extract_remote_ip, timezone
-from models.account import AccountStatus
+from models.account import AccountStatus, Tenant
 from services.account_service import AccountService, RegisterService
 
 
@@ -27,7 +27,7 @@ class ActivateCheckApi(Resource):
         invitation = RegisterService.get_invitation_if_token_valid(workspaceId, reg_email, token)
         if invitation:
             data = invitation.get("data", {})
-            tenant = invitation.get("tenant", None)
+            tenant: Tenant = invitation.get("tenant", None)
             workspace_name = tenant.name if tenant else None
             workspace_id = tenant.id if tenant else None
             invitee_email = data.get("email") if data else None
@@ -1,5 +1,5 @@
-from flask_login import current_user  # type: ignore
-from flask_restful import Resource, reqparse  # type: ignore
+from flask_login import current_user
+from flask_restful import Resource, reqparse
 from werkzeug.exceptions import Forbidden
 
 from controllers.console import api
@@ -2,8 +2,8 @@ import logging
 
 import requests
 from flask import current_app, redirect, request
-from flask_login import current_user  # type: ignore
-from flask_restful import Resource  # type: ignore
+from flask_login import current_user
+from flask_restful import Resource
 from werkzeug.exceptions import Forbidden
 
 from configs import dify_config
@@ -17,8 +17,8 @@ from ..wraps import account_initialization_required, setup_required
 def get_oauth_providers():
     with current_app.app_context():
         notion_oauth = NotionOAuth(
-            client_id=dify_config.NOTION_CLIENT_ID or "",
-            client_secret=dify_config.NOTION_CLIENT_SECRET or "",
+            client_id=dify_config.NOTION_CLIENT_ID,
+            client_secret=dify_config.NOTION_CLIENT_SECRET,
             redirect_uri=dify_config.CONSOLE_API_URL + "/console/api/oauth/data-source/callback/notion",
         )
 
@@ -53,15 +53,3 @@ class EmailCodeLoginRateLimitExceededError(BaseHTTPException):
     error_code = "email_code_login_rate_limit_exceeded"
     description = "Too many login emails have been sent. Please try again in 5 minutes."
     code = 429
-
-
-class EmailCodeAccountDeletionRateLimitExceededError(BaseHTTPException):
-    error_code = "email_code_account_deletion_rate_limit_exceeded"
-    description = "Too many account deletion emails have been sent. Please try again in 5 minutes."
-    code = 429
-
-
-class EmailPasswordResetLimitError(BaseHTTPException):
-    error_code = "email_password_reset_limit"
-    description = "Too many failed password reset attempts. Please try again in 24 hours."
-    code = 429
@@ -2,20 +2,17 @@ import base64
 import secrets
 
 from flask import request
-from flask_restful import Resource, reqparse  # type: ignore
-from sqlalchemy import select
-from sqlalchemy.orm import Session
+from flask_restful import Resource, reqparse
 
 from constants.languages import languages
 from controllers.console import api
 from controllers.console.auth.error import (
     EmailCodeError,
-    EmailPasswordResetLimitError,
     InvalidEmailError,
     InvalidTokenError,
     PasswordMismatchError,
 )
-from controllers.console.error import AccountInFreezeError, AccountNotFound, EmailSendIpLimitError
+from controllers.console.error import AccountNotFound, EmailSendIpLimitError
 from controllers.console.wraps import setup_required
 from events.tenant_event import tenant_was_created
 from extensions.ext_database import db
@@ -23,7 +20,6 @@ from libs.helper import email, extract_remote_ip
 from libs.password import hash_password, valid_password
 from models.account import Account
 from services.account_service import AccountService, TenantService
-from services.errors.account import AccountRegisterError
 from services.errors.workspace import WorkSpaceNotAllowedCreateError
 from services.feature_service import FeatureService
 
@@ -45,8 +41,7 @@ class ForgotPasswordSendEmailApi(Resource):
         else:
             language = "en-US"
 
-        with Session(db.engine) as session:
-            account = session.execute(select(Account).filter_by(email=args["email"])).scalar_one_or_none()
+        account = Account.query.filter_by(email=args["email"]).first()
         token = None
         if account is None:
             if FeatureService.get_system_features().is_allow_register:
@@ -71,10 +66,6 @@ class ForgotPasswordCheckApi(Resource):
 
         user_email = args["email"]
 
-        is_forgot_password_error_rate_limit = AccountService.is_forgot_password_error_rate_limit(args["email"])
-        if is_forgot_password_error_rate_limit:
-            raise EmailPasswordResetLimitError()
-
         token_data = AccountService.get_reset_password_data(args["token"])
         if token_data is None:
             raise InvalidTokenError()
@@ -83,10 +74,8 @@ class ForgotPasswordCheckApi(Resource):
             raise InvalidEmailError()
 
         if args["code"] != token_data.get("code"):
-            AccountService.add_forgot_password_error_rate_limit(args["email"])
             raise EmailCodeError()
 
-        AccountService.reset_forgot_password_error_rate_limit(args["email"])
         return {"is_valid": True, "email": token_data.get("email")}
 
 
@@ -119,8 +108,7 @@ class ForgotPasswordResetApi(Resource):
         password_hashed = hash_password(new_password, salt)
         base64_password_hashed = base64.b64encode(password_hashed).decode()
 
-        with Session(db.engine) as session:
-            account = session.execute(select(Account).filter_by(email=reset_data.get("email"))).scalar_one_or_none()
+        account = Account.query.filter_by(email=reset_data.get("email")).first()
         if account:
             account.password = base64_password_hashed
             account.password_salt = base64_salt
@@ -134,15 +122,13 @@ class ForgotPasswordResetApi(Resource):
         else:
             try:
                 account = AccountService.create_account_and_tenant(
-                    email=reset_data.get("email", ""),
-                    name=reset_data.get("email", ""),
+                    email=reset_data.get("email"),
+                    name=reset_data.get("email"),
                     password=password_confirm,
                     interface_language=languages[0],
                 )
             except WorkSpaceNotAllowedCreateError:
                 pass
-            except AccountRegisterError:
-                raise AccountInFreezeError()
 
         return {"result": "success"}
 
|
|
||||||
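The recurring change in the hunks above swaps SQLAlchemy 2.0-style `select()` queries, run through an explicit `Session`, for Flask-SQLAlchemy's legacy `Model.query` API. A minimal sketch of the two equivalent forms, using a hypothetical `User` model rather than this repo's `Account`:

from sqlalchemy import Column, Integer, String, create_engine, select
from sqlalchemy.orm import Session, declarative_base

Base = declarative_base()

class User(Base):
    __tablename__ = "users"
    id = Column(Integer, primary_key=True)
    email = Column(String, unique=True)

engine = create_engine("sqlite://")
Base.metadata.create_all(engine)

# 2.0 style (the side being removed here): explicit session, select(),
# and scalar_one_or_none() instead of first()
with Session(engine) as session:
    user = session.execute(select(User).filter_by(email="a@example.com")).scalar_one_or_none()
    print(user)  # None: no rows inserted in this sketch

# Legacy Flask-SQLAlchemy style (the side being restored); it needs the
# Model.query property that Flask-SQLAlchemy attaches to its models:
# user = User.query.filter_by(email="a@example.com").first()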

@@ -1,11 +1,10 @@
 from typing import cast
 
-import flask_login  # type: ignore
+import flask_login
 from flask import request
-from flask_restful import Resource, reqparse  # type: ignore
+from flask_restful import Resource, reqparse
 
 import services
-from configs import dify_config
 from constants.languages import languages
 from controllers.console import api
 from controllers.console.auth.error import (
@@ -17,7 +16,6 @@ from controllers.console.auth.error import (
 )
 from controllers.console.error import (
     AccountBannedError,
-    AccountInFreezeError,
     AccountNotFound,
     EmailSendIpLimitError,
     NotAllowedCreateWorkspace,
@@ -28,8 +26,6 @@ from libs.helper import email, extract_remote_ip
 from libs.password import valid_password
 from models.account import Account
 from services.account_service import AccountService, RegisterService, TenantService
-from services.billing_service import BillingService
-from services.errors.account import AccountRegisterError
 from services.errors.workspace import WorkSpaceNotAllowedCreateError
 from services.feature_service import FeatureService
 
@@ -48,9 +44,6 @@ class LoginApi(Resource):
         parser.add_argument("language", type=str, required=False, default="en-US", location="json")
         args = parser.parse_args()
 
-        if dify_config.BILLING_ENABLED and BillingService.is_email_in_freeze(args["email"]):
-            raise AccountInFreezeError()
-
         is_login_error_rate_limit = AccountService.is_login_error_rate_limit(args["email"])
         if is_login_error_rate_limit:
             raise EmailPasswordLoginLimitError()
@@ -120,10 +113,8 @@ class ResetPasswordSendEmailApi(Resource):
             language = "zh-Hans"
         else:
             language = "en-US"
-        try:
-            account = AccountService.get_user_through_email(args["email"])
-        except AccountRegisterError as are:
-            raise AccountInFreezeError()
+
+        account = AccountService.get_user_through_email(args["email"])
         if account is None:
             if FeatureService.get_system_features().is_allow_register:
                 token = AccountService.send_reset_password_email(email=args["email"], language=language)
@@ -151,11 +142,8 @@ class EmailCodeLoginSendEmailApi(Resource):
             language = "zh-Hans"
         else:
             language = "en-US"
-        try:
-            account = AccountService.get_user_through_email(args["email"])
-        except AccountRegisterError as are:
-            raise AccountInFreezeError()
 
+        account = AccountService.get_user_through_email(args["email"])
         if account is None:
             if FeatureService.get_system_features().is_allow_register:
                 token = AccountService.send_email_code_login_email(email=args["email"], language=language)
@@ -189,10 +177,7 @@ class EmailCodeLoginApi(Resource):
             raise EmailCodeError()
 
         AccountService.revoke_email_code_login_token(args["token"])
-        try:
-            account = AccountService.get_user_through_email(user_email)
-        except AccountRegisterError as are:
-            raise AccountInFreezeError()
+        account = AccountService.get_user_through_email(user_email)
         if account:
             tenant = TenantService.get_join_tenants(account)
             if not tenant:
@@ -211,8 +196,6 @@ class EmailCodeLoginApi(Resource):
                 )
             except WorkSpaceNotAllowedCreateError:
                 return NotAllowedCreateWorkspace()
-            except AccountRegisterError as are:
-                raise AccountInFreezeError()
         token_pair = AccountService.login(account, ip_address=extract_remote_ip(request))
         AccountService.reset_login_error_rate_limit(args["email"])
         return {"result": "success", "data": token_pair.model_dump()}
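Several of the removed lines implement one pattern: a service-layer exception (`AccountRegisterError`) is caught at the controller boundary and translated into an HTTP-level error (`AccountInFreezeError`). A self-contained sketch of that pattern, with illustrative stand-in classes; none of the names below are taken from the repo's actual error handlers:

class AccountRegisterError(Exception):
    """Stand-in for the service-layer error raised deeper in the stack."""


class AccountInFreezeError(Exception):
    """Stand-in for the controller error a framework handler maps to a 4xx response."""


def get_user_through_email(email: str) -> dict:
    # Hypothetical service call; frozen accounts raise the domain error.
    if email.endswith("@frozen.example"):
        raise AccountRegisterError(email)
    return {"email": email}


def login_controller(email: str) -> dict:
    try:
        account = get_user_through_email(email)
    except AccountRegisterError:
        # Translate the domain failure into the API's own error vocabulary.
        raise AccountInFreezeError()
    return account


print(login_controller("a@example.com"))  # {'email': 'a@example.com'}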

@@ -4,9 +4,7 @@ from typing import Optional
 
 import requests
 from flask import current_app, redirect, request
-from flask_restful import Resource  # type: ignore
-from sqlalchemy import select
-from sqlalchemy.orm import Session
+from flask_restful import Resource
 from werkzeug.exceptions import Unauthorized
 
 from configs import dify_config
@@ -18,7 +16,7 @@ from libs.oauth import GitHubOAuth, GoogleOAuth, OAuthUserInfo
 from models import Account
 from models.account import AccountStatus
 from services.account_service import AccountService, RegisterService, TenantService
-from services.errors.account import AccountNotFoundError, AccountRegisterError
+from services.errors.account import AccountNotFoundError
 from services.errors.workspace import WorkSpaceNotAllowedCreateError, WorkSpaceNotFoundError
 from services.feature_service import FeatureService
 
@@ -78,9 +76,8 @@ class OAuthCallback(Resource):
         try:
             token = oauth_provider.get_access_token(code)
             user_info = oauth_provider.get_user_info(token)
-        except requests.exceptions.RequestException as e:
-            error_text = e.response.text if e.response else str(e)
-            logging.exception(f"An error occurred during the OAuth process with {provider}: {error_text}")
+        except requests.exceptions.HTTPError as e:
+            logging.exception(f"An error occurred during the OAuth process with {provider}: {e.response.text}")
             return {"error": "OAuth process failed"}, 400
 
         if invite_token and RegisterService.is_valid_invite_token(invite_token):
@@ -101,8 +98,6 @@ class OAuthCallback(Resource):
                 f"{dify_config.CONSOLE_WEB_URL}/signin"
                 "?message=Workspace not found, please contact system admin to invite you to join in a workspace."
             )
-        except AccountRegisterError as e:
-            return redirect(f"{dify_config.CONSOLE_WEB_URL}/signin?message={e.description}")
 
         # Check account status
         if account.status == AccountStatus.BANNED.value:
@@ -134,11 +129,10 @@
 
 
 def _get_account_by_openid_or_email(provider: str, user_info: OAuthUserInfo) -> Optional[Account]:
-    account: Optional[Account] = Account.get_by_openid(provider, user_info.id)
+    account = Account.get_by_openid(provider, user_info.id)
 
     if not account:
-        with Session(db.engine) as session:
-            account = session.execute(select(Account).filter_by(email=user_info.email)).scalar_one_or_none()
+        account = Account.query.filter_by(email=user_info.email).first()
 
     return account
 
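The OAuth hunk narrows the exception handler from `requests.exceptions.RequestException` to `requests.exceptions.HTTPError`. `RequestException` is the base class that also covers connection errors and timeouts, where `e.response` is `None`, which is why the broader variant guards the attribute before logging. A sketch of that defensive form (the URL and function name are placeholders, not repo code):

import logging

import requests


def fetch_user_info(url: str) -> dict:
    try:
        resp = requests.get(url, timeout=10)
        resp.raise_for_status()  # raises HTTPError on 4xx/5xx
        return resp.json()
    except requests.exceptions.RequestException as e:
        # e.response is only set when an HTTP response actually arrived;
        # for DNS failures, refused connections, or timeouts it is None.
        error_text = e.response.text if e.response is not None else str(e)
        logging.exception("OAuth-style request failed: %s", error_text)
        return {}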

@@ -1,5 +1,5 @@
-from flask_login import current_user  # type: ignore
-from flask_restful import Resource, reqparse  # type: ignore
+from flask_login import current_user
+from flask_restful import Resource, reqparse
 
 from controllers.console import api
 from controllers.console.wraps import account_initialization_required, only_edition_cloud, setup_required

@@ -2,10 +2,8 @@ import datetime
 import json
 
 from flask import request
-from flask_login import current_user  # type: ignore
-from flask_restful import Resource, marshal_with, reqparse  # type: ignore
-from sqlalchemy import select
-from sqlalchemy.orm import Session
+from flask_login import current_user
+from flask_restful import Resource, marshal_with, reqparse
 from werkzeug.exceptions import NotFound
 
 from controllers.console import api
@@ -78,10 +76,7 @@ class DataSourceApi(Resource):
     def patch(self, binding_id, action):
         binding_id = str(binding_id)
         action = str(action)
-        with Session(db.engine) as session:
-            data_source_binding = session.execute(
-                select(DataSourceOauthBinding).filter_by(id=binding_id)
-            ).scalar_one_or_none()
+        data_source_binding = DataSourceOauthBinding.query.filter_by(id=binding_id).first()
         if data_source_binding is None:
             raise NotFound("Data source binding not found.")
         # enable binding
@@ -113,53 +108,47 @@ class DataSourceNotionListApi(Resource):
     def get(self):
         dataset_id = request.args.get("dataset_id", default=None, type=str)
         exist_page_ids = []
-        with Session(db.engine) as session:
-            # import notion in the exist dataset
-            if dataset_id:
-                dataset = DatasetService.get_dataset(dataset_id)
-                if not dataset:
-                    raise NotFound("Dataset not found.")
-                if dataset.data_source_type != "notion_import":
-                    raise ValueError("Dataset is not notion type.")
-
-                documents = session.execute(
-                    select(Document).filter_by(
-                        dataset_id=dataset_id,
-                        tenant_id=current_user.current_tenant_id,
-                        data_source_type="notion_import",
-                        enabled=True,
-                    )
-                ).all()
-                if documents:
-                    for document in documents:
-                        data_source_info = json.loads(document.data_source_info)
-                        exist_page_ids.append(data_source_info["notion_page_id"])
-            # get all authorized pages
-            data_source_bindings = session.scalars(
-                select(DataSourceOauthBinding).filter_by(
-                    tenant_id=current_user.current_tenant_id, provider="notion", disabled=False
-                )
-            ).all()
-            if not data_source_bindings:
-                return {"notion_info": []}, 200
-            pre_import_info_list = []
-            for data_source_binding in data_source_bindings:
-                source_info = data_source_binding.source_info
-                pages = source_info["pages"]
-                # Filter out already bound pages
-                for page in pages:
-                    if page["page_id"] in exist_page_ids:
-                        page["is_bound"] = True
-                    else:
-                        page["is_bound"] = False
-                pre_import_info = {
-                    "workspace_name": source_info["workspace_name"],
-                    "workspace_icon": source_info["workspace_icon"],
-                    "workspace_id": source_info["workspace_id"],
-                    "pages": pages,
-                }
-                pre_import_info_list.append(pre_import_info)
-            return {"notion_info": pre_import_info_list}, 200
+        # import notion in the exist dataset
+        if dataset_id:
+            dataset = DatasetService.get_dataset(dataset_id)
+            if not dataset:
+                raise NotFound("Dataset not found.")
+            if dataset.data_source_type != "notion_import":
+                raise ValueError("Dataset is not notion type.")
+            documents = Document.query.filter_by(
+                dataset_id=dataset_id,
+                tenant_id=current_user.current_tenant_id,
+                data_source_type="notion_import",
+                enabled=True,
+            ).all()
+            if documents:
+                for document in documents:
+                    data_source_info = json.loads(document.data_source_info)
+                    exist_page_ids.append(data_source_info["notion_page_id"])
+        # get all authorized pages
+        data_source_bindings = DataSourceOauthBinding.query.filter_by(
+            tenant_id=current_user.current_tenant_id, provider="notion", disabled=False
+        ).all()
+        if not data_source_bindings:
+            return {"notion_info": []}, 200
+        pre_import_info_list = []
+        for data_source_binding in data_source_bindings:
+            source_info = data_source_binding.source_info
+            pages = source_info["pages"]
+            # Filter out already bound pages
+            for page in pages:
+                if page["page_id"] in exist_page_ids:
+                    page["is_bound"] = True
+                else:
+                    page["is_bound"] = False
+            pre_import_info = {
+                "workspace_name": source_info["workspace_name"],
+                "workspace_icon": source_info["workspace_icon"],
+                "workspace_id": source_info["workspace_id"],
+                "pages": pages,
+            }
+            pre_import_info_list.append(pre_import_info)
+        return {"notion_info": pre_import_info_list}, 200
 
 
 class DataSourceNotionApi(Resource):
@@ -169,17 +158,14 @@ class DataSourceNotionApi(Resource):
     def get(self, workspace_id, page_id, page_type):
         workspace_id = str(workspace_id)
         page_id = str(page_id)
-        with Session(db.engine) as session:
-            data_source_binding = session.execute(
-                select(DataSourceOauthBinding).filter(
-                    db.and_(
-                        DataSourceOauthBinding.tenant_id == current_user.current_tenant_id,
-                        DataSourceOauthBinding.provider == "notion",
-                        DataSourceOauthBinding.disabled == False,
-                        DataSourceOauthBinding.source_info["workspace_id"] == f'"{workspace_id}"',
-                    )
-                )
-            ).scalar_one_or_none()
+        data_source_binding = DataSourceOauthBinding.query.filter(
+            db.and_(
+                DataSourceOauthBinding.tenant_id == current_user.current_tenant_id,
+                DataSourceOauthBinding.provider == "notion",
+                DataSourceOauthBinding.disabled == False,
+                DataSourceOauthBinding.source_info["workspace_id"] == f'"{workspace_id}"',
+            )
+        ).first()
        if not data_source_binding:
            raise NotFound("Data source binding not found.")
 
@@ -232,7 +218,7 @@ class DataSourceNotionApi(Resource):
             args["doc_form"],
             args["doc_language"],
         )
-        return response.model_dump(), 200
+        return response, 200
 
 
 class DataSourceNotionDatasetSyncApi(Resource):
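One subtle line both sides share is the Notion workspace filter `DataSourceOauthBinding.source_info["workspace_id"] == f'"{workspace_id}"'`. On a JSON/JSONB column, indexing with `["workspace_id"]` yields a JSON value, so the comparison operand must itself be a JSON string literal, double quotes included. A sketch with a hypothetical `Binding` model (not this repo's class):

from sqlalchemy import JSON, Column, Integer, select
from sqlalchemy.orm import declarative_base

Base = declarative_base()

class Binding(Base):
    __tablename__ = "bindings"
    id = Column(Integer, primary_key=True)
    source_info = Column(JSON)

workspace_id = "abc123"
# Compare against a JSON-encoded string, quotes and all:
stmt = select(Binding).filter(Binding.source_info["workspace_id"] == f'"{workspace_id}"')
# An alternative is .as_string(), which casts the extracted value to text
# so the plain Python string can be compared directly:
# stmt = select(Binding).filter(Binding.source_info["workspace_id"].as_string() == workspace_id)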

@@ -1,7 +1,7 @@
-import flask_restful  # type: ignore
+import flask_restful
 from flask import request
-from flask_login import current_user  # type: ignore # type: ignore
-from flask_restful import Resource, marshal, marshal_with, reqparse  # type: ignore
+from flask_login import current_user
+from flask_restful import Resource, marshal, marshal_with, reqparse
 from werkzeug.exceptions import Forbidden, NotFound
 
 import services
@@ -14,7 +14,6 @@ from controllers.console.wraps import account_initialization_required, enterpris
 from core.errors.error import LLMBadRequestError, ProviderTokenNotInitError
 from core.indexing_runner import IndexingRunner
 from core.model_runtime.entities.model_entities import ModelType
-from core.plugin.entities.plugin import ModelProviderID
 from core.provider_manager import ProviderManager
 from core.rag.datasource.vdb.vector_type import VectorType
 from core.rag.extractor.entity.extract_setting import ExtractSetting
@@ -53,12 +52,12 @@ class DatasetListApi(Resource):
         # provider = request.args.get("provider", default="vendor")
         search = request.args.get("keyword", default=None, type=str)
         tag_ids = request.args.getlist("tag_ids")
-        include_all = request.args.get("include_all", default="false").lower() == "true"
+
         if ids:
             datasets, total = DatasetService.get_datasets_by_ids(ids, current_user.current_tenant_id)
         else:
             datasets, total = DatasetService.get_datasets(
-                page, limit, current_user.current_tenant_id, current_user, search, tag_ids, include_all
+                page, limit, current_user.current_tenant_id, current_user, search, tag_ids
             )
 
         # check embedding setting
@@ -73,9 +72,7 @@ class DatasetListApi(Resource):
 
         data = marshal(datasets, dataset_detail_fields)
         for item in data:
-            # convert embedding_model_provider to plugin standard format
             if item["indexing_technique"] == "high_quality":
-                item["embedding_model_provider"] = str(ModelProviderID(item["embedding_model_provider"]))
                 item_model = f"{item['embedding_model']}:{item['embedding_model_provider']}"
                 if item_model in model_names:
                     item["embedding_available"] = True
@@ -460,14 +457,14 @@ class DatasetIndexingEstimateApi(Resource):
             )
         except LLMBadRequestError:
             raise ProviderNotInitializeError(
-                "No Embedding Model available. Please configure a valid provider in the Settings -> Model Provider."
+                "No Embedding Model available. Please configure a valid provider " "in the Settings -> Model Provider."
             )
         except ProviderTokenNotInitError as ex:
             raise ProviderNotInitializeError(ex.description)
         except Exception as e:
             raise IndexingEstimateError(str(e))
 
-        return response.model_dump(), 200
+        return response, 200
 
 
 class DatasetRelatedAppListApi(Resource):
@@ -622,7 +619,9 @@ class DatasetRetrievalSettingApi(Resource):
         vector_type = dify_config.VECTOR_STORE
         match vector_type:
             case (
-                VectorType.RELYT
+                VectorType.MILVUS
+                | VectorType.RELYT
+                | VectorType.PGVECTOR
                 | VectorType.TIDB_VECTOR
                 | VectorType.CHROMA
                 | VectorType.TENCENT
@@ -641,12 +640,10 @@ class DatasetRetrievalSettingApi(Resource):
                 | VectorType.MYSCALE
                 | VectorType.ORACLE
                 | VectorType.ELASTICSEARCH
-                | VectorType.ELASTICSEARCH_JA
                 | VectorType.PGVECTOR
                 | VectorType.TIDB_ON_QDRANT
                 | VectorType.LINDORM
                 | VectorType.COUCHBASE
-                | VectorType.MILVUS
             ):
                 return {
                     "retrieval_method": [
@@ -686,7 +683,6 @@ class DatasetRetrievalSettingMockApi(Resource):
                 | VectorType.MYSCALE
                 | VectorType.ORACLE
                 | VectorType.ELASTICSEARCH
-                | VectorType.ELASTICSEARCH_JA
                 | VectorType.COUCHBASE
                 | VectorType.PGVECTOR
                 | VectorType.LINDORM
@@ -737,18 +733,6 @@ class DatasetPermissionUserListApi(Resource):
         }, 200
 
 
-class DatasetAutoDisableLogApi(Resource):
-    @setup_required
-    @login_required
-    @account_initialization_required
-    def get(self, dataset_id):
-        dataset_id_str = str(dataset_id)
-        dataset = DatasetService.get_dataset(dataset_id_str)
-        if dataset is None:
-            raise NotFound("Dataset not found.")
-        return DatasetService.get_dataset_auto_disable_logs(dataset_id_str), 200
-
-
 api.add_resource(DatasetListApi, "/datasets")
 api.add_resource(DatasetApi, "/datasets/<uuid:dataset_id>")
 api.add_resource(DatasetUseCheckApi, "/datasets/<uuid:dataset_id>/use-check")
@@ -763,4 +747,3 @@ api.add_resource(DatasetApiBaseUrlApi, "/datasets/api-base-info")
 api.add_resource(DatasetRetrievalSettingApi, "/datasets/retrieval-setting")
 api.add_resource(DatasetRetrievalSettingMockApi, "/datasets/retrieval-setting/<string:vector_type>")
 api.add_resource(DatasetPermissionUserListApi, "/datasets/<uuid:dataset_id>/permission-part-users")
-api.add_resource(DatasetAutoDisableLogApi, "/datasets/<uuid:dataset_id>/auto-disable-logs")
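The retrieval-setting hunks add and remove members of a `case (A | B | ...)` or-pattern; the order of alternatives in an or-pattern has no effect on which values match. A minimal sketch of the construct (Python 3.10+), with a stand-in enum instead of the full `VectorType`:

from enum import Enum

class VectorType(str, Enum):
    MILVUS = "milvus"
    RELYT = "relyt"
    PGVECTOR = "pgvector"
    WEAVIATE = "weaviate"

def retrieval_methods(vector_type: VectorType) -> list[str]:
    match vector_type:
        # Value patterns joined by | succeed if any alternative matches.
        case VectorType.MILVUS | VectorType.RELYT | VectorType.PGVECTOR:
            return ["semantic_search"]
        case VectorType.WEAVIATE:
            return ["semantic_search", "full_text_search", "hybrid_search"]
        case _:
            raise ValueError(f"Unsupported vector db type {vector_type}.")

print(retrieval_methods(VectorType.MILVUS))  # ['semantic_search']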

@@ -1,12 +1,12 @@
 import logging
 from argparse import ArgumentTypeError
 from datetime import UTC, datetime
-from typing import cast
 
 from flask import request
-from flask_login import current_user  # type: ignore
-from flask_restful import Resource, fields, marshal, marshal_with, reqparse  # type: ignore
+from flask_login import current_user
+from flask_restful import Resource, fields, marshal, marshal_with, reqparse
 from sqlalchemy import asc, desc
+from transformers.hf_argparser import string_to_bool
 from werkzeug.exceptions import Forbidden, NotFound
 
 import services
@@ -39,7 +39,6 @@ from core.indexing_runner import IndexingRunner
 from core.model_manager import ModelManager
 from core.model_runtime.entities.model_entities import ModelType
 from core.model_runtime.errors.invoke import InvokeAuthorizationError
-from core.plugin.manager.exc import PluginDaemonClientSideError
 from core.rag.extractor.entity.extract_setting import ExtractSetting
 from extensions.ext_database import db
 from extensions.ext_redis import redis_client
@@ -52,7 +51,6 @@ from fields.document_fields import (
 from libs.login import login_required
 from models import Dataset, DatasetProcessRule, Document, DocumentSegment, UploadFile
 from services.dataset_service import DatasetService, DocumentService
-from services.entities.knowledge_entities.knowledge_entities import KnowledgeConfig
 from tasks.add_document_to_index_task import add_document_to_index_task
 from tasks.remove_document_from_index_task import remove_document_from_index_task
 
@@ -150,20 +148,8 @@ class DatasetDocumentListApi(Resource):
         sort = request.args.get("sort", default="-created_at", type=str)
         # "yes", "true", "t", "y", "1" convert to True, while others convert to False.
         try:
-            fetch_val = request.args.get("fetch", default="false")
-            if isinstance(fetch_val, bool):
-                fetch = fetch_val
-            else:
-                if fetch_val.lower() in ("yes", "true", "t", "y", "1"):
-                    fetch = True
-                elif fetch_val.lower() in ("no", "false", "f", "n", "0"):
-                    fetch = False
-                else:
-                    raise ArgumentTypeError(
-                        f"Truthy value expected: got {fetch_val} but expected one of yes/no, true/false, t/f, y/n, 1/0 "
-                        f"(case insensitive)."
-                    )
-        except (ArgumentTypeError, ValueError, Exception):
+            fetch = string_to_bool(request.args.get("fetch", default="false"))
+        except (ArgumentTypeError, ValueError, Exception) as e:
            fetch = False
         dataset = DatasetService.get_dataset(dataset_id)
         if not dataset:
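The `fetch` hunk trades an inline truthy-string parser for `string_to_bool` from `transformers.hf_argparser`, a heavy dependency for a single helper. The removed lines amount to this standalone function, reconstructed directly from the hunk above:

from argparse import ArgumentTypeError

def string_to_bool(value) -> bool:
    # Inline equivalent of the transformers.hf_argparser helper, so the
    # controller does not need to import transformers for one conversion.
    if isinstance(value, bool):
        return value
    if value.lower() in ("yes", "true", "t", "y", "1"):
        return True
    if value.lower() in ("no", "false", "f", "n", "0"):
        return False
    raise ArgumentTypeError(
        f"Truthy value expected: got {value} but expected one of yes/no, true/false, t/f, y/n, 1/0 "
        f"(case insensitive)."
    )

assert string_to_bool("Yes") is True and string_to_bool("0") is False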
@@ -268,23 +254,20 @@ class DatasetDocumentListApi(Resource):
         parser.add_argument("duplicate", type=bool, default=True, nullable=False, location="json")
         parser.add_argument("original_document_id", type=str, required=False, location="json")
         parser.add_argument("doc_form", type=str, default="text_model", required=False, nullable=False, location="json")
-        parser.add_argument("retrieval_model", type=dict, required=False, nullable=False, location="json")
-        parser.add_argument("embedding_model", type=str, required=False, nullable=True, location="json")
-        parser.add_argument("embedding_model_provider", type=str, required=False, nullable=True, location="json")
         parser.add_argument(
             "doc_language", type=str, default="English", required=False, nullable=False, location="json"
         )
+        parser.add_argument("retrieval_model", type=dict, required=False, nullable=False, location="json")
         args = parser.parse_args()
-        knowledge_config = KnowledgeConfig(**args)
 
-        if not dataset.indexing_technique and not knowledge_config.indexing_technique:
+        if not dataset.indexing_technique and not args["indexing_technique"]:
             raise ValueError("indexing_technique is required.")
 
         # validate args
-        DocumentService.document_create_args_validate(knowledge_config)
+        DocumentService.document_create_args_validate(args)
 
         try:
-            documents, batch = DocumentService.save_document_with_dataset_id(dataset, knowledge_config, current_user)
+            documents, batch = DocumentService.save_document_with_dataset_id(dataset, args, current_user)
         except ProviderTokenNotInitError as ex:
             raise ProviderNotInitializeError(ex.description)
         except QuotaExceededError:
@@ -294,25 +277,6 @@ class DatasetDocumentListApi(Resource):
 
         return {"documents": documents, "batch": batch}
 
-    @setup_required
-    @login_required
-    @account_initialization_required
-    def delete(self, dataset_id):
-        dataset_id = str(dataset_id)
-        dataset = DatasetService.get_dataset(dataset_id)
-        if dataset is None:
-            raise NotFound("Dataset not found.")
-        # check user's model setting
-        DatasetService.check_dataset_model_setting(dataset)
-
-        try:
-            document_ids = request.args.getlist("document_id")
-            DocumentService.delete_documents(dataset, document_ids)
-        except services.errors.document.DocumentIndexingError:
-            raise DocumentIndexingError("Cannot delete document during indexing.")
-
-        return {"result": "success"}, 204
-
 
 class DatasetInitApi(Resource):
     @setup_required
@@ -348,9 +312,9 @@ class DatasetInitApi(Resource):
         # The role of the current user in the ta table must be admin, owner, or editor, or dataset_operator
         if not current_user.is_dataset_editor:
             raise Forbidden()
-        knowledge_config = KnowledgeConfig(**args)
-        if knowledge_config.indexing_technique == "high_quality":
-            if knowledge_config.embedding_model is None or knowledge_config.embedding_model_provider is None:
+
+        if args["indexing_technique"] == "high_quality":
+            if args["embedding_model"] is None or args["embedding_model_provider"] is None:
                 raise ValueError("embedding model and embedding model provider are required for high quality indexing.")
         try:
             model_manager = ModelManager()
@@ -362,17 +326,18 @@ class DatasetInitApi(Resource):
             )
         except InvokeAuthorizationError:
             raise ProviderNotInitializeError(
-                "No Embedding Model available. Please configure a valid provider in the Settings -> Model Provider."
+                "No Embedding Model available. Please configure a valid provider "
+                "in the Settings -> Model Provider."
             )
         except ProviderTokenNotInitError as ex:
             raise ProviderNotInitializeError(ex.description)
 
         # validate args
-        DocumentService.document_create_args_validate(knowledge_config)
+        DocumentService.document_create_args_validate(args)
 
         try:
             dataset, documents, batch = DocumentService.save_document_without_dataset_id(
-                tenant_id=current_user.current_tenant_id, knowledge_config=knowledge_config, account=current_user
+                tenant_id=current_user.current_tenant_id, document_data=args, account=current_user
             )
         except ProviderTokenNotInitError as ex:
             raise ProviderNotInitializeError(ex.description)
@@ -425,7 +390,7 @@ class DocumentIndexingEstimateApi(DocumentResource):
         indexing_runner = IndexingRunner()
 
         try:
-            estimate_response = indexing_runner.indexing_estimate(
+            response = indexing_runner.indexing_estimate(
                 current_user.current_tenant_id,
                 [extract_setting],
                 data_process_rule_dict,
@@ -433,7 +398,6 @@ class DocumentIndexingEstimateApi(DocumentResource):
                 "English",
                 dataset_id,
             )
-            return estimate_response.model_dump(), 200
         except LLMBadRequestError:
             raise ProviderNotInitializeError(
                 "No Embedding Model available. Please configure a valid provider "
@@ -441,12 +405,10 @@ class DocumentIndexingEstimateApi(DocumentResource):
             )
         except ProviderTokenNotInitError as ex:
             raise ProviderNotInitializeError(ex.description)
-        except PluginDaemonClientSideError as ex:
-            raise ProviderNotInitializeError(ex.description)
         except Exception as e:
             raise IndexingEstimateError(str(e))
 
-        return response, 200
+        return response
 
 
 class DocumentBatchIndexingEstimateApi(DocumentResource):
@@ -457,8 +419,9 @@ class DocumentBatchIndexingEstimateApi(DocumentResource):
         dataset_id = str(dataset_id)
         batch = str(batch)
         documents = self.get_batch_documents(dataset_id, batch)
+        response = {"tokens": 0, "total_price": 0, "currency": "USD", "total_segments": 0, "preview": []}
         if not documents:
-            return {"tokens": 0, "total_price": 0, "currency": "USD", "total_segments": 0, "preview": []}, 200
+            return response
         data_process_rule = documents[0].dataset_process_rule
         data_process_rule_dict = data_process_rule.to_dict()
         info_list = []
@@ -536,17 +499,16 @@ class DocumentBatchIndexingEstimateApi(DocumentResource):
                 "English",
                 dataset_id,
             )
-            return response.model_dump(), 200
         except LLMBadRequestError:
             raise ProviderNotInitializeError(
-                "No Embedding Model available. Please configure a valid provider in the Settings -> Model Provider."
+                "No Embedding Model available. Please configure a valid provider "
+                "in the Settings -> Model Provider."
             )
         except ProviderTokenNotInitError as ex:
             raise ProviderNotInitializeError(ex.description)
-        except PluginDaemonClientSideError as ex:
-            raise ProviderNotInitializeError(ex.description)
         except Exception as e:
             raise IndexingEstimateError(str(e))
+        return response
 
 
 class DocumentBatchIndexingStatusApi(DocumentResource):
@@ -617,10 +579,9 @@ class DocumentDetailApi(DocumentResource):
             raise InvalidMetadataError(f"Invalid metadata value: {metadata}")
 
         if metadata == "only":
-            response = {"id": document.id, "doc_type": document.doc_type, "doc_metadata": document.doc_metadata_details}
+            response = {"id": document.id, "doc_type": document.doc_type, "doc_metadata": document.doc_metadata}
         elif metadata == "without":
-            dataset_process_rules = DatasetService.get_process_rules(dataset_id)
-            document_process_rules = document.dataset_process_rule.to_dict()
+            process_rules = DatasetService.get_process_rules(dataset_id)
             data_source_info = document.data_source_detail_dict
             response = {
                 "id": document.id,
@@ -628,8 +589,7 @@ class DocumentDetailApi(DocumentResource):
                 "data_source_type": document.data_source_type,
                 "data_source_info": data_source_info,
                 "dataset_process_rule_id": document.dataset_process_rule_id,
-                "dataset_process_rule": dataset_process_rules,
-                "document_process_rule": document_process_rules,
+                "dataset_process_rule": process_rules,
                 "name": document.name,
                 "created_from": document.created_from,
                 "created_by": document.created_by,
@@ -652,8 +612,7 @@ class DocumentDetailApi(DocumentResource):
                 "doc_language": document.doc_language,
             }
         else:
-            dataset_process_rules = DatasetService.get_process_rules(dataset_id)
-            document_process_rules = document.dataset_process_rule.to_dict()
+            process_rules = DatasetService.get_process_rules(dataset_id)
             data_source_info = document.data_source_detail_dict
             response = {
                 "id": document.id,
@@ -661,8 +620,7 @@ class DocumentDetailApi(DocumentResource):
                 "data_source_type": document.data_source_type,
                 "data_source_info": data_source_info,
                 "dataset_process_rule_id": document.dataset_process_rule_id,
-                "dataset_process_rule": dataset_process_rules,
-                "document_process_rule": document_process_rules,
+                "dataset_process_rule": process_rules,
                 "name": document.name,
                 "created_from": document.created_from,
                 "created_by": document.created_by,
@@ -678,7 +636,7 @@ class DocumentDetailApi(DocumentResource):
                 "disabled_by": document.disabled_by,
                 "archived": document.archived,
                 "doc_type": document.doc_type,
-                "doc_metadata": document.doc_metadata_details,
+                "doc_metadata": document.doc_metadata,
                 "segment_count": document.segment_count,
                 "average_segment_length": document.average_segment_length,
                 "hit_count": document.hit_count,
@@ -775,7 +733,8 @@ class DocumentMetadataApi(DocumentResource):
 
         if not isinstance(doc_metadata, dict):
             raise ValueError("doc_metadata must be a dictionary.")
-        metadata_schema: dict = cast(dict, DocumentService.DOCUMENT_METADATA_SCHEMA[doc_type])
+
+        metadata_schema = DocumentService.DOCUMENT_METADATA_SCHEMA[doc_type]
 
         document.doc_metadata = {}
         if doc_type == "others":
@@ -798,8 +757,9 @@ class DocumentStatusApi(DocumentResource):
     @login_required
     @account_initialization_required
     @cloud_edition_billing_resource_check("vector_space")
-    def patch(self, dataset_id, action):
+    def patch(self, dataset_id, document_id, action):
         dataset_id = str(dataset_id)
+        document_id = str(document_id)
         dataset = DatasetService.get_dataset(dataset_id)
         if dataset is None:
             raise NotFound("Dataset not found.")
@@ -814,79 +774,84 @@ class DocumentStatusApi(DocumentResource):
         # check user's permission
         DatasetService.check_dataset_permission(dataset, current_user)
 
-        document_ids = request.args.getlist("document_id")
-        for document_id in document_ids:
-            document = self.get_document(dataset_id, document_id)
+        document = self.get_document(dataset_id, document_id)
 
-            indexing_cache_key = "document_{}_indexing".format(document.id)
-            cache_result = redis_client.get(indexing_cache_key)
-            if cache_result is not None:
-                raise InvalidActionError(f"Document:{document.name} is being indexed, please try again later")
+        indexing_cache_key = "document_{}_indexing".format(document.id)
+        cache_result = redis_client.get(indexing_cache_key)
+        if cache_result is not None:
+            raise InvalidActionError("Document is being indexed, please try again later")
 
-            if action == "enable":
-                if document.enabled:
-                    continue
-                document.enabled = True
-                document.disabled_at = None
-                document.disabled_by = None
-                document.updated_at = datetime.now(UTC).replace(tzinfo=None)
-                db.session.commit()
+        if action == "enable":
+            if document.enabled:
+                raise InvalidActionError("Document already enabled.")
 
-                # Set cache to prevent indexing the same document multiple times
-                redis_client.setex(indexing_cache_key, 600, 1)
+            document.enabled = True
+            document.disabled_at = None
+            document.disabled_by = None
+            document.updated_at = datetime.now(UTC).replace(tzinfo=None)
+            db.session.commit()
 
-                add_document_to_index_task.delay(document_id)
+            # Set cache to prevent indexing the same document multiple times
+            redis_client.setex(indexing_cache_key, 600, 1)
 
-            elif action == "disable":
-                if not document.completed_at or document.indexing_status != "completed":
-                    raise InvalidActionError(f"Document: {document.name} is not completed.")
-                if not document.enabled:
-                    continue
+            add_document_to_index_task.delay(document_id)
 
-                document.enabled = False
-                document.disabled_at = datetime.now(UTC).replace(tzinfo=None)
-                document.disabled_by = current_user.id
-                document.updated_at = datetime.now(UTC).replace(tzinfo=None)
-                db.session.commit()
+            return {"result": "success"}, 200
 
+        elif action == "disable":
+            if not document.completed_at or document.indexing_status != "completed":
+                raise InvalidActionError("Document is not completed.")
+            if not document.enabled:
+                raise InvalidActionError("Document already disabled.")
+
+            document.enabled = False
+            document.disabled_at = datetime.now(UTC).replace(tzinfo=None)
+            document.disabled_by = current_user.id
+            document.updated_at = datetime.now(UTC).replace(tzinfo=None)
+            db.session.commit()
+
+            # Set cache to prevent indexing the same document multiple times
+            redis_client.setex(indexing_cache_key, 600, 1)
+
+            remove_document_from_index_task.delay(document_id)
+
+            return {"result": "success"}, 200
+
+        elif action == "archive":
+            if document.archived:
+                raise InvalidActionError("Document already archived.")
+
+            document.archived = True
+            document.archived_at = datetime.now(UTC).replace(tzinfo=None)
+            document.archived_by = current_user.id
+            document.updated_at = datetime.now(UTC).replace(tzinfo=None)
+            db.session.commit()
+
+            if document.enabled:
                 # Set cache to prevent indexing the same document multiple times
                 redis_client.setex(indexing_cache_key, 600, 1)
 
                 remove_document_from_index_task.delay(document_id)
 
-            elif action == "archive":
-                if document.archived:
-                    continue
+            return {"result": "success"}, 200
+        elif action == "un_archive":
+            if not document.archived:
+                raise InvalidActionError("Document is not archived.")
 
-                document.archived = True
-                document.archived_at = datetime.now(UTC).replace(tzinfo=None)
-                document.archived_by = current_user.id
-                document.updated_at = datetime.now(UTC).replace(tzinfo=None)
-                db.session.commit()
+            document.archived = False
+            document.archived_at = None
+            document.archived_by = None
+            document.updated_at = datetime.now(UTC).replace(tzinfo=None)
+            db.session.commit()
 
-                if document.enabled:
-                    # Set cache to prevent indexing the same document multiple times
-                    redis_client.setex(indexing_cache_key, 600, 1)
+            # Set cache to prevent indexing the same document multiple times
+            redis_client.setex(indexing_cache_key, 600, 1)
 
-                    remove_document_from_index_task.delay(document_id)
+            add_document_to_index_task.delay(document_id)
 
-            elif action == "un_archive":
-                if not document.archived:
-                    continue
-                document.archived = False
-                document.archived_at = None
-                document.archived_by = None
-                document.updated_at = datetime.now(UTC).replace(tzinfo=None)
-                db.session.commit()
-
-                # Set cache to prevent indexing the same document multiple times
-                redis_client.setex(indexing_cache_key, 600, 1)
-
-                add_document_to_index_task.delay(document_id)
-
-            else:
-                raise InvalidActionError()
-        return {"result": "success"}, 200
+            return {"result": "success"}, 200
+        else:
+            raise InvalidActionError()
 
 
 class DocumentPauseApi(DocumentResource):
@@ -1057,7 +1022,7 @@ api.add_resource(
 )
 api.add_resource(DocumentDeleteApi, "/datasets/<uuid:dataset_id>/documents/<uuid:document_id>")
 api.add_resource(DocumentMetadataApi, "/datasets/<uuid:dataset_id>/documents/<uuid:document_id>/metadata")
-api.add_resource(DocumentStatusApi, "/datasets/<uuid:dataset_id>/documents/status/<string:action>/batch")
+api.add_resource(DocumentStatusApi, "/datasets/<uuid:dataset_id>/documents/<uuid:document_id>/status/<string:action>")
 api.add_resource(DocumentPauseApi, "/datasets/<uuid:dataset_id>/documents/<uuid:document_id>/processing/pause")
 api.add_resource(DocumentRecoverApi, "/datasets/<uuid:dataset_id>/documents/<uuid:document_id>/processing/resume")
 api.add_resource(DocumentRetryApi, "/datasets/<uuid:dataset_id>/retry")
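Both versions of `DocumentStatusApi` wrap every state change in the same Redis guard: a sentinel key with a 600-second TTL blocks a second enable/disable/archive of a document that is already being (re)indexed. A runnable sketch of the guard, with an in-memory stand-in for `redis_client` and the Celery task call commented out:

import time

class FakeRedis:
    """In-memory stand-in for redis_client, just for this sketch."""
    def __init__(self):
        self._store = {}
    def get(self, key):
        entry = self._store.get(key)
        if entry and entry[1] > time.time():
            return entry[0]
        return None
    def setex(self, key, ttl, value):
        self._store[key] = (value, time.time() + ttl)

redis_client = FakeRedis()

def enable_document(document_id: str) -> None:
    indexing_cache_key = "document_{}_indexing".format(document_id)
    if redis_client.get(indexing_cache_key) is not None:
        raise RuntimeError("Document is being indexed, please try again later")
    # ... update flags and commit, then set the sentinel before queueing ...
    redis_client.setex(indexing_cache_key, 600, 1)   # suppress duplicates for 600 s
    # add_document_to_index_task.delay(document_id)  # Celery task, as in the hunks

enable_document("doc-1")
try:
    enable_document("doc-1")
except RuntimeError as e:
    print(e)  # the second call within 10 minutes is rejected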
|
|||||||
@@ -1,21 +1,16 @@
|
|||||||
import uuid
|
import uuid
|
||||||
|
from datetime import UTC, datetime
|
||||||
|
|
||||||
import pandas as pd
|
import pandas as pd
|
||||||
from flask import request
|
from flask import request
|
||||||
from flask_login import current_user # type: ignore
|
from flask_login import current_user
|
||||||
from flask_restful import Resource, marshal, reqparse # type: ignore
|
from flask_restful import Resource, marshal, reqparse
|
||||||
from werkzeug.exceptions import Forbidden, NotFound
|
from werkzeug.exceptions import Forbidden, NotFound
|
||||||
|
|
||||||
import services
|
import services
|
||||||
from controllers.console import api
|
from controllers.console import api
|
||||||
from controllers.console.app.error import ProviderNotInitializeError
|
from controllers.console.app.error import ProviderNotInitializeError
|
||||||
from controllers.console.datasets.error import (
|
from controllers.console.datasets.error import InvalidActionError, NoFileUploadedError, TooManyFilesError
|
||||||
ChildChunkDeleteIndexError,
|
|
||||||
ChildChunkIndexingError,
|
|
||||||
InvalidActionError,
|
|
||||||
NoFileUploadedError,
|
|
||||||
TooManyFilesError,
|
|
||||||
)
|
|
||||||
from controllers.console.wraps import (
|
from controllers.console.wraps import (
|
||||||
account_initialization_required,
|
account_initialization_required,
|
||||||
cloud_edition_billing_knowledge_limit_check,
|
cloud_edition_billing_knowledge_limit_check,
|
||||||
@@ -25,15 +20,15 @@ from controllers.console.wraps import (
 from core.errors.error import LLMBadRequestError, ProviderTokenNotInitError
 from core.model_manager import ModelManager
 from core.model_runtime.entities.model_entities import ModelType
+from extensions.ext_database import db
 from extensions.ext_redis import redis_client
-from fields.segment_fields import child_chunk_fields, segment_fields
+from fields.segment_fields import segment_fields
 from libs.login import login_required
-from models.dataset import ChildChunk, DocumentSegment
+from models import DocumentSegment
 from services.dataset_service import DatasetService, DocumentService, SegmentService
-from services.entities.knowledge_entities.knowledge_entities import ChildChunkUpdateArgs, SegmentUpdateArgs
-from services.errors.chunk import ChildChunkDeleteIndexError as ChildChunkDeleteIndexServiceError
-from services.errors.chunk import ChildChunkIndexingError as ChildChunkIndexingServiceError
 from tasks.batch_create_segment_to_index_task import batch_create_segment_to_index_task
+from tasks.disable_segment_from_index_task import disable_segment_from_index_task
+from tasks.enable_segment_to_index_task import enable_segment_to_index_task
 
 
 class DatasetDocumentSegmentListApi(Resource):
@@ -58,16 +53,15 @@ class DatasetDocumentSegmentListApi(Resource):
             raise NotFound("Document not found.")
 
         parser = reqparse.RequestParser()
+        parser.add_argument("last_id", type=str, default=None, location="args")
         parser.add_argument("limit", type=int, default=20, location="args")
         parser.add_argument("status", type=str, action="append", default=[], location="args")
         parser.add_argument("hit_count_gte", type=int, default=None, location="args")
         parser.add_argument("enabled", type=str, default="all", location="args")
         parser.add_argument("keyword", type=str, default=None, location="args")
-        parser.add_argument("page", type=int, default=1, location="args")
 
         args = parser.parse_args()
 
-        page = args["page"]
+        last_id = args["last_id"]
         limit = min(args["limit"], 100)
         status_list = args["status"]
         hit_count_gte = args["hit_count_gte"]
@@ -75,7 +69,14 @@ class DatasetDocumentSegmentListApi(Resource):
 
         query = DocumentSegment.query.filter(
             DocumentSegment.document_id == str(document_id), DocumentSegment.tenant_id == current_user.current_tenant_id
-        ).order_by(DocumentSegment.position.asc())
+        )
+
+        if last_id is not None:
+            last_segment = db.session.get(DocumentSegment, str(last_id))
+            if last_segment:
+                query = query.filter(DocumentSegment.position > last_segment.position)
+            else:
+                return {"data": [], "has_more": False, "limit": limit}, 200
 
         if status_list:
             query = query.filter(DocumentSegment.status.in_(status_list))
@@ -92,44 +93,21 @@
         elif args["enabled"].lower() == "false":
             query = query.filter(DocumentSegment.enabled == False)
 
-        segments = query.paginate(page=page, per_page=limit, max_per_page=100, error_out=False)
-
-        response = {
-            "data": marshal(segments.items, segment_fields),
-            "limit": limit,
-            "total": segments.total,
-            "total_pages": segments.pages,
-            "page": page,
-        }
-        return response, 200
-
-    @setup_required
-    @login_required
-    @account_initialization_required
-    def delete(self, dataset_id, document_id):
-        # check dataset
-        dataset_id = str(dataset_id)
-        dataset = DatasetService.get_dataset(dataset_id)
-        if not dataset:
-            raise NotFound("Dataset not found.")
-        # check user's model setting
-        DatasetService.check_dataset_model_setting(dataset)
-        # check document
-        document_id = str(document_id)
-        document = DocumentService.get_document(dataset_id, document_id)
-        if not document:
-            raise NotFound("Document not found.")
-        segment_ids = request.args.getlist("segment_id")
-
-        # The role of the current user in the ta table must be admin or owner
-        if not current_user.is_editor:
-            raise Forbidden()
-        try:
-            DatasetService.check_dataset_permission(dataset, current_user)
-        except services.errors.account.NoPermissionError as e:
-            raise Forbidden(str(e))
-        SegmentService.delete_segments(segment_ids, document, dataset)
-        return {"result": "success"}, 200
+        total = query.count()
+        segments = query.order_by(DocumentSegment.position).limit(limit + 1).all()
+
+        has_more = False
+        if len(segments) > limit:
+            has_more = True
+            segments = segments[:-1]
+
+        return {
+            "data": marshal(segments, segment_fields),
+            "doc_form": document.doc_form,
+            "has_more": has_more,
+            "limit": limit,
+            "total": total,
+        }, 200
 
 
 class DatasetDocumentSegmentApi(Resource):
@@ -137,15 +115,11 @@ class DatasetDocumentSegmentApi(Resource):
     @login_required
     @account_initialization_required
     @cloud_edition_billing_resource_check("vector_space")
-    def patch(self, dataset_id, document_id, action):
+    def patch(self, dataset_id, segment_id, action):
         dataset_id = str(dataset_id)
         dataset = DatasetService.get_dataset(dataset_id)
         if not dataset:
             raise NotFound("Dataset not found.")
-        document_id = str(document_id)
-        document = DocumentService.get_document(dataset_id, document_id)
-        if not document:
-            raise NotFound("Document not found.")
         # check user's model setting
         DatasetService.check_dataset_model_setting(dataset)
         # The role of the current user in the ta table must be admin, owner, or editor
@@ -168,21 +142,64 @@ class DatasetDocumentSegmentApi(Resource):
                 )
             except LLMBadRequestError:
                 raise ProviderNotInitializeError(
-                    "No Embedding Model available. Please configure a valid provider in the Settings -> Model Provider."
+                    "No Embedding Model available. Please configure a valid provider "
+                    "in the Settings -> Model Provider."
                 )
             except ProviderTokenNotInitError as ex:
                 raise ProviderNotInitializeError(ex.description)
-        segment_ids = request.args.getlist("segment_id")
-
-        document_indexing_cache_key = "document_{}_indexing".format(document.id)
+        segment = DocumentSegment.query.filter(
+            DocumentSegment.id == str(segment_id), DocumentSegment.tenant_id == current_user.current_tenant_id
+        ).first()
+
+        if not segment:
+            raise NotFound("Segment not found.")
+
+        if segment.status != "completed":
+            raise NotFound("Segment is not completed, enable or disable function is not allowed")
+
+        document_indexing_cache_key = "document_{}_indexing".format(segment.document_id)
         cache_result = redis_client.get(document_indexing_cache_key)
         if cache_result is not None:
             raise InvalidActionError("Document is being indexed, please try again later")
-        try:
-            SegmentService.update_segments_status(segment_ids, action, dataset, document)
-        except Exception as e:
-            raise InvalidActionError(str(e))
-        return {"result": "success"}, 200
+        indexing_cache_key = "segment_{}_indexing".format(segment.id)
+        cache_result = redis_client.get(indexing_cache_key)
+        if cache_result is not None:
+            raise InvalidActionError("Segment is being indexed, please try again later")
+
+        if action == "enable":
+            if segment.enabled:
+                raise InvalidActionError("Segment is already enabled.")
+
+            segment.enabled = True
+            segment.disabled_at = None
+            segment.disabled_by = None
+            db.session.commit()
+
+            # Set cache to prevent indexing the same segment multiple times
+            redis_client.setex(indexing_cache_key, 600, 1)
+
+            enable_segment_to_index_task.delay(segment.id)
+
+            return {"result": "success"}, 200
+        elif action == "disable":
+            if not segment.enabled:
+                raise InvalidActionError("Segment is already disabled.")
+
+            segment.enabled = False
+            segment.disabled_at = datetime.now(UTC).replace(tzinfo=None)
+            segment.disabled_by = current_user.id
+            db.session.commit()
+
+            # Set cache to prevent indexing the same segment multiple times
+            redis_client.setex(indexing_cache_key, 600, 1)
+
+            disable_segment_from_index_task.delay(segment.id)
+
+            return {"result": "success"}, 200
+        else:
+            raise InvalidActionError()
 
 
 class DatasetDocumentSegmentAddApi(Resource):
@@ -216,7 +233,8 @@ class DatasetDocumentSegmentAddApi(Resource):
                 )
             except LLMBadRequestError:
                 raise ProviderNotInitializeError(
-                    "No Embedding Model available. Please configure a valid provider in the Settings -> Model Provider."
+                    "No Embedding Model available. Please configure a valid provider "
+                    "in the Settings -> Model Provider."
                 )
             except ProviderTokenNotInitError as ex:
                 raise ProviderNotInitializeError(ex.description)
@@ -265,7 +283,8 @@ class DatasetDocumentSegmentUpdateApi(Resource):
                 )
             except LLMBadRequestError:
                 raise ProviderNotInitializeError(
-                    "No Embedding Model available. Please configure a valid provider in the Settings -> Model Provider."
+                    "No Embedding Model available. Please configure a valid provider "
+                    "in the Settings -> Model Provider."
                 )
             except ProviderTokenNotInitError as ex:
                 raise ProviderNotInitializeError(ex.description)
@@ -288,12 +307,9 @@ class DatasetDocumentSegmentUpdateApi(Resource):
         parser.add_argument("content", type=str, required=True, nullable=False, location="json")
         parser.add_argument("answer", type=str, required=False, nullable=True, location="json")
         parser.add_argument("keywords", type=list, required=False, nullable=True, location="json")
-        parser.add_argument(
-            "regenerate_child_chunks", type=bool, required=False, nullable=True, default=False, location="json"
-        )
         args = parser.parse_args()
         SegmentService.segment_create_args_validate(args, document)
-        segment = SegmentService.update_segment(SegmentUpdateArgs(**args), segment, document, dataset)
+        segment = SegmentService.update_segment(args, segment, document, dataset)
         return {"data": marshal(segment, segment_fields), "doc_form": document.doc_form}, 200
 
     @setup_required
@@ -365,9 +381,9 @@ class DatasetDocumentSegmentBatchImportApi(Resource):
                 result = []
                 for index, row in df.iterrows():
                     if document.doc_form == "qa_model":
-                        data = {"content": row.iloc[0], "answer": row.iloc[1]}
+                        data = {"content": row[0], "answer": row[1]}
                     else:
-                        data = {"content": row.iloc[0]}
+                        data = {"content": row[0]}
                     result.append(data)
                 if len(result) == 0:
                     raise ValueError("The CSV file is empty.")
@@ -396,247 +412,8 @@ class DatasetDocumentSegmentBatchImportApi(Resource):
         return {"job_id": job_id, "job_status": cache_result.decode()}, 200
 
 
-class ChildChunkAddApi(Resource):
-    @setup_required
-    @login_required
-    @account_initialization_required
-    @cloud_edition_billing_resource_check("vector_space")
-    @cloud_edition_billing_knowledge_limit_check("add_segment")
-    def post(self, dataset_id, document_id, segment_id):
-        # check dataset
-        dataset_id = str(dataset_id)
-        dataset = DatasetService.get_dataset(dataset_id)
-        if not dataset:
-            raise NotFound("Dataset not found.")
-        # check document
-        document_id = str(document_id)
-        document = DocumentService.get_document(dataset_id, document_id)
-        if not document:
-            raise NotFound("Document not found.")
-        # check segment
-        segment_id = str(segment_id)
-        segment = DocumentSegment.query.filter(
-            DocumentSegment.id == str(segment_id), DocumentSegment.tenant_id == current_user.current_tenant_id
-        ).first()
-        if not segment:
-            raise NotFound("Segment not found.")
-        if not current_user.is_editor:
-            raise Forbidden()
-        # check embedding model setting
-        if dataset.indexing_technique == "high_quality":
-            try:
-                model_manager = ModelManager()
-                model_manager.get_model_instance(
-                    tenant_id=current_user.current_tenant_id,
-                    provider=dataset.embedding_model_provider,
-                    model_type=ModelType.TEXT_EMBEDDING,
-                    model=dataset.embedding_model,
-                )
-            except LLMBadRequestError:
-                raise ProviderNotInitializeError(
-                    "No Embedding Model available. Please configure a valid provider in the Settings -> Model Provider."
-                )
-            except ProviderTokenNotInitError as ex:
-                raise ProviderNotInitializeError(ex.description)
-        try:
-            DatasetService.check_dataset_permission(dataset, current_user)
-        except services.errors.account.NoPermissionError as e:
-            raise Forbidden(str(e))
-        # validate args
-        parser = reqparse.RequestParser()
-        parser.add_argument("content", type=str, required=True, nullable=False, location="json")
-        args = parser.parse_args()
-        try:
-            child_chunk = SegmentService.create_child_chunk(args.get("content"), segment, document, dataset)
-        except ChildChunkIndexingServiceError as e:
-            raise ChildChunkIndexingError(str(e))
-        return {"data": marshal(child_chunk, child_chunk_fields)}, 200
-
-    @setup_required
-    @login_required
-    @account_initialization_required
-    def get(self, dataset_id, document_id, segment_id):
-        # check dataset
-        dataset_id = str(dataset_id)
-        dataset = DatasetService.get_dataset(dataset_id)
-        if not dataset:
-            raise NotFound("Dataset not found.")
-        # check user's model setting
-        DatasetService.check_dataset_model_setting(dataset)
-        # check document
-        document_id = str(document_id)
-        document = DocumentService.get_document(dataset_id, document_id)
-        if not document:
-            raise NotFound("Document not found.")
-        # check segment
-        segment_id = str(segment_id)
-        segment = DocumentSegment.query.filter(
-            DocumentSegment.id == str(segment_id), DocumentSegment.tenant_id == current_user.current_tenant_id
-        ).first()
-        if not segment:
-            raise NotFound("Segment not found.")
-        parser = reqparse.RequestParser()
-        parser.add_argument("limit", type=int, default=20, location="args")
-        parser.add_argument("keyword", type=str, default=None, location="args")
-        parser.add_argument("page", type=int, default=1, location="args")
-
-        args = parser.parse_args()
-
-        page = args["page"]
-        limit = min(args["limit"], 100)
-        keyword = args["keyword"]
-
-        child_chunks = SegmentService.get_child_chunks(segment_id, document_id, dataset_id, page, limit, keyword)
-        return {
-            "data": marshal(child_chunks.items, child_chunk_fields),
-            "total": child_chunks.total,
-            "total_pages": child_chunks.pages,
-            "page": page,
-            "limit": limit,
-        }, 200
-
-    @setup_required
-    @login_required
-    @account_initialization_required
-    @cloud_edition_billing_resource_check("vector_space")
-    def patch(self, dataset_id, document_id, segment_id):
-        # check dataset
-        dataset_id = str(dataset_id)
-        dataset = DatasetService.get_dataset(dataset_id)
-        if not dataset:
-            raise NotFound("Dataset not found.")
-        # check user's model setting
-        DatasetService.check_dataset_model_setting(dataset)
-        # check document
-        document_id = str(document_id)
-        document = DocumentService.get_document(dataset_id, document_id)
-        if not document:
-            raise NotFound("Document not found.")
-        # check segment
-        segment_id = str(segment_id)
-        segment = DocumentSegment.query.filter(
-            DocumentSegment.id == str(segment_id), DocumentSegment.tenant_id == current_user.current_tenant_id
-        ).first()
-        if not segment:
-            raise NotFound("Segment not found.")
-        # The role of the current user in the ta table must be admin, owner, or editor
-        if not current_user.is_editor:
-            raise Forbidden()
-        try:
-            DatasetService.check_dataset_permission(dataset, current_user)
-        except services.errors.account.NoPermissionError as e:
-            raise Forbidden(str(e))
-        # validate args
-        parser = reqparse.RequestParser()
-        parser.add_argument("chunks", type=list, required=True, nullable=False, location="json")
-        args = parser.parse_args()
-        try:
-            chunks = [ChildChunkUpdateArgs(**chunk) for chunk in args.get("chunks")]
-            child_chunks = SegmentService.update_child_chunks(chunks, segment, document, dataset)
-        except ChildChunkIndexingServiceError as e:
-            raise ChildChunkIndexingError(str(e))
-        return {"data": marshal(child_chunks, child_chunk_fields)}, 200
-
-
-class ChildChunkUpdateApi(Resource):
-    @setup_required
-    @login_required
-    @account_initialization_required
-    def delete(self, dataset_id, document_id, segment_id, child_chunk_id):
-        # check dataset
-        dataset_id = str(dataset_id)
-        dataset = DatasetService.get_dataset(dataset_id)
-        if not dataset:
-            raise NotFound("Dataset not found.")
-        # check user's model setting
-        DatasetService.check_dataset_model_setting(dataset)
-        # check document
-        document_id = str(document_id)
-        document = DocumentService.get_document(dataset_id, document_id)
-        if not document:
-            raise NotFound("Document not found.")
-        # check segment
-        segment_id = str(segment_id)
-        segment = DocumentSegment.query.filter(
-            DocumentSegment.id == str(segment_id), DocumentSegment.tenant_id == current_user.current_tenant_id
-        ).first()
-        if not segment:
-            raise NotFound("Segment not found.")
-        # check child chunk
-        child_chunk_id = str(child_chunk_id)
-        child_chunk = ChildChunk.query.filter(
-            ChildChunk.id == str(child_chunk_id), ChildChunk.tenant_id == current_user.current_tenant_id
-        ).first()
-        if not child_chunk:
-            raise NotFound("Child chunk not found.")
-        # The role of the current user in the ta table must be admin or owner
-        if not current_user.is_editor:
-            raise Forbidden()
-        try:
-            DatasetService.check_dataset_permission(dataset, current_user)
-        except services.errors.account.NoPermissionError as e:
-            raise Forbidden(str(e))
-        try:
-            SegmentService.delete_child_chunk(child_chunk, dataset)
-        except ChildChunkDeleteIndexServiceError as e:
-            raise ChildChunkDeleteIndexError(str(e))
-        return {"result": "success"}, 200
-
-    @setup_required
-    @login_required
-    @account_initialization_required
-    @cloud_edition_billing_resource_check("vector_space")
-    def patch(self, dataset_id, document_id, segment_id, child_chunk_id):
-        # check dataset
-        dataset_id = str(dataset_id)
-        dataset = DatasetService.get_dataset(dataset_id)
-        if not dataset:
-            raise NotFound("Dataset not found.")
-        # check user's model setting
-        DatasetService.check_dataset_model_setting(dataset)
-        # check document
-        document_id = str(document_id)
-        document = DocumentService.get_document(dataset_id, document_id)
-        if not document:
-            raise NotFound("Document not found.")
-        # check segment
-        segment_id = str(segment_id)
-        segment = DocumentSegment.query.filter(
-            DocumentSegment.id == str(segment_id), DocumentSegment.tenant_id == current_user.current_tenant_id
-        ).first()
-        if not segment:
-            raise NotFound("Segment not found.")
-        # check child chunk
-        child_chunk_id = str(child_chunk_id)
-        child_chunk = ChildChunk.query.filter(
-            ChildChunk.id == str(child_chunk_id), ChildChunk.tenant_id == current_user.current_tenant_id
-        ).first()
-        if not child_chunk:
-            raise NotFound("Child chunk not found.")
-        # The role of the current user in the ta table must be admin or owner
-        if not current_user.is_editor:
-            raise Forbidden()
-        try:
-            DatasetService.check_dataset_permission(dataset, current_user)
-        except services.errors.account.NoPermissionError as e:
-            raise Forbidden(str(e))
-        # validate args
-        parser = reqparse.RequestParser()
-        parser.add_argument("content", type=str, required=True, nullable=False, location="json")
-        args = parser.parse_args()
-        try:
-            child_chunk = SegmentService.update_child_chunk(
-                args.get("content"), child_chunk, segment, document, dataset
-            )
-        except ChildChunkIndexingServiceError as e:
-            raise ChildChunkIndexingError(str(e))
-        return {"data": marshal(child_chunk, child_chunk_fields)}, 200
-
-
 api.add_resource(DatasetDocumentSegmentListApi, "/datasets/<uuid:dataset_id>/documents/<uuid:document_id>/segments")
-api.add_resource(
-    DatasetDocumentSegmentApi, "/datasets/<uuid:dataset_id>/documents/<uuid:document_id>/segment/<string:action>"
-)
+api.add_resource(DatasetDocumentSegmentApi, "/datasets/<uuid:dataset_id>/segments/<uuid:segment_id>/<string:action>")
 api.add_resource(DatasetDocumentSegmentAddApi, "/datasets/<uuid:dataset_id>/documents/<uuid:document_id>/segment")
 api.add_resource(
     DatasetDocumentSegmentUpdateApi,
@@ -647,11 +424,3 @@ api.add_resource(
     "/datasets/<uuid:dataset_id>/documents/<uuid:document_id>/segments/batch_import",
     "/datasets/batch_import_status/<uuid:job_id>",
 )
-api.add_resource(
-    ChildChunkAddApi,
-    "/datasets/<uuid:dataset_id>/documents/<uuid:document_id>/segments/<uuid:segment_id>/child_chunks",
-)
-api.add_resource(
-    ChildChunkUpdateApi,
-    "/datasets/<uuid:dataset_id>/documents/<uuid:document_id>/segments/<uuid:segment_id>/child_chunks/<uuid:child_chunk_id>",
-)
@@ -89,15 +89,3 @@ class IndexingEstimateError(BaseHTTPException):
     error_code = "indexing_estimate_error"
     description = "Knowledge indexing estimate failed: {message}"
     code = 500
-
-
-class ChildChunkIndexingError(BaseHTTPException):
-    error_code = "child_chunk_indexing_error"
-    description = "Create child chunk index failed: {message}"
-    code = 500
-
-
-class ChildChunkDeleteIndexError(BaseHTTPException):
-    error_code = "child_chunk_delete_index_error"
-    description = "Delete child chunk index failed: {message}"
-    code = 500
@@ -1,6 +1,6 @@
 from flask import request
-from flask_login import current_user  # type: ignore
-from flask_restful import Resource, marshal, reqparse  # type: ignore
+from flask_login import current_user
+from flask_restful import Resource, marshal, reqparse
 from werkzeug.exceptions import Forbidden, InternalServerError, NotFound
 
 import services
@@ -1,4 +1,4 @@
-from flask_restful import Resource  # type: ignore
+from flask_restful import Resource
 
 from controllers.console import api
 from controllers.console.datasets.hit_testing_base import DatasetsHitTestingBase
@@ -1,7 +1,7 @@
 import logging
 
-from flask_login import current_user  # type: ignore
-from flask_restful import marshal, reqparse  # type: ignore
+from flask_login import current_user
+from flask_restful import marshal, reqparse
 from werkzeug.exceptions import Forbidden, InternalServerError, NotFound
 
 import services.dataset_service
@@ -1,143 +0,0 @@
-from flask_login import current_user  # type: ignore  # type: ignore
-from flask_restful import Resource, marshal_with, reqparse  # type: ignore
-from werkzeug.exceptions import NotFound
-
-from controllers.console import api
-from controllers.console.wraps import account_initialization_required, enterprise_license_required, setup_required
-from fields.dataset_fields import dataset_metadata_fields
-from libs.login import login_required
-from services.dataset_service import DatasetService
-from services.entities.knowledge_entities.knowledge_entities import (
-    MetadataArgs,
-    MetadataOperationData,
-)
-from services.metadata_service import MetadataService
-
-
-def _validate_name(name):
-    if not name or len(name) < 1 or len(name) > 40:
-        raise ValueError("Name must be between 1 to 40 characters.")
-    return name
-
-
-def _validate_description_length(description):
-    if len(description) > 400:
-        raise ValueError("Description cannot exceed 400 characters.")
-    return description
-
-
-class DatasetListApi(Resource):
-    @setup_required
-    @login_required
-    @account_initialization_required
-    @enterprise_license_required
-    @marshal_with(dataset_metadata_fields)
-    def post(self, dataset_id):
-        parser = reqparse.RequestParser()
-        parser.add_argument("type", type=str, required=True, nullable=True, location="json")
-        parser.add_argument("name", type=str, required=True, nullable=True, location="json")
-        args = parser.parse_args()
-        metadata_args = MetadataArgs(**args)
-
-        dataset_id_str = str(dataset_id)
-        dataset = DatasetService.get_dataset(dataset_id_str)
-        if dataset is None:
-            raise NotFound("Dataset not found.")
-        DatasetService.check_dataset_permission(dataset, current_user)
-
-        metadata = MetadataService.create_metadata(dataset_id_str, metadata_args)
-        return metadata, 201
-
-
-class DatasetMetadataApi(Resource):
-    @setup_required
-    @login_required
-    @account_initialization_required
-    @enterprise_license_required
-    def patch(self, dataset_id, metadata_id):
-        parser = reqparse.RequestParser()
-        parser.add_argument("name", type=str, required=True, nullable=True, location="json")
-        args = parser.parse_args()
-
-        dataset_id_str = str(dataset_id)
-        metadata_id_str = str(metadata_id)
-        dataset = DatasetService.get_dataset(dataset_id_str)
-        if dataset is None:
-            raise NotFound("Dataset not found.")
-        DatasetService.check_dataset_permission(dataset, current_user)
-
-        metadata = MetadataService.update_metadata_name(dataset_id_str, metadata_id_str, args.get("name"))
-        return metadata, 200
-
-    @setup_required
-    @login_required
-    @account_initialization_required
-    @enterprise_license_required
-    def delete(self, dataset_id, metadata_id):
-        dataset_id_str = str(dataset_id)
-        metadata_id_str = str(metadata_id)
-        dataset = DatasetService.get_dataset(dataset_id_str)
-        if dataset is None:
-            raise NotFound("Dataset not found.")
-        DatasetService.check_dataset_permission(dataset, current_user)
-
-        MetadataService.delete_metadata(dataset_id_str, metadata_id_str)
-        return 200
-
-
-class DatasetMetadataBuiltInFieldApi(Resource):
-    @setup_required
-    @login_required
-    @account_initialization_required
-    @enterprise_license_required
-    def get(self):
-        built_in_fields = MetadataService.get_built_in_fields()
-        return built_in_fields, 200
-
-
-class DatasetMetadataBuiltInFieldActionApi(Resource):
-    @setup_required
-    @login_required
-    @account_initialization_required
-    @enterprise_license_required
-    def post(self, dataset_id, action):
-        dataset_id_str = str(dataset_id)
-        dataset = DatasetService.get_dataset(dataset_id_str)
-        if dataset is None:
-            raise NotFound("Dataset not found.")
-        DatasetService.check_dataset_permission(dataset, current_user)
-
-        if action == "enable":
-            MetadataService.enable_built_in_field(dataset)
-        elif action == "disable":
-            MetadataService.disable_built_in_field(dataset)
-        return 200
-
-
-class DocumentMetadataApi(Resource):
-    @setup_required
-    @login_required
-    @account_initialization_required
-    @enterprise_license_required
-    def post(self, dataset_id):
-        dataset_id_str = str(dataset_id)
-        dataset = DatasetService.get_dataset(dataset_id_str)
-        if dataset is None:
-            raise NotFound("Dataset not found.")
-        DatasetService.check_dataset_permission(dataset, current_user)
-
-        parser = reqparse.RequestParser()
-        parser.add_argument("operation_data", type=list, required=True, nullable=True, location="json")
-        args = parser.parse_args()
-        metadata_args = MetadataOperationData(**args)
-
-        MetadataService.update_documents_metadata(dataset, metadata_args)
-
-        return 200
-
-
-api.add_resource(DatasetListApi, "/datasets/<uuid:dataset_id>/metadata")
-api.add_resource(DatasetMetadataApi, "/datasets/<uuid:dataset_id>/metadata/<uuid:metadata_id>")
-api.add_resource(DatasetMetadataBuiltInFieldApi, "/datasets/metadata/built-in")
-api.add_resource(DatasetMetadataBuiltInFieldActionApi, "/datasets/metadata/built-in/<string:action>")
-api.add_resource(DocumentMetadataApi, "/datasets/<uuid:dataset_id>/documents/metadata")
@@ -1,4 +1,4 @@
-from flask_restful import Resource, reqparse  # type: ignore
+from flask_restful import Resource, reqparse
 
 from controllers.console import api
 from controllers.console.datasets.error import WebsiteCrawlError
@@ -92,12 +92,3 @@ class UnauthorizedAndForceLogout(BaseHTTPException):
     error_code = "unauthorized_and_force_logout"
     description = "Unauthorized and force logout."
     code = 401
-
-
-class AccountInFreezeError(BaseHTTPException):
-    error_code = "account_in_freeze"
-    code = 400
-    description = (
-        "This email account has been deleted within the past 30 days"
-        "and is temporarily unavailable for new account registration."
-    )
@@ -4,6 +4,7 @@ from flask import request
 from werkzeug.exceptions import InternalServerError
 
 import services
+from controllers.console import api
 from controllers.console.app.error import (
     AppUnavailableError,
     AudioTooLargeError,
@@ -66,7 +67,7 @@ class ChatAudioApi(InstalledAppResource):
 
 class ChatTextApi(InstalledAppResource):
     def post(self, installed_app):
-        from flask_restful import reqparse  # type: ignore
+        from flask_restful import reqparse
 
         app_model = installed_app.app
         try:
@@ -117,3 +118,9 @@ class ChatTextApi(InstalledAppResource):
         except Exception as e:
             logging.exception("internal server error.")
             raise InternalServerError()
+
+
+api.add_resource(ChatAudioApi, "/installed-apps/<uuid:installed_app_id>/audio-to-text", endpoint="installed_app_audio")
+api.add_resource(ChatTextApi, "/installed-apps/<uuid:installed_app_id>/text-to-audio", endpoint="installed_app_text")
+# api.add_resource(ChatTextApiWithMessageId, '/installed-apps/<uuid:installed_app_id>/text-to-audio/message-id',
+#                  endpoint='installed_app_text_with_message_id')
@@ -1,11 +1,12 @@
 import logging
 from datetime import UTC, datetime
 
-from flask_login import current_user  # type: ignore
-from flask_restful import reqparse  # type: ignore
+from flask_login import current_user
+from flask_restful import reqparse
 from werkzeug.exceptions import InternalServerError, NotFound
 
 import services
+from controllers.console import api
 from controllers.console.app.error import (
     AppUnavailableError,
     CompletionRequestError,
@@ -18,11 +19,7 @@ from controllers.console.explore.error import NotChatAppError, NotCompletionAppE
 from controllers.console.explore.wraps import InstalledAppResource
 from core.app.apps.base_app_queue_manager import AppQueueManager
 from core.app.entities.app_invoke_entities import InvokeFrom
-from core.errors.error import (
-    ModelCurrentlyNotSupportError,
-    ProviderTokenNotInitError,
-    QuotaExceededError,
-)
+from core.errors.error import ModelCurrentlyNotSupportError, ProviderTokenNotInitError, QuotaExceededError
 from core.model_runtime.errors.invoke import InvokeError
 from extensions.ext_database import db
 from libs import helper
@@ -150,3 +147,21 @@ class ChatStopApi(InstalledAppResource):
         AppQueueManager.set_stop_flag(task_id, InvokeFrom.EXPLORE, current_user.id)
 
         return {"result": "success"}, 200
+
+
+api.add_resource(
+    CompletionApi, "/installed-apps/<uuid:installed_app_id>/completion-messages", endpoint="installed_app_completion"
+)
+api.add_resource(
+    CompletionStopApi,
+    "/installed-apps/<uuid:installed_app_id>/completion-messages/<string:task_id>/stop",
+    endpoint="installed_app_stop_completion",
+)
+api.add_resource(
+    ChatApi, "/installed-apps/<uuid:installed_app_id>/chat-messages", endpoint="installed_app_chat_completion"
+)
+api.add_resource(
+    ChatStopApi,
+    "/installed-apps/<uuid:installed_app_id>/chat-messages/<string:task_id>/stop",
+    endpoint="installed_app_stop_chat_completion",
+)
@@ -1,13 +1,12 @@
-from flask_login import current_user  # type: ignore
-from flask_restful import marshal_with, reqparse  # type: ignore
-from flask_restful.inputs import int_range  # type: ignore
-from sqlalchemy.orm import Session
+from flask_login import current_user
+from flask_restful import marshal_with, reqparse
+from flask_restful.inputs import int_range
 from werkzeug.exceptions import NotFound
 
+from controllers.console import api
 from controllers.console.explore.error import NotChatAppError
 from controllers.console.explore.wraps import InstalledAppResource
 from core.app.entities.app_invoke_entities import InvokeFrom
-from extensions.ext_database import db
 from fields.conversation_fields import conversation_infinite_scroll_pagination_fields, simple_conversation_fields
 from libs.helper import uuid_value
 from models.model import AppMode
@@ -32,19 +31,17 @@ class ConversationListApi(InstalledAppResource):
 
         pinned = None
         if "pinned" in args and args["pinned"] is not None:
-            pinned = args["pinned"] == "true"
+            pinned = True if args["pinned"] == "true" else False
 
         try:
-            with Session(db.engine) as session:
-                return WebConversationService.pagination_by_last_id(
-                    session=session,
-                    app_model=app_model,
-                    user=current_user,
-                    last_id=args["last_id"],
-                    limit=args["limit"],
-                    invoke_from=InvokeFrom.EXPLORE,
-                    pinned=pinned,
-                )
+            return WebConversationService.pagination_by_last_id(
+                app_model=app_model,
+                user=current_user,
+                last_id=args["last_id"],
+                limit=args["limit"],
+                invoke_from=InvokeFrom.EXPLORE,
+                pinned=pinned,
+            )
         except LastConversationNotExistsError:
             raise NotFound("Last Conversation Not Exists.")
 
@@ -117,3 +114,28 @@ class ConversationUnPinApi(InstalledAppResource):
         WebConversationService.unpin(app_model, conversation_id, current_user)
 
         return {"result": "success"}
+
+
+api.add_resource(
+    ConversationRenameApi,
+    "/installed-apps/<uuid:installed_app_id>/conversations/<uuid:c_id>/name",
+    endpoint="installed_app_conversation_rename",
+)
+api.add_resource(
+    ConversationListApi, "/installed-apps/<uuid:installed_app_id>/conversations", endpoint="installed_app_conversations"
+)
+api.add_resource(
+    ConversationApi,
+    "/installed-apps/<uuid:installed_app_id>/conversations/<uuid:c_id>",
+    endpoint="installed_app_conversation",
+)
+api.add_resource(
+    ConversationPinApi,
+    "/installed-apps/<uuid:installed_app_id>/conversations/<uuid:c_id>/pin",
+    endpoint="installed_app_conversation_pin",
+)
+api.add_resource(
+    ConversationUnPinApi,
+    "/installed-apps/<uuid:installed_app_id>/conversations/<uuid:c_id>/unpin",
+    endpoint="installed_app_conversation_unpin",
+)
@@ -1,9 +1,8 @@
 from datetime import UTC, datetime
-from typing import Any
 
 from flask import request
-from flask_login import current_user  # type: ignore
-from flask_restful import Resource, inputs, marshal_with, reqparse  # type: ignore
+from flask_login import current_user
+from flask_restful import Resource, inputs, marshal_with, reqparse
 from sqlalchemy import and_
 from werkzeug.exceptions import BadRequest, Forbidden, NotFound
 
@@ -35,7 +34,7 @@ class InstalledAppsListApi(Resource):
         installed_apps = db.session.query(InstalledApp).filter(InstalledApp.tenant_id == current_tenant_id).all()
 
         current_user.role = TenantService.get_user_role(current_user, current_user.current_tenant)
-        installed_app_list: list[dict[str, Any]] = [
+        installed_apps = [
             {
                 "id": installed_app.id,
                 "app": installed_app.app,
@@ -48,7 +47,7 @@
             for installed_app in installed_apps
             if installed_app.app is not None
         ]
-        installed_app_list.sort(
+        installed_apps.sort(
             key=lambda app: (
                 -app["is_pinned"],
                 app["last_used_at"] is None,
@@ -56,7 +55,7 @@
             )
         )
 
-        return {"installed_apps": installed_app_list}
+        return {"installed_apps": installed_apps}
 
     @login_required
     @account_initialization_required
@@ -1,11 +1,12 @@
 import logging
 
-from flask_login import current_user  # type: ignore
-from flask_restful import marshal_with, reqparse  # type: ignore
-from flask_restful.inputs import int_range  # type: ignore
+from flask_login import current_user
+from flask_restful import marshal_with, reqparse
+from flask_restful.inputs import int_range
 from werkzeug.exceptions import InternalServerError, NotFound
 
 import services
+from controllers.console import api
 from controllers.console.app.error import (
     AppMoreLikeThisDisabledError,
     CompletionRequestError,
@@ -50,7 +51,7 @@ class MessageListApi(InstalledAppResource):
 
         try:
             return MessageService.pagination_by_first_id(
-                app_model, current_user, args["conversation_id"], args["first_id"], args["limit"]
+                app_model, current_user, args["conversation_id"], args["first_id"], args["limit"], "desc"
             )
         except services.errors.conversation.ConversationNotExistsError:
             raise NotFound("Conversation Not Exists.")
@@ -66,17 +67,10 @@ class MessageFeedbackApi(InstalledAppResource):
 
         parser = reqparse.RequestParser()
         parser.add_argument("rating", type=str, choices=["like", "dislike", None], location="json")
-        parser.add_argument("content", type=str, location="json")
         args = parser.parse_args()
 
         try:
-            MessageService.create_feedback(
-                app_model=app_model,
-                message_id=message_id,
-                user=current_user,
-                rating=args.get("rating"),
-                content=args.get("content"),
-            )
+            MessageService.create_feedback(app_model, message_id, current_user, args["rating"])
         except services.errors.message.MessageNotExistsError:
             raise NotFound("Message Not Exists.")
 
@@ -159,3 +153,21 @@ class MessageSuggestedQuestionApi(InstalledAppResource):
             raise InternalServerError()
 
         return {"data": questions}
+
+
+api.add_resource(MessageListApi, "/installed-apps/<uuid:installed_app_id>/messages", endpoint="installed_app_messages")
+api.add_resource(
+    MessageFeedbackApi,
+    "/installed-apps/<uuid:installed_app_id>/messages/<uuid:message_id>/feedbacks",
+    endpoint="installed_app_message_feedback",
+)
+api.add_resource(
+    MessageMoreLikeThisApi,
+    "/installed-apps/<uuid:installed_app_id>/messages/<uuid:message_id>/more-like-this",
+    endpoint="installed_app_more_like_this",
+)
+api.add_resource(
+    MessageSuggestedQuestionApi,
+    "/installed-apps/<uuid:installed_app_id>/messages/<uuid:message_id>/suggested-questions",
+    endpoint="installed_app_suggested_question",
+)
@@ -1,4 +1,4 @@
-from flask_restful import marshal_with  # type: ignore
+from flask_restful import marshal_with
 
 from controllers.common import fields
 from controllers.common import helpers as controller_helpers
@@ -1,5 +1,5 @@
-from flask_login import current_user  # type: ignore
-from flask_restful import Resource, fields, marshal_with, reqparse  # type: ignore
+from flask_login import current_user
+from flask_restful import Resource, fields, marshal_with, reqparse
 
 from constants.languages import languages
 from controllers.console import api
Some files were not shown because too many files have changed in this diff.