Compare commits


1 Commit

Author | SHA1 | Message | Date
jyong | 909cc5a49f | redis iam check | 2024-06-14 03:07:17 +08:00
2133 changed files with 21349 additions and 82520 deletions


@@ -1,10 +1,9 @@
#!/bin/bash
cd web && npm install
pipx install poetry
echo 'alias start-api="cd /workspaces/dify/api && poetry run python -m flask run --host 0.0.0.0 --port=5001 --debug"' >> ~/.bashrc
echo 'alias start-worker="cd /workspaces/dify/api && poetry run python -m celery -A app.celery worker -P gevent -c 1 --loglevel INFO -Q dataset,generation,mail,ops_trace,app_deletion"' >> ~/.bashrc
echo 'alias start-api="cd /workspaces/dify/api && flask run --host 0.0.0.0 --port=5001 --debug"' >> ~/.bashrc
echo 'alias start-worker="cd /workspaces/dify/api && celery -A app.celery worker -P gevent -c 1 --loglevel INFO -Q dataset,generation,mail"' >> ~/.bashrc
echo 'alias start-web="cd /workspaces/dify/web && npm run dev"' >> ~/.bashrc
echo 'alias start-containers="cd /workspaces/dify/docker && docker-compose -f docker-compose.middleware.yaml -p dify up -d"' >> ~/.bashrc
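Once the post-create script has run, the aliases land in `~/.bashrc`. A minimal sketch of using them inside the devcontainer, assuming the poetry-based variant of the script above:

```bash
# Reload the shell config so the freshly written aliases are available,
# then bring up the middleware stack and the API (ports per the aliases).
source ~/.bashrc
start-containers   # docker-compose middleware stack under /workspaces/dify/docker
start-api          # Flask API on 0.0.0.0:5001
```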


@@ -1,3 +1,3 @@
#!/bin/bash
poetry install -C api
cd api && pip install -r requirements.txt
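Whichever variant runs, the result can be sanity-checked after it finishes; for the poetry flavor, a sketch like this (from the repository root) confirms the environment resolves:

```bash
# Install backend dependencies into the poetry-managed virtualenv,
# then verify an import works inside that environment.
poetry install -C api
poetry run -C api python -c "import flask; print(flask.__version__)"
```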

.gitattributes

@@ -1,7 +0,0 @@
# Ensure that .sh scripts use LF as line separator, even if they are checked out
# to Windows(NTFS) file-system, by a user of Docker for Window.
# These .sh scripts will be run from the Container after `docker compose up -d`.
# If they appear to be CRLF style, Dash from the Container will fail to execute
# them.
*.sh text eol=lf
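To check that the rule applies, and to renormalize files checked out before it existed, something like the following works (the script path is only an example):

```bash
# Show the text/eol attributes git resolves for a shell script,
# then rewrite the index so tracked files pick up the LF rule.
git check-attr text eol -- docker/entrypoint.sh
git add --renormalize .
```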


@@ -1,24 +0,0 @@
title: "General Discussion"
body:
- type: checkboxes
attributes:
label: Self Checks
description: "To make sure we get to you in time, please check the following :)"
options:
- label: I have searched for existing issues [search for existing issues](https://github.com/langgenius/dify/issues), including closed ones.
required: true
- label: I confirm that I am using English to submit this report (我已阅读并同意 [Language Policy](https://github.com/langgenius/dify/issues/1542)).
required: true
- label: "[FOR CHINESE USERS] 请务必使用英文提交 Issue否则会被关闭。谢谢:"
required: true
- label: "Please do not modify this template :) and fill in all the required fields."
required: true
- type: textarea
attributes:
label: Content
placeholder: Please describe the content you would like to discuss.
validations:
required: true
- type: markdown
attributes:
value: Please limit one request per issue.


@@ -1,30 +0,0 @@
title: "Help"
body:
- type: checkboxes
attributes:
label: Self Checks
description: "To make sure we get to you in time, please check the following :)"
options:
- label: I have searched for existing issues [search for existing issues](https://github.com/langgenius/dify/issues), including closed ones.
required: true
- label: I confirm that I am using English to submit this report (我已阅读并同意 [Language Policy](https://github.com/langgenius/dify/issues/1542)).
required: true
- label: "[FOR CHINESE USERS] 请务必使用英文提交 Issue否则会被关闭。谢谢:"
required: true
- label: "Please do not modify this template :) and fill in all the required fields."
required: true
- type: textarea
attributes:
label: 1. Is this request related to a challenge you're experiencing? Tell me about your story.
placeholder: Please describe the specific scenario or problem you're facing as clearly as possible. For instance "I was trying to use [feature] for [specific task], and [what happened]... It was frustrating because...."
validations:
required: true
- type: textarea
attributes:
label: 2. Additional context or comments
placeholder: (Any other information, comments, documentations, links, or screenshots that would provide more clarity. This is the place to add anything else not covered above.)
validations:
required: false
- type: markdown
attributes:
value: Please limit one request per issue.


@@ -1,37 +0,0 @@
title: Suggestions for New Features
body:
- type: checkboxes
attributes:
label: Self Checks
description: "To make sure we get to you in time, please check the following :)"
options:
- label: I have searched for existing issues [search for existing issues](https://github.com/langgenius/dify/issues), including closed ones.
required: true
- label: I confirm that I am using English to submit this report (我已阅读并同意 [Language Policy](https://github.com/langgenius/dify/issues/1542)).
required: true
- label: "[FOR CHINESE USERS] 请务必使用英文提交 Issue否则会被关闭。谢谢:"
required: true
- label: "Please do not modify this template :) and fill in all the required fields."
required: true
- type: textarea
attributes:
label: 1. Is this request related to a challenge you're experiencing? Tell me about your story.
placeholder: Please describe the specific scenario or problem you're facing as clearly as possible. For instance "I was trying to use [feature] for [specific task], and [what happened]... It was frustrating because...."
validations:
required: true
- type: textarea
attributes:
label: 2. Additional context or comments
placeholder: (Any other information, comments, documentations, links, or screenshots that would provide more clarity. This is the place to add anything else not covered above.)
validations:
required: false
- type: checkboxes
attributes:
label: 3. Can you help us with this feature?
description: Let us know! This is not a commitment, but a starting point for collaboration.
options:
- label: I am interested in contributing to this feature.
required: false
- type: markdown
attributes:
value: Please limit one request per issue.


@@ -14,14 +14,13 @@ body:
required: true
- label: I confirm that I am using English to submit this report (我已阅读并同意 [Language Policy](https://github.com/langgenius/dify/issues/1542)).
required: true
- label: "[FOR CHINESE USERS] 请务必使用英文提交 Issue否则会被关闭。谢谢:"
required: true
- label: "Please do not modify this template :) and fill in all the required fields."
required: true
- type: input
attributes:
label: Dify version
placeholder: 0.3.21
description: See about section in Dify console
validations:
required: true
@@ -41,7 +40,7 @@ body:
- type: textarea
attributes:
label: Steps to reproduce
description: We highly suggest including screenshots and a bug report log. Please use the right markdown syntax for code blocks.
description: We highly suggest including screenshots and a bug report log.
placeholder: Having detailed steps helps us reproduce the bug.
validations:
required: true


@@ -12,8 +12,6 @@ body:
required: true
- label: I confirm that I am using English to submit report (我已阅读并同意 [Language Policy](https://github.com/langgenius/dify/issues/1542)).
required: true
- label: "[FOR CHINESE USERS] 请务必使用英文提交 Issue否则会被关闭。谢谢:"
required: true
- label: "Please do not modify this template :) and fill in all the required fields."
required: true
- type: textarea


@@ -12,25 +12,35 @@ body:
required: true
- label: I confirm that I am using English to submit this report (我已阅读并同意 [Language Policy](https://github.com/langgenius/dify/issues/1542)).
required: true
- label: "[FOR CHINESE USERS] 请务必使用英文提交 Issue否则会被关闭。谢谢:"
required: true
- label: "Please do not modify this template :) and fill in all the required fields."
required: true
- type: textarea
attributes:
label: 1. Is this request related to a challenge you're experiencing? Tell me about your story.
label: 1. Is this request related to a challenge you're experiencing?
placeholder: Please describe the specific scenario or problem you're facing as clearly as possible. For instance "I was trying to use [feature] for [specific task], and [what happened]... It was frustrating because...."
validations:
required: true
- type: textarea
attributes:
label: 2. Additional context or comments
label: 2. Describe the feature you'd like to see
placeholder: Think about what you want to achieve and how this feature will help you. Sketches, flow diagrams, or any visual representation will be a major plus.
validations:
required: true
- type: textarea
attributes:
label: 3. How will this feature improve your workflow or experience?
placeholder: Tell us how this change will benefit your work. This helps us prioritize based on user impact.
validations:
required: true
- type: textarea
attributes:
label: 4. Additional context or comments
placeholder: (Any other information, comments, documentations, links, or screenshots that would provide more clarity. This is the place to add anything else not covered above.)
validations:
required: false
- type: checkboxes
attributes:
label: 3. Can you help us with this feature?
label: 5. Can you help us with this feature?
description: Let us know! This is not a commitment, but a starting point for collaboration.
options:
- label: I am interested in contributing to this feature.


@@ -12,13 +12,12 @@ body:
required: true
- label: I confirm that I am using English to submit this report (我已阅读并同意 [Language Policy](https://github.com/langgenius/dify/issues/1542)).
required: true
- label: "[FOR CHINESE USERS] 请务必使用英文提交 Issue否则会被关闭。谢谢:"
required: true
- label: "Please do not modify this template :) and fill in all the required fields."
required: true
- type: input
attributes:
label: Dify version
placeholder: 0.3.21
description: Hover over system tray icon or look at Settings
validations:
required: true


@@ -1,21 +1,13 @@
# Checklist:
> [!IMPORTANT]
> Please review the checklist below before submitting your pull request.
- [ ] Please open an issue before creating a PR or link to an existing issue
- [ ] I have performed a self-review of my own code
- [ ] I have commented my code, particularly in hard-to-understand areas
- [ ] I ran `dev/reformat`(backend) and `cd web && npx lint-staged`(frontend) to appease the lint gods
# Description
Describe the big picture of your changes here to communicate to the maintainers why we should accept this pull request. If it fixes a bug or resolves a feature request, be sure to link to that issue. Close issue syntax: `Fixes #<issue number>`, see [documentation](https://docs.github.com/en/issues/tracking-your-work-with-issues/linking-a-pull-request-to-an-issue#linking-a-pull-request-to-an-issue-using-a-keyword) for more details.
Please include a summary of the change and which issue is fixed. Please also include relevant motivation and context. List any dependencies that are required for this change.
Fixes
Fixes # (issue)
## Type of Change
Please delete options that are not relevant.
- [ ] Bug fix (non-breaking change which fixes an issue)
- [ ] New feature (non-breaking change which adds functionality)
- [ ] Breaking change (fix or feature that would cause existing functionality to not work as expected)
@@ -23,12 +15,18 @@ Fixes
- [ ] Improvement, including but not limited to code refactoring, performance optimization, and UI/UX improvement
- [ ] Dependency upgrade
# Testing Instructions
# How Has This Been Tested?
Please describe the tests that you ran to verify your changes. Provide instructions so we can reproduce. Please also list any relevant details for your test configuration
- [ ] Test A
- [ ] Test B
- [ ] TODO
# Suggested Checklist:
- [ ] I have performed a self-review of my own code
- [ ] I have commented my code, particularly in hard-to-understand areas
- [ ] My changes generate no new warnings
- [ ] I ran `dev/reformat`(backend) and `cd web && npx lint-staged`(frontend) to appease the lint gods
- [ ] `optional` I have made corresponding changes to the documentation
- [ ] `optional` I have added tests that prove my fix is effective or that my feature works
- [ ] `optional` New and existing unit tests pass locally with my changes
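The lint entries in both versions of the checklist correspond to two local commands; roughly, assuming a checkout with both toolchains installed:

```bash
# Backend formatting/lint, then frontend lint on staged files,
# as referenced by the checklist items above.
dev/reformat
cd web && npx lint-staged
```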


@@ -14,6 +14,74 @@ concurrency:
jobs:
test:
runs-on: ubuntu-latest
strategy:
matrix:
python-version:
- "3.10"
- "3.11"
steps:
- name: Checkout code
uses: actions/checkout@v4
- name: Set up Python ${{ matrix.python-version }}
uses: actions/setup-python@v5
with:
python-version: ${{ matrix.python-version }}
cache: 'pip'
cache-dependency-path: |
./api/requirements.txt
./api/requirements-dev.txt
- name: Install dependencies
run: pip install -r ./api/requirements.txt -r ./api/requirements-dev.txt
- name: Run Unit tests
run: dev/pytest/pytest_unit_tests.sh
- name: Run ModelRuntime
run: dev/pytest/pytest_model_runtime.sh
- name: Run Tool
run: dev/pytest/pytest_tools.sh
- name: Set up Sandbox
uses: hoverkraft-tech/compose-action@v2.0.0
with:
compose-file: |
docker/docker-compose.middleware.yaml
services: |
sandbox
ssrf_proxy
- name: Run Workflow
run: dev/pytest/pytest_workflow.sh
- name: Set up Vector Stores (Weaviate, Qdrant, PGVector, Milvus, PgVecto-RS, Chroma)
uses: hoverkraft-tech/compose-action@v2.0.0
with:
compose-file: |
docker/docker-compose.middleware.yaml
docker/docker-compose.qdrant.yaml
docker/docker-compose.milvus.yaml
docker/docker-compose.pgvecto-rs.yaml
docker/docker-compose.pgvector.yaml
docker/docker-compose.chroma.yaml
services: |
weaviate
qdrant
etcd
minio
milvus-standalone
pgvecto-rs
pgvector
chroma
- name: Test Vector Stores
run: dev/pytest/pytest_vdb.sh
test-in-poetry:
name: API Tests
runs-on: ubuntu-latest
strategy:
@@ -21,7 +89,6 @@ jobs:
python-version:
- "3.10"
- "3.11"
- "3.12"
steps:
- name: Checkout code
@@ -40,9 +107,7 @@ jobs:
api/poetry.lock
- name: Poetry check
run: |
poetry check -C api --lock
poetry show -C api
run: poetry check -C api
- name: Install dependencies
run: poetry install -C api --with dev
@@ -56,14 +121,6 @@ jobs:
- name: Run Tool
run: poetry run -C api bash dev/pytest/pytest_tools.sh
- name: Set up dotenvs
run: |
cp docker/.env.example docker/.env
cp docker/middleware.env.example docker/middleware.env
- name: Expose Service Ports
run: sh .github/workflows/expose_service_ports.sh
- name: Set up Sandbox
uses: hoverkraft-tech/compose-action@v2.0.0
with:
@@ -76,11 +133,16 @@ jobs:
- name: Run Workflow
run: poetry run -C api bash dev/pytest/pytest_workflow.sh
- name: Set up Vector Stores (Weaviate, Qdrant, PGVector, Milvus, PgVecto-RS, Chroma, MyScale)
- name: Set up Vector Stores (Weaviate, Qdrant, PGVector, Milvus, PgVecto-RS, Chroma)
uses: hoverkraft-tech/compose-action@v2.0.0
with:
compose-file: |
docker/docker-compose.yaml
docker/docker-compose.middleware.yaml
docker/docker-compose.qdrant.yaml
docker/docker-compose.milvus.yaml
docker/docker-compose.pgvecto-rs.yaml
docker/docker-compose.pgvector.yaml
docker/docker-compose.chroma.yaml
services: |
weaviate
qdrant
@@ -90,5 +152,6 @@ jobs:
pgvecto-rs
pgvector
chroma
- name: Test Vector Stores
run: poetry run -C api bash dev/pytest/pytest_vdb.sh
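The vector-store job can be approximated locally by following the same steps it runs; a sketch trimmed to two stores, using the compose files listed in the job:

```bash
# Mirror the CI setup: env files, exposed ports, two stores, then the tests.
cp docker/.env.example docker/.env
cp docker/middleware.env.example docker/middleware.env
sh .github/workflows/expose_service_ports.sh
docker compose -f docker/docker-compose.yaml \
  -f docker/docker-compose.qdrant.yaml up -d weaviate qdrant
poetry run -C api bash dev/pytest/pytest_vdb.sh
```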


@@ -8,10 +8,6 @@ on:
release:
types: [published]
concurrency:
group: build-push-${{ github.head_ref || github.run_id }}
cancel-in-progress: true
env:
DOCKERHUB_USER: ${{ secrets.DOCKERHUB_USER }}
DOCKERHUB_TOKEN: ${{ secrets.DOCKERHUB_TOKEN }}
@@ -19,107 +15,32 @@ env:
DIFY_API_IMAGE_NAME: ${{ vars.DIFY_API_IMAGE_NAME || 'langgenius/dify-api' }}
jobs:
build:
runs-on: ${{ matrix.platform == 'linux/arm64' && 'arm64_runner' || 'ubuntu-latest' }}
if: github.repository == 'langgenius/dify'
build-and-push:
runs-on: ubuntu-latest
if: github.event.pull_request.draft == false
strategy:
matrix:
include:
- service_name: "build-api-amd64"
image_name_env: "DIFY_API_IMAGE_NAME"
context: "api"
platform: linux/amd64
- service_name: "build-api-arm64"
image_name_env: "DIFY_API_IMAGE_NAME"
context: "api"
platform: linux/arm64
- service_name: "build-web-amd64"
- service_name: "web"
image_name_env: "DIFY_WEB_IMAGE_NAME"
context: "web"
platform: linux/amd64
- service_name: "build-web-arm64"
image_name_env: "DIFY_WEB_IMAGE_NAME"
context: "web"
platform: linux/arm64
- service_name: "api"
image_name_env: "DIFY_API_IMAGE_NAME"
context: "api"
steps:
- name: Prepare
run: |
platform=${{ matrix.platform }}
echo "PLATFORM_PAIR=${platform//\//-}" >> $GITHUB_ENV
- name: Login to Docker Hub
uses: docker/login-action@v2
with:
username: ${{ env.DOCKERHUB_USER }}
password: ${{ env.DOCKERHUB_TOKEN }}
- name: Set up QEMU
uses: docker/setup-qemu-action@v3
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v3
- name: Extract metadata for Docker
id: meta
uses: docker/metadata-action@v5
with:
images: ${{ env[matrix.image_name_env] }}
- name: Build Docker image
id: build
uses: docker/build-push-action@v6
with:
context: "{{defaultContext}}:${{ matrix.context }}"
platforms: ${{ matrix.platform }}
build-args: COMMIT_SHA=${{ fromJSON(steps.meta.outputs.json).labels['org.opencontainers.image.revision'] }}
labels: ${{ steps.meta.outputs.labels }}
outputs: type=image,name=${{ env[matrix.image_name_env] }},push-by-digest=true,name-canonical=true,push=true
cache-from: type=gha,scope=${{ matrix.service_name }}
cache-to: type=gha,mode=max,scope=${{ matrix.service_name }}
- name: Export digest
run: |
mkdir -p /tmp/digests
digest="${{ steps.build.outputs.digest }}"
touch "/tmp/digests/${digest#sha256:}"
- name: Upload digest
uses: actions/upload-artifact@v4
with:
name: digests-${{ matrix.context }}-${{ env.PLATFORM_PAIR }}
path: /tmp/digests/*
if-no-files-found: error
retention-days: 1
create-manifest:
needs: build
runs-on: ubuntu-latest
if: github.repository == 'langgenius/dify'
strategy:
matrix:
include:
- service_name: "merge-api-images"
image_name_env: "DIFY_API_IMAGE_NAME"
context: "api"
- service_name: "merge-web-images"
image_name_env: "DIFY_WEB_IMAGE_NAME"
context: "web"
steps:
- name: Download digests
uses: actions/download-artifact@v4
with:
path: /tmp/digests
pattern: digests-${{ matrix.context }}-*
merge-multiple: true
- name: Login to Docker Hub
uses: docker/login-action@v2
with:
username: ${{ env.DOCKERHUB_USER }}
password: ${{ env.DOCKERHUB_TOKEN }}
- name: Extract metadata for Docker
- name: Extract metadata (tags, labels) for Docker
id: meta
uses: docker/metadata-action@v5
with:
@@ -130,12 +51,14 @@ jobs:
type=sha,enable=true,priority=100,prefix=,suffix=,format=long
type=raw,value=${{ github.ref_name }},enable=${{ startsWith(github.ref, 'refs/tags/') }}
- name: Create manifest list and push
working-directory: /tmp/digests
run: |
docker buildx imagetools create $(jq -cr '.tags | map("-t " + .) | join(" ")' <<< "$DOCKER_METADATA_OUTPUT_JSON") \
$(printf '${{ env[matrix.image_name_env] }}@sha256:%s ' *)
- name: Inspect image
run: |
docker buildx imagetools inspect ${{ env[matrix.image_name_env] }}:${{ steps.meta.outputs.version }}
- name: Build and push
uses: docker/build-push-action@v5
with:
context: "{{defaultContext}}:${{ matrix.context }}"
platforms: ${{ startsWith(github.ref, 'refs/tags/') && 'linux/amd64,linux/arm64' || 'linux/amd64' }}
build-args: COMMIT_SHA=${{ fromJSON(steps.meta.outputs.json).labels['org.opencontainers.image.revision'] }}
push: true
tags: ${{ steps.meta.outputs.tags }}
labels: ${{ steps.meta.outputs.labels }}
cache-from: type=gha
cache-to: type=gha,mode=max
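For reference, the digest-merge flow in the matrix/manifest variant of this workflow reduces to roughly the following (the tag and digests are placeholders):

```bash
# Stitch per-architecture images, pushed by digest, into one manifest list,
# then verify what was published.
docker buildx imagetools create -t langgenius/dify-api:latest \
  langgenius/dify-api@sha256:<amd64-digest> \
  langgenius/dify-api@sha256:<arm64-digest>
docker buildx imagetools inspect langgenius/dify-api:latest
```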


@@ -38,19 +38,13 @@ jobs:
- name: Install dependencies
run: poetry install -C api
- name: Prepare middleware env
run: |
cd docker
cp middleware.env.example middleware.env
- name: Set up Middlewares
- name: Set up Middleware
uses: hoverkraft-tech/compose-action@v2.0.0
with:
compose-file: |
docker/docker-compose.middleware.yaml
services: |
db
redis
- name: Prepare configs
run: |
@@ -60,4 +54,4 @@ jobs:
- name: Run DB Migration
run: |
cd api
poetry run python -m flask upgrade-db
poetry run python -m flask db upgrade
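Whichever command form is current, the step applies the database migrations through the app's Flask CLI; reproduced locally:

```bash
# Apply pending migrations against the database configured for the API.
cd api
poetry run python -m flask db upgrade
```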


@@ -1,10 +0,0 @@
#!/bin/bash
yq eval '.services.weaviate.ports += ["8080:8080"]' -i docker/docker-compose.yaml
yq eval '.services.qdrant.ports += ["6333:6333"]' -i docker/docker-compose.yaml
yq eval '.services.chroma.ports += ["8000:8000"]' -i docker/docker-compose.yaml
yq eval '.services["milvus-standalone"].ports += ["19530:19530"]' -i docker/docker-compose.yaml
yq eval '.services.pgvector.ports += ["5433:5432"]' -i docker/docker-compose.yaml
yq eval '.services["pgvecto-rs"].ports += ["5431:5432"]' -i docker/docker-compose.yaml
echo "Ports exposed for sandbox, weaviate, qdrant, chroma, milvus, pgvector, pgvecto-rs."


@@ -39,7 +39,7 @@ jobs:
- name: Ruff check
if: steps.changed-files.outputs.any_changed == 'true'
run: poetry run -C api ruff check ./api
run: poetry run -C api ruff check --preview ./api
- name: Dotenv check
if: steps.changed-files.outputs.any_changed == 'true'
@@ -99,8 +99,7 @@ jobs:
**.sh
**.yaml
**.yml
**Dockerfile
dev/**
Dockerfile
- name: Super-linter
uses: super-linter/super-linter/slim@v6
@@ -113,8 +112,7 @@ jobs:
IGNORE_GITIGNORED_FILES: true
VALIDATE_BASH: true
VALIDATE_BASH_EXEC: true
# FIXME: temporarily disabled until api-docker.yaml's run script is fixed for shellcheck
# VALIDATE_GITHUB_ACTIONS: true
VALIDATE_GITHUB_ACTIONS: true
VALIDATE_DOCKERFILE_HADOLINT: true
VALIDATE_XML: true
VALIDATE_YAML: true
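The Ruff step is easy to reproduce before pushing; with the `--preview` flag from the updated workflow it is:

```bash
# Same lint check CI runs, from the repository root.
poetry run -C api ruff check --preview ./api
```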

.gitignore

@@ -136,26 +136,12 @@ web/.vscode/settings.json
# Intellij IDEA Files
.idea/*
!.idea/vcs.xml
!.idea/icon.png
.ideaDataSources/
*.iml
api/.idea
api/.env
api/storage/*
docker-legacy/volumes/app/storage/*
docker-legacy/volumes/db/data/*
docker-legacy/volumes/redis/data/*
docker-legacy/volumes/weaviate/*
docker-legacy/volumes/qdrant/*
docker-legacy/volumes/etcd/*
docker-legacy/volumes/minio/*
docker-legacy/volumes/milvus/*
docker-legacy/volumes/chroma/*
docker/volumes/app/storage/*
docker/volumes/certbot/*
docker/volumes/db/data/*
docker/volumes/redis/data/*
docker/volumes/weaviate/*
@@ -165,9 +151,6 @@ docker/volumes/minio/*
docker/volumes/milvus/*
docker/volumes/chroma/*
docker/nginx/conf.d/default.conf
docker/middleware.env
sdks/python-client/build
sdks/python-client/dist
sdks/python-client/dify_client.egg-info
@@ -175,6 +158,3 @@ sdks/python-client/dify_client.egg-info
.vscode/*
!.vscode/launch.json
pyrightconfig.json
api/.vscode
.idea/

.idea/icon.png (binary image, 1.7 KiB, not shown in diff)


@@ -81,7 +81,7 @@ Dify requires the following dependencies to build, make sure they're installed o
Dify is composed of a backend and a frontend. Navigate to the backend directory by `cd api/`, then follow the [Backend README](api/README.md) to install it. In a separate terminal, navigate to the frontend directory by `cd web/`, then follow the [Frontend README](web/README.md) to install.
Check the [installation FAQ](https://docs.dify.ai/learn-more/faq/self-host-faq) for a list of common issues and steps to troubleshoot.
Check the [installation FAQ](https://docs.dify.ai/getting-started/faq/install-faq) for a list of common issues and steps to troubleshoot.
### 5. Visit dify in your browser


@@ -2,17 +2,17 @@
考虑到我们的现状,我们需要灵活快速地交付,但我们也希望确保像你这样的贡献者在贡献过程中获得尽可能顺畅的体验。我们为此编写了这份贡献指南,旨在让你熟悉代码库和我们与贡献者的合作方式,以便你能快速进入有趣的部分。
这份指南,就像 Dify 本身一样,是一个不断改进的工作。如果有时它落后于实际项目,我们非常感谢你的理解,并欢迎提供任何反馈以供我们改进。
这份指南,就像 Dify 本身一样,是一个不断改进的工作。如果有时它落后于实际项目,我们非常感谢你的理解,并欢迎任何反馈以供我们改进。
在许可方面,请花一分钟阅读我们简短的 [许可证和贡献者协议](./LICENSE)。社区还遵守 [行为准则](https://github.com/langgenius/.github/blob/main/CODE_OF_CONDUCT.md)。
在许可方面,请花一分钟阅读我们简短的[许可证和贡献者协议](./LICENSE)。社区还遵守[行为准则](https://github.com/langgenius/.github/blob/main/CODE_OF_CONDUCT.md)。
## 在开始之前
[查找](https://github.com/langgenius/dify/issues?q=is:issue+is:closed)现有问题,或 [创建](https://github.com/langgenius/dify/issues/new/choose) 一个新问题。我们将问题分为两类:
[查找](https://github.com/langgenius/dify/issues?q=is:issue+is:closed)现有问题,或[创建](https://github.com/langgenius/dify/issues/new/choose)一个新问题。我们将问题分为两类:
### 功能请求:
* 如果您要提出新的功能请求,请解释所提议的功能的目标,并尽可能提供详细的上下文。[@perzeusss](https://github.com/perzeuss) 制作了一个很好的 [功能请求助手](https://udify.app/chat/MK2kVSnw1gakVwMX),可以帮助您起草需求。随时尝试一下。
* 如果您要提出新的功能请求,请解释所提议的功能的目标,并尽可能提供详细的上下文。[@perzeusss](https://github.com/perzeuss)制作了一个很好的[功能请求助手](https://udify.app/chat/MK2kVSnw1gakVwMX),可以帮助您起草需求。随时尝试一下。
* 如果您想从现有问题中选择一个,请在其下方留下评论表示您的意愿。
@@ -20,44 +20,45 @@
根据所提议的功能所属的领域不同,您可能需要与不同的团队成员交流。以下是我们团队成员目前正在从事的各个领域的概述:
| 团队成员 | 工作范围 |
| Member | Scope |
| ------------------------------------------------------------ | ---------------------------------------------------- |
| [@yeuoly](https://github.com/Yeuoly) | 架构 Agents |
| [@jyong](https://github.com/JohnJyong) | RAG 流水线设计 |
| [@GarfieldDai](https://github.com/GarfieldDai) | 构建 workflow 编排 |
| [@iamjoel](https://github.com/iamjoel) & [@zxhlyh](https://github.com/zxhlyh) | 让我们的前端更易用 |
| [@guchenhe](https://github.com/guchenhe) & [@crazywoola](https://github.com/crazywoola) | 开发人员体验, 综合事项联系人 |
| [@takatost](https://github.com/takatost) | 产品整体方向和架构 |
| [@yeuoly](https://github.com/Yeuoly) | Architecting Agents |
| [@jyong](https://github.com/JohnJyong) | RAG pipeline design |
| [@GarfieldDai](https://github.com/GarfieldDai) | Building workflow orchestrations |
| [@iamjoel](https://github.com/iamjoel) & [@zxhlyh](https://github.com/zxhlyh) | Making our frontend a breeze to use |
| [@guchenhe](https://github.com/guchenhe) & [@crazywoola](https://github.com/crazywoola) | Developer experience, points of contact for anything |
| [@takatost](https://github.com/takatost) | Overall product direction and architecture |
事项优先级:
How we prioritize:
| 功能类型 | 优先级 |
| Feature Type | Priority |
| ------------------------------------------------------------ | --------------- |
| 被团队成员标记为高优先级的功能 | 高优先级 |
| [community feedback board](https://github.com/langgenius/dify/discussions/categories/feedbacks) 内反馈的常见功能请求 | 中等优先级 |
| 非核心功能和小幅改进 | 低优先级 |
| 有价值当不紧急 | 未来功能 |
| High-Priority Features as being labeled by a team member | High Priority |
| Popular feature requests from our [community feedback board](https://github.com/langgenius/dify/discussions/categories/feedbacks) | Medium Priority |
| Non-core features and minor enhancements | Low Priority |
| Valuable but not immediate | Future-Feature |
### 其他任何事情(例如 bug 报告、性能优化、拼写错误更正):
### 其他任何事情(例如bug报告、性能优化、拼写错误更正):
* 立即开始编码。
事项优先级:
How we prioritize:
| Issue 类型 | 优先级 |
| Issue Type | Priority |
| ------------------------------------------------------------ | --------------- |
| 核心功能的 Bugs(例如无法登录、应用无法工作、安全漏洞) | 紧急 |
| 非紧急 bugs, 性能提升 | 中等优先级 |
| 小幅修复(错别字, 能正常工作但存在误导的 UI) | 低优先级 |
| Bugs in core functions (cannot login, applications not working, security loopholes) | Critical |
| Non-critical bugs, performance boosts | Medium Priority |
| Minor fixes (typos, confusing but working UI) | Low Priority |
## 安装
以下是设置 Dify 进行开发的步骤:
以下是设置Dify进行开发的步骤
### 1. Fork 该仓库
### 1. Fork该仓库
### 2. 克隆仓库
从终端克隆代码仓库:
从终端克隆fork的仓库:
```
git clone git@github.com:<github_username>/dify.git
@@ -75,72 +76,72 @@ Dify 依赖以下工具和库:
### 4. 安装
Dify 由后端和前端组成。通过 `cd api/` 导航到后端目录,然后按照 [后端 README](api/README.md) 进行安装。在另一个终端中,通过 `cd web/` 导航到前端目录,然后按照 [前端 README](web/README.md) 进行安装。
Dify由后端和前端组成。通过`cd api/`导航到后端目录,然后按照[后端README](api/README.md)进行安装。在另一个终端中,通过`cd web/`导航到前端目录,然后按照[前端README](web/README.md)进行安装。
查看 [安装常见问题解答](https://docs.dify.ai/v/zh-hans/learn-more/faq/install-faq) 以获取常见问题列表和故障排除步骤。
查看[安装常见问题解答](https://docs.dify.ai/getting-started/faq/install-faq)以获取常见问题列表和故障排除步骤。
### 5. 在浏览器中访问 Dify
### 5. 在浏览器中访问Dify
为了验证您的设置,打开浏览器并访问 [http://localhost:3000](http://localhost:3000)(默认或您自定义的 URL 和端口)。现在您应该看到 Dify 正在运行。
为了验证您的设置,打开浏览器并访问[http://localhost:3000](http://localhost:3000)默认或您自定义的URL和端口。现在您应该看到Dify正在运行。
## 开发
如果您要添加模型提供程序,请参考 [此指南](https://github.com/langgenius/dify/blob/main/api/core/model_runtime/README.md)。
如果您要添加模型提供程序,请参考[此指南](https://github.com/langgenius/dify/blob/main/api/core/model_runtime/README.md)。
如果您要向 AgentWorkflow 添加工具提供程序,请参考 [此指南](./api/core/tools/README.md)。
如果您要向AgentWorkflow添加工具提供程序请参考[此指南](./api/core/tools/README.md)。
为了帮助您快速了解您的贡献在哪个部分,以下是 Dify 后端和前端的简要注释大纲:
为了帮助您快速了解您的贡献在哪个部分以下是Dify后端和前端的简要注释大纲
### 后端
Dify 的后端使用 Python 编写,使用 [Flask](https://flask.palletsprojects.com/en/3.0.x/) 框架。它使用 [SQLAlchemy](https://www.sqlalchemy.org/) 作为 ORM使用 [Celery](https://docs.celeryq.dev/en/stable/getting-started/introduction.html) 作为任务队列。授权逻辑通过 Flask-login 进行处理。
Dify的后端使用Python编写使用[Flask](https://flask.palletsprojects.com/en/3.0.x/)框架。它使用[SQLAlchemy](https://www.sqlalchemy.org/)作为ORM使用[Celery](https://docs.celeryq.dev/en/stable/getting-started/introduction.html)作为任务队列。授权逻辑通过Flask-login进行处理。
```
[api/]
├── constants // 用于整个代码库的常量设置。
├── controllers // API 路由定义和请求处理逻辑。
├── core // 核心应用编排、模型集成和工具。
├── docker // Docker 和容器化相关配置。
├── events // 事件处理和处理。
├── extensions // 与第三方框架/平台的扩展。
├── fields // 用于序列化/封装的字段定义。
├── libs // 可重用的库和助手。
├── migrations // 数据库迁移脚本。
├── models // 数据库模型和架构定义。
├── services // 指定业务逻辑。
├── storage // 私钥存储。
├── tasks // 异步任务和后台作业的处理。
├── constants // Constant settings used throughout code base.
├── controllers // API route definitions and request handling logic.
├── core // Core application orchestration, model integrations, and tools.
├── docker // Docker & containerization related configurations.
├── events // Event handling and processing
├── extensions // Extensions with 3rd party frameworks/platforms.
├── fields // field definitions for serialization/marshalling.
├── libs // Reusable libraries and helpers.
├── migrations // Scripts for database migration.
├── models // Database models & schema definitions.
├── services // Specifies business logic.
├── storage // Private key storage.
├── tasks // Handling of async tasks and background jobs.
└── tests
```
### 前端
该网站使用基于 Typescript[Next.js](https://nextjs.org/) 模板进行引导,并使用 [Tailwind CSS](https://tailwindcss.com/) 进行样式设计。[React-i18next](https://react.i18next.com/) 用于国际化。
该网站使用基于Typescript[Next.js](https://nextjs.org/)模板进行引导,并使用[Tailwind CSS](https://tailwindcss.com/)进行样式设计。[React-i18next](https://react.i18next.com/)用于国际化。
```
[web/]
├── app // 布局、页面和组件
│ ├── (commonLayout) // 整个应用通用的布局
│ ├── (shareLayout) // 在特定会话中共享的布局
│ ├── activate // 激活页面
│ ├── components // 页面和布局共享的组件
│ ├── install // 安装页面
│ ├── signin // 登录页面
│ └── styles // 全局共享的样式
├── assets // 静态资源
├── bin // 构建步骤运行的脚本
├── config // 可调整的设置和选项
├── context // 应用中不同部分使用的共享上下文
├── dictionaries // 语言特定的翻译文件
├── docker // 容器配置
├── hooks // 可重用的钩子
├── i18n // 国际化配置
├── models // 描述数据模型和 API 响应的形状
├── public // favicon 等元资源
├── service // 定义 API 操作的形状
├── app // layouts, pages, and components
│ ├── (commonLayout) // common layout used throughout the app
│ ├── (shareLayout) // layouts specifically shared across token-specific sessions
│ ├── activate // activate page
│ ├── components // shared by pages and layouts
│ ├── install // install page
│ ├── signin // signin page
│ └── styles // globally shared styles
├── assets // Static assets
├── bin // scripts ran at build step
├── config // adjustable settings and options
├── context // shared contexts used by different portions of the app
├── dictionaries // Language-specific translate files
├── docker // container configurations
├── hooks // Reusable hooks
├── i18n // Internationalization configuration
├── models // describes data models & shapes of API responses
├── public // meta assets like favicon
├── service // specifies shapes of API actions
├── test
├── types // 函数参数和返回值的描述
└── utils // 共享的实用函数
├── types // descriptions of function params and return values
└── utils // Shared utility functions
```
## 提交你的 PR


@@ -1,10 +1,10 @@
Dify にコントリビュートしたいとお考えなのですね。それは素晴らしいことです。
私たちは、LLM アプリケーションの構築と管理のための最も直感的なワークフローを設計するという壮大な野望を持っています。人数も資金も限られている新興企業として、コミュニティからの支援は本当に重要です。
私たちは現状を鑑み、機敏かつ迅速に開発をする必要がありますが、同時にあなたのようなコントリビューターの方々に、可能な限りスムーズな貢献体験をしていただきたいと思っています。そのためにこのコントリビュートガイドを作成しました。
私たちは現状を鑑み、機敏かつ迅速に開発をする必要がありますが、同時にあなたのようなコントリビューターの方々に、可能な限りスムーズな貢献体験をしていただきたいと思っています。そのためにこのコントリビュートガイドを作成しました。
コードベースやコントリビュータの方々と私たちがどのように仕事をしているのかに慣れていただき、楽しいパートにすぐに飛び込めるようにすることが目的です。
このガイドは Dify そのものと同様に、継続的に改善されています。実際のプロジェクトに遅れをとることがあるかもしれませんが、ご理解のほどよろしくお願いいたします。
このガイドは Dify そのものと同様に、継続的に改善されています。実際のプロジェクトに遅れをとることがあるかもしれませんが、ご理解お願いします。
ライセンスに関しては、私たちの短い[ライセンスおよびコントリビューター規約](./LICENSE)をお読みください。また、コミュニティは[行動規範](https://github.com/langgenius/.github/blob/main/CODE_OF_CONDUCT.md)を遵守しています。
@@ -14,13 +14,13 @@ Dify にコントリビュートしたいとお考えなのですね。それは
### 機能リクエスト
* 新しい機能要望を出す場合は、提案する機能が何を実現するものなのかを説明し、可能な限り多くのコンテキストを含めてください。[@perzeusss](https://github.com/perzeuss)は、あなたの要望を書き出すのに役立つ [Feature Request Copilot](https://udify.app/chat/MK2kVSnw1gakVwMX) を作ってくれました。気軽に試してみてください。
* 新しい機能要望を出す場合は、提案する機能が何を実現するものなのかを説明し、可能な限り多くの文脈を含めてください。[@perzeusss](https://github.com/perzeuss)は、あなたの要望を書き出すのに役立つ [Feature Request Copilot](https://udify.app/chat/MK2kVSnw1gakVwMX) を作ってくれました。気軽に試してみてください。
* 既存の課題から 1 つ選びたい場合は、その下にコメントを書いてください。
関連する方向で作業しているチームメンバーが参加します。すべてが良好であれば、コーディングを開始する許可が与えられます。私たちが変更を提案した場合にあなたの作業が無駄になることがないよう、それまでこの機能の作業を控えていただくようお願いいたします。
関連する方向で作業しているチームメンバーが参加します。すべてが良好であれば、コーディングを開始する許可が与えられます。私たちが変更を提案した場合にあなたの作業が無駄になることがないよう、それまでこの機能の作業を控えていただくようお願いいたします。
提案された機能がどの分野に属するかによって、あなたは異なるチーム・メンバーと話をするかもしれません。以下は、各チームメンバーが現在取り組んでいる分野の概要です。
提案された機能がどの分野に属するかによって、あなたは異なるチーム・メンバーと話をするかもしれません。以下は、各チームメンバーが現在取り組んでいる分野の概要です。
| Member | Scope |
| --------------------------------------------------------------------------------------- | ------------------------------------ |
@@ -54,7 +54,7 @@ Dify にコントリビュートしたいとお考えなのですね。それは
## インストール
以下の手順で 、Difyのセットアップをしてください
Dify を開発用にセットアップする手順は以下の通りです
### 1. このリポジトリをフォークする
@@ -82,7 +82,7 @@ Dify はバックエンドとフロントエンドから構成されています
まず`cd api/`でバックエンドのディレクトリに移動し、[Backend README](api/README.md)に従ってインストールします。
次に別のターミナルで、`cd web/`でフロントエンドのディレクトリに移動し、[Frontend README](web/README.md)に従ってインストールしてください。
よくある問題とトラブルシューティングの手順については、[installation FAQ](https://docs.dify.ai/v/japanese/learn-more/faq/install-faq) を確認してください。
よくある問題とトラブルシューティングの手順については、[installation FAQ](https://docs.dify.ai/getting-started/faq/install-faq) を確認してください。
### 5. ブラウザで dify にアクセスする
@@ -120,7 +120,7 @@ Dify のバックエンドは[Flask](https://flask.palletsprojects.com/en/3.0.x/
### フロントエンド
このウェブサイトは、Typescriptベースの[Next.js](https://nextjs.org/)テンプレートを使ってブートストラップされ、[Tailwind CSS](https://tailwindcss.com/)を使ってスタイリングされています。国際化には[React-i18next](https://react.i18next.com/)を使用しています。
このウェブサイトは、Typescript の[Next.js](https://nextjs.org/)ボイラープレートでブートストラップされており、スタイリングには[Tailwind CSS](https://tailwindcss.com/)を使用しています。国際化には[React-i18next](https://react.i18next.com/)を使用しています。
```
[web/]
@@ -153,7 +153,7 @@ Dify のバックエンドは[Flask](https://flask.palletsprojects.com/en/3.0.x/
いよいよ、私たちのリポジトリにプルリクエスト (PR) を提出する時が来ました。主要な機能については、まず `deploy/dev` ブランチにマージしてテストしてから `main` ブランチにマージします。
マージ競合などの問題が発生した場合、またはプル リクエストを開く方法がわからない場合は、[GitHub's pull request tutorial](https://docs.github.com/en/pull-requests/collaborating-with-pull-requests) をチェックしてみてください。
これで完了です!あなたの PR がマージされると、[README](https://github.com/langgenius/dify/blob/main/README.md) にコントリビューターとして紹介されます。
これで完了です!あなたの PR がマージされると、[README](https://github.com/langgenius/dify/blob/main/README.md) にコントリビューターとして紹介されます。
## ヘルプを得る


@@ -37,7 +37,6 @@
<a href="./README_KL.md"><img alt="README tlhIngan Hol" src="https://img.shields.io/badge/Klingon-d9d9d9"></a>
<a href="./README_KR.md"><img alt="README in Korean" src="https://img.shields.io/badge/한국어-d9d9d9"></a>
<a href="./README_AR.md"><img alt="README بالعربية" src="https://img.shields.io/badge/العربية-d9d9d9"></a>
<a href="./README_TR.md"><img alt="Türkçe README" src="https://img.shields.io/badge/Türkçe-d9d9d9"></a>
</p>
@@ -175,7 +174,6 @@ The easiest way to start the Dify server is to run our [docker-compose.yml](dock
```bash
cd docker
cp .env.example .env
docker compose up -d
```
@@ -185,19 +183,13 @@ After running, you can access the Dify dashboard in your browser at [http://loca
## Next steps
If you need to customize the configuration, please refer to the comments in our [.env.example](docker/.env.example) file and update the corresponding values in your `.env` file. Additionally, you might need to make adjustments to the `docker-compose.yaml` file itself, such as changing image versions, port mappings, or volume mounts, based on your specific deployment environment and requirements. After making any changes, please re-run `docker-compose up -d`. You can find the full list of available environment variables [here](https://docs.dify.ai/getting-started/install-self-hosted/environments).
If you need to customize the configuration, please refer to the comments in our [docker-compose.yml](docker/docker-compose.yaml) file and manually set the environment configuration. After making the changes, please run `docker-compose up -d` again. You can see the full list of environment variables [here](https://docs.dify.ai/getting-started/install-self-hosted/environments).
If you'd like to configure a highly-available setup, there are community-contributed [Helm Charts](https://helm.sh/) and YAML files which allow Dify to be deployed on Kubernetes.
If you'd like to configure a highly-available setup, there are community-contributed [Helm Charts](https://helm.sh/) which allow Dify to be deployed on Kubernetes.
- [Helm Chart by @LeoQuote](https://github.com/douban/charts/tree/master/charts/dify)
- [Helm Chart by @BorisPolonsky](https://github.com/BorisPolonsky/dify-helm)
- [YAML file by @Winson-030](https://github.com/Winson-030/dify-kubernetes)
#### Using Terraform for Deployment
##### Azure Global
Deploy Dify to Azure with a single click using [terraform](https://www.terraform.io/).
- [Azure Terraform by @nikawang](https://github.com/nikawang/dify-azure-terraform)
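In commands, the customize-and-rerun loop described under "Next steps" looks like this (which values you change in `.env` depends on your deployment):

```bash
# Adjust configuration, then re-create the stack so the changes take effect.
cd docker
cp .env.example .env      # first time only
# edit .env (ports, image versions, volume mounts, ...)
docker compose up -d
```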
## Contributing
@@ -217,6 +209,7 @@ At the same time, please consider supporting Dify by sharing it on social media
* [Github Discussion](https://github.com/langgenius/dify/discussions). Best for: sharing feedback and asking questions.
* [GitHub Issues](https://github.com/langgenius/dify/issues). Best for: bugs you encounter using Dify.AI, and feature proposals. See our [Contribution Guide](https://github.com/langgenius/dify/blob/main/CONTRIBUTING.md).
* [Email](mailto:support@dify.ai?subject=[GitHub]Questions%20About%20Dify). Best for: questions you have about using Dify.AI.
* [Discord](https://discord.gg/FngNHpbcY7). Best for: sharing your applications and hanging out with the community.
* [Twitter](https://twitter.com/dify_ai). Best for: sharing your applications and hanging out with the community.


@@ -37,7 +37,6 @@
<a href="./README_KL.md"><img alt="README tlhIngan Hol" src="https://img.shields.io/badge/Klingon-d9d9d9"></a>
<a href="./README_KR.md"><img alt="README in Korean" src="https://img.shields.io/badge/한국어-d9d9d9"></a>
<a href="./README_AR.md"><img alt="README بالعربية" src="https://img.shields.io/badge/العربية-d9d9d9"></a>
<a href="./README_TR.md"><img alt="Türkçe README" src="https://img.shields.io/badge/Türkçe-d9d9d9"></a>
</p>
<div style="text-align: right;">
@@ -158,29 +157,20 @@
```bash
cd docker
cp .env.example .env
docker compose up -d
```
بعد التشغيل، يمكنك الوصول إلى لوحة تحكم Dify في متصفحك على [http://localhost/install](http://localhost/install) وبدء عملية التهيئة.
> إذا كنت ترغب في المساهمة في Dify أو القيام بتطوير إضافي، فانظر إلى [دليلنا للنشر من الشفرة (code) المصدرية](https://docs.dify.ai/getting-started/install-self-hosted/local-source-code)
## الخطوات التالية
إذا كنت بحاجة إلى تخصيص الإعدادات، فيرجى الرجوع إلى التعليقات في ملف [.env.example](docker/.env.example) وتحديث القيم المقابلة في ملف `.env`. بالإضافة إلى ذلك، قد تحتاج إلى إجراء تعديلات على ملف `docker-compose.yaml` نفسه، مثل تغيير إصدارات الصور أو تعيينات المنافذ أو نقاط تحميل وحدات التخزين، بناءً على بيئة النشر ومتطلباتك الخاصة. بعد إجراء أي تغييرات، يرجى إعادة تشغيل `docker-compose up -d`. يمكنك العثور على قائمة كاملة بمتغيرات البيئة المتاحة [هنا](https://docs.dify.ai/getting-started/install-self-hosted/environments).
إذا كنت بحاجة إلى تخصيص التكوين، يرجى الرجوع إلى التعليقات في ملف [docker-compose.yml](docker/docker-compose.yaml) لدينا وتعيين التكوينات البيئية يدويًا. بعد إجراء التغييرات، يرجى تشغيل `docker-compose up -d` مرة أخرى. يمكنك رؤية قائمة كاملة بالمتغيرات البيئية [هنا](https://docs.dify.ai/getting-started/install-self-hosted/environments).
يوجد مجتمع خاص بـ [Helm Charts](https://helm.sh/) وملفات YAML التي تسمح بتنفيذ Dify على Kubernetes للنظام من الإيجابيات العلوية.
إذا كنت ترغب في تكوين إعداد متوفر بشكل عالي، فهناك [رسوم بيانية Helm](https://helm.sh/) المساهمة من المجتمع تسمح بنشر Dify على Kubernetes.
- [رسم بياني Helm من قبل @LeoQuote](https://github.com/douban/charts/tree/master/charts/dify)
- [رسم بياني Helm من قبل @BorisPolonsky](https://github.com/BorisPolonsky/dify-helm)
- [ملف YAML من قبل @Winson-030](https://github.com/Winson-030/dify-kubernetes)
#### استخدام Terraform للتوزيع
##### Azure Global
استخدم [terraform](https://www.terraform.io/) لنشر Dify على Azure بنقرة واحدة.
- [Azure Terraform بواسطة @nikawang](https://github.com/nikawang/dify-azure-terraform)
## المساهمة
@@ -200,6 +190,7 @@ docker compose up -d
## المجتمع والاتصال
* [مناقشة Github](https://github.com/langgenius/dify/discussions). الأفضل لـ: مشاركة التعليقات وطرح الأسئلة.
* [المشكلات على GitHub](https://github.com/langgenius/dify/issues). الأفضل لـ: الأخطاء التي تواجهها في استخدام Dify.AI، واقتراحات الميزات. انظر [دليل المساهمة](https://github.com/langgenius/dify/blob/main/CONTRIBUTING.md).
* [البريد الإلكتروني](mailto:support@dify.ai?subject=[GitHub]Questions%20About%20Dify). الأفضل لـ: الأسئلة التي تتعلق باستخدام Dify.AI.
* [Discord](https://discord.gg/FngNHpbcY7). الأفضل لـ: مشاركة تطبيقاتك والترفيه مع المجتمع.
* [تويتر](https://twitter.com/dify_ai). الأفضل لـ: مشاركة تطبيقاتك والترفيه مع المجتمع.


@@ -36,7 +36,6 @@
<a href="./README_KL.md"><img alt="上个月的提交次数" src="https://img.shields.io/badge/法语-d9d9d9"></a>
<a href="./README_FR.md"><img alt="上个月的提交次数" src="https://img.shields.io/badge/克林贡语-d9d9d9"></a>
<a href="./README_KR.md"><img alt="上个月的提交次数" src="https://img.shields.io/badge/韓國語-d9d9d9"></a>
<a href="./README_TR.md"><img alt="Türkçe README" src="https://img.shields.io/badge/Türkçe-d9d9d9"></a>
</div>
@@ -180,29 +179,21 @@ Dify 是一个开源的 LLM 应用开发平台。其直观的界面结合了 AI
```bash
cd docker
cp .env.example .env
docker compose up -d
```
运行后,可以在浏览器上访问 [http://localhost/install](http://localhost/install) 进入 Dify 控制台并开始初始化安装操作。
### 自定义配置
如果您需要自定义配置,请参考 [.env.example](docker/.env.example) 文件中的注释,并更新 `.env` 文件中对应的值。此外,您可能需要根据您的具体部署环境和需求对 `docker-compose.yaml` 文件本身进行调整,例如更改镜像版本、端口映射或卷挂载。完成任何更改后,请重新运行 `docker-compose up -d`。您可以在[此处](https://docs.dify.ai/getting-started/install-self-hosted/environments)找到可用环境变量的完整列表。
#### 使用 Helm Chart 部署
使用 [Helm Chart](https://helm.sh/) 版本或者 YAML 文件,可以在 Kubernetes 上部署 Dify。
使用 [Helm Chart](https://helm.sh/) 版本,可以在 Kubernetes 上部署 Dify。
- [Helm Chart by @LeoQuote](https://github.com/douban/charts/tree/master/charts/dify)
- [Helm Chart by @BorisPolonsky](https://github.com/BorisPolonsky/dify-helm)
- [YAML 文件 by @Winson-030](https://github.com/Winson-030/dify-kubernetes)
#### 使用 Terraform 部署
### 配置
##### Azure Global
使用 [terraform](https://www.terraform.io/) 一键部署 Dify 到 Azure。
- [Azure Terraform by @nikawang](https://github.com/nikawang/dify-azure-terraform)
如果您需要自定义配置,请参考我们的 [docker-compose.yml](docker/docker-compose.yaml) 文件中的注释,并手动设置环境配置。更改后,请再次运行 `docker-compose up -d`。您可以在我们的[文档](https://docs.dify.ai/getting-started/install-self-hosted/environments)中查看所有环境变量的完整列表。
## Star History


@@ -36,7 +36,6 @@
<a href="./README_KL.md"><img alt="Actividad de Commits el último mes" src="https://img.shields.io/badge/Français-d9d9d9"></a>
<a href="./README_FR.md"><img alt="Actividad de Commits el último mes" src="https://img.shields.io/badge/Klingon-d9d9d9"></a>
<a href="./README_KR.md"><img alt="Actividad de Commits el último mes" src="https://img.shields.io/badge/한국어-d9d9d9"></a>
<a href="./README_TR.md"><img alt="Türkçe README" src="https://img.shields.io/badge/Türkçe-d9d9d9"></a>
</p>
#
@@ -180,7 +179,6 @@ La forma más fácil de iniciar el servidor de Dify es ejecutar nuestro archivo
```bash
cd docker
cp .env.example .env
docker compose up -d
```
@@ -190,21 +188,14 @@ Después de ejecutarlo, puedes acceder al panel de control de Dify en tu navegad
## Próximos pasos
Si necesita personalizar la configuración, consulte los comentarios en nuestro archivo [.env.example](docker/.env.example) y actualice los valores correspondientes en su archivo `.env`. Además, es posible que deba realizar ajustes en el propio archivo `docker-compose.yaml`, como cambiar las versiones de las imágenes, las asignaciones de puertos o los montajes de volúmenes, según su entorno de implementación y requisitos específicos. Después de realizar cualquier cambio, vuelva a ejecutar `docker-compose up -d`. Puede encontrar la lista completa de variables de entorno disponibles [aquí](https://docs.dify.ai/getting-started/install-self-hosted/environments).
Si necesitas personalizar la configuración, consulta los comentarios en nuestro archivo [docker-compose.yml](docker/docker-compose.yaml) y configura manualmente la configuración del entorno. Después de realizar los cambios, ejecuta `docker-compose up -d` nuevamente. Puedes ver la lista completa de variables de entorno [aquí](https://docs.dify.ai/getting-started/install-self-hosted/environments).
Si desea configurar una configuración de alta disponibilidad, la comunidad proporciona [Gráficos Helm](https://helm.sh/) y archivos YAML, a través de los cuales puede desplegar Dify en Kubernetes.
Si deseas configurar una instalación altamente disponible, hay [Gráficos Helm](https://helm.sh/) contribuidos por la comunidad que permiten implementar Dify en Kubernetes.
- [Gráfico Helm por @LeoQuote](https://github.com/douban/charts/tree/master/charts/dify)
- [Gráfico Helm por @BorisPolonsky](https://github.com/BorisPolonsky/dify-helm)
- [Ficheros YAML por @Winson-030](https://github.com/Winson-030/dify-kubernetes)
#### Uso de Terraform para el despliegue
##### Azure Global
Utiliza [terraform](https://www.terraform.io/) para desplegar Dify en Azure con un solo clic.
- [Azure Terraform por @nikawang](https://github.com/nikawang/dify-azure-terraform)
## Contribuir
@@ -225,6 +216,7 @@ Al mismo tiempo, considera apoyar a Dify compartiéndolo en redes sociales y en
* [Discusión en GitHub](https://github.com/langgenius/dify/discussions). Lo mejor para: compartir comentarios y hacer preguntas.
* [Reporte de problemas en GitHub](https://github.com/langgenius/dify/issues). Lo mejor para: errores que encuentres usando Dify.AI y propuestas de características. Consulta nuestra [Guía de contribución](https://github.com/langgenius/dify/blob/main/CONTRIBUTING.md).
* [Correo electrónico](mailto:support@dify.ai?subject=[GitHub]Questions%20About%20Dify). Lo mejor para: preguntas que tengas sobre el uso de Dify.AI.
* [Discord](https://discord.gg/FngNHpbcY7). Lo mejor para: compartir tus aplicaciones y pasar el rato con la comunidad.
* [Twitter](https://twitter.com/dify_ai). Lo mejor para: compartir tus aplicaciones y pasar el rato con la comunidad.


@@ -36,7 +36,6 @@
<a href="./README_KL.md"><img alt="Commits le mois dernier" src="https://img.shields.io/badge/Français-d9d9d9"></a>
<a href="./README_FR.md"><img alt="Commits le mois dernier" src="https://img.shields.io/badge/Klingon-d9d9d9"></a>
<a href="./README_KR.md"><img alt="Commits le mois dernier" src="https://img.shields.io/badge/한국어-d9d9d9"></a>
<a href="./README_TR.md"><img alt="Türkçe README" src="https://img.shields.io/badge/Türkçe-d9d9d9"></a>
</p>
#
@@ -180,7 +179,6 @@ La manière la plus simple de démarrer le serveur Dify est d'exécuter notre fi
```bash
cd docker
cp .env.example .env
docker compose up -d
```
@@ -190,19 +188,14 @@ Après l'exécution, vous pouvez accéder au tableau de bord Dify dans votre nav
## Prochaines étapes
Si vous devez personnaliser la configuration, veuillez vous référer aux commentaires dans notre fichier [.env.example](docker/.env.example) et mettre à jour les valeurs correspondantes dans votre fichier `.env`. De plus, vous devrez peut-être apporter des modifications au fichier `docker-compose.yaml` lui-même, comme changer les versions d'image, les mappages de ports ou les montages de volumes, en fonction de votre environnement de déploiement et de vos exigences spécifiques. Après avoir effectué des modifications, veuillez réexécuter `docker-compose up -d`. Vous pouvez trouver la liste complète des variables d'environnement disponibles [ici](https://docs.dify.ai/getting-started/install-self-hosted/environments).
Si vous devez personnaliser la configuration, veuillez vous référer aux commentaires dans notre fichier [docker-compose.yml](docker/docker-compose.yaml) et définir manuellement la configuration de l'environnement. Après avoir apporté les modifications, veuillez exécuter à nouveau `docker-compose up -d`. Vous pouvez voir la liste complète des variables d'environnement [ici](https://docs.dify.ai/getting-started/install-self-hosted/environments).
Si vous souhaitez configurer une configuration haute disponibilité, la communauté fournit des [Helm Charts](https://helm.sh/) et des fichiers YAML, à travers lesquels vous pouvez déployer Dify sur Kubernetes.
Si vous souhaitez configurer une installation hautement disponible, il existe des [Helm Charts](https://helm.sh/) contribués par la communauté qui permettent de déployer Dify sur Kubernetes.
- [Helm Chart par @LeoQuote](https://github.com/douban/charts/tree/master/charts/dify)
- [Helm Chart par @BorisPolonsky](https://github.com/BorisPolonsky/dify-helm)
- [Fichier YAML par @Winson-030](https://github.com/Winson-030/dify-kubernetes)
#### Utilisation de Terraform pour le déploiement
##### Azure Global
Utilisez [terraform](https://www.terraform.io/) pour déployer Dify sur Azure en un clic.
- [Azure Terraform par @nikawang](https://github.com/nikawang/dify-azure-terraform)
## Contribuer
@@ -223,6 +216,7 @@ Dans le même temps, veuillez envisager de soutenir Dify en le partageant sur le
* [Discussion GitHub](https://github.com/langgenius/dify/discussions). Meilleur pour: partager des commentaires et poser des questions.
* [Problèmes GitHub](https://github.com/langgenius/dify/issues). Meilleur pour: les bogues que vous rencontrez en utilisant Dify.AI et les propositions de fonctionnalités. Consultez notre [Guide de contribution](https://github.com/langgenius/dify/blob/main/CONTRIBUTING.md).
* [E-mail](mailto:support@dify.ai?subject=[GitHub]Questions%20About%20Dify). Meilleur pour: les questions que vous avez sur l'utilisation de Dify.AI.
* [Discord](https://discord.gg/FngNHpbcY7). Meilleur pour: partager vos applications et passer du temps avec la communauté.
* [Twitter](https://twitter.com/dify_ai). Meilleur pour: partager vos applications et passer du temps avec la communauté.


@@ -2,9 +2,9 @@
<p align="center">
<a href="https://cloud.dify.ai">Dify Cloud</a> ·
<a href="https://docs.dify.ai/getting-started/install-self-hosted">セルフホスティング</a> ·
<a href="https://docs.dify.ai/getting-started/install-self-hosted">セルフホス</a> ·
<a href="https://docs.dify.ai">ドキュメント</a> ·
<a href="https://cal.com/guchenhe/dify-demo">デモの予約</a>
<a href="https://cal.com/guchenhe/dify-demo">デモのスケジュール</a>
</p>
<p align="center">
@@ -36,7 +36,6 @@
<a href="./README_KL.md"><img alt="先月のコミット" src="https://img.shields.io/badge/Français-d9d9d9"></a>
<a href="./README_FR.md"><img alt="先月のコミット" src="https://img.shields.io/badge/Klingon-d9d9d9"></a>
<a href="./README_KR.md"><img alt="先月のコミット" src="https://img.shields.io/badge/한국어-d9d9d9"></a>
<a href="./README_TR.md"><img alt="Türkçe README" src="https://img.shields.io/badge/Türkçe-d9d9d9"></a>
</p>
#
@@ -45,37 +44,37 @@
<a href="https://trendshift.io/repositories/2152" target="_blank"><img src="https://trendshift.io/api/badge/repositories/2152" alt="langgenius%2Fdify | Trendshift" style="width: 250px; height: 55px;" width="250" height="55"/></a>
</p>
DifyはオープンソースのLLMアプリケーション開発プラットフォームです。直感的なインターフェスには、AIワークフロー、RAGパイプライン、エージェント機能、モデル管理、観測機能などが組み合わさっており、プロトタイプから生産まで迅速に進めることができます。以下の機能が含まれます:
DifyはオープンソースのLLMアプリケーション開発プラットフォームです。直感的なインターフェスには、AIワークフロー、RAGパイプライン、エージェント機能、モデル管理、観測機能などが組み合わさっており、プロトタイプから本番までの移行を迅速に行うことができます。以下は、主要機能のリストです:
</br> </br>
**1. ワークフロー**:
強力なAIワークフローをビジュアルキャンバス上で構築しテストできます。すべての機能、および以下の機能を使用できます。
ビジュアルキャンバス上で強力なAIワークフローを構築しテストし、以下の機能を活用してプロトタイプを超えることができます。
https://github.com/langgenius/dify/assets/13230914/356df23e-1604-483d-80a6-9517ece318aa
**2. 総合的なモデルサポート**:
数百のプロプライエタリ/オープンソースのLLMと、数十の推論プロバイダーおよびセルフホスティングソリューションとのシームレスな統合を提供します。GPT、Mistral、Llama3、OpenAI API互換性のあるすべてのモデルを統合されています。サポートされているモデルプロバイダーの完全なリストは[こちら](https://docs.dify.ai/getting-started/readme/model-providers)をご覧ください。
**2. 包括的なモデルサポート**:
数百のプロプライエタリ/オープンソースのLLMと、数十の推論プロバイダーおよびセルフホスティングソリューションとのシームレスな統合を提供します。GPT、Mistral、Llama3、およびOpenAI API互換のモデルをカバーします。サポートされているモデルプロバイダーの完全なリストは[こちら](https://docs.dify.ai/getting-started/readme/model-providers)をご覧ください。
![providers-v5](https://github.com/langgenius/dify/assets/13230914/5a17bdbe-097a-4100-8363-40255b70f6e3)
**3. プロンプトIDE**:
プロンプト作成、モデルパフォーマンス比較が行え、チャットベースのアプリに音声合成などの機能も追加できます
チャットベースのアプリにテキスト読み上げなどの追加機能を追加するプロンプト作成、モデルパフォーマンス比較する直感的なインターフェース
**4. RAGパイプライン**:
ドキュメントの取り込みから検索までをカバーする広範なRAG機能ができます。ほかにもPDF、PPT、その他の一般的なドキュメントフォーマットからのテキスト抽出のサーポイントも提供します。
文書の取り込みから取得までをカバーする幅広いRAG機能で、PDF、PPTなどの一般的なドキュメント形式からのテキスト抽出に対するアウトオブボックスのサポートを提供します。
**5. エージェント機能**:
LLM Function CallingやReActに基づエージェント定義が可能で、AIエージェント用のプリビルトまたはカスタムツールを追加できます。Difyには、Google検索、DELL·E、Stable Diffusion、WolframAlphaなどのAIエージェント用の50以上の組み込みツールが提供します。
LLM関数呼び出しまたはReActに基づいてエージェント定義し、エージェント向けの事前構築済みまたはカスタムツールを追加できます。Difyには、Google検索、DELL·E、Stable Diffusion、WolframAlphaなどのAIエージェント用の50以上の組み込みツールが用意されています。
**6. LLMOps**:
アプリケーションログパフォーマンスを監視と分析し、生産のデータと注釈に基づいて、プロンプト、データセット、モデルを継続的に改善できます。
アプリケーションログパフォーマンスを時間の経過とともにモニタリングおよび分析します。本番データと注釈に基づいて、プロンプト、データセット、およびモデルを継続的に改善できます。
**7. Backend-as-a-Service**:
すべての機能はAPIを提供されており、Difyを自分のビジネスロジックに簡単に統合できます。
Difyのすべての提供には、それに対応するAPIが付属しており、独自のビジネスロジックにDifyをシームレスに統合できます。
## 機能比較
@@ -96,9 +95,9 @@ DifyはオープンソースのLLMアプリケーション開発プラットフ
</tr>
<tr>
<td align="center">サポートされているLLM</td>
<td align="center">バラエティ豊か</td>
<td align="center">バラエティ豊か</td>
<td align="center">バラエティ豊か</td>
<td align="center">バリエーション豊富</td>
<td align="center">バリエーション豊富</td>
<td align="center">バリエーション豊富</td>
<td align="center">OpenAIのみ</td>
</tr>
<tr>
@@ -148,15 +147,15 @@ DifyはオープンソースのLLMアプリケーション開発プラットフ
## Difyの使用方法
- **クラウド </br>**
[こちら](https://dify.ai)のDify Cloudサービスを利用して、セットアップ不要で試すことができます。サンドボックスプランには、200回のGPT-4呼び出しが無料で含まれています。
[こちら](https://dify.ai)のDify Cloudサービスを利用して、セットアップ不要で試すことができます。サンドボックスプランには、200回の無料のGPT-4呼び出しが含まれています。
- **Dify Community Editionのセルフホスティング</br>**
この[スターガイド](#quick-start)を使用して、ローカル環境でDifyを簡単に実行できます。
詳しくは[ドキュメント](https://docs.dify.ai)をご覧ください。
この[スターターガイド](#quick-start)を使用して、ローカル環境でDifyを簡単に実行できます。
さらなる参考資料や詳細な手順については、[ドキュメント](https://docs.dify.ai)をご覧ください。
- **企業/組織向けのDify</br>**
企業中心の機能を提供しています。[こちらからミーティングを予約](https://cal.com/guchenhe/30min)したり、[メールを送信](mailto:business@dify.ai?subject=[GitHub]Business%20License%20Inquiry)して企業のニーズについて相談してください。 </br>
> AWSを使用しているスタートアップ企業や中小企業の場合は、[AWS Marketplace](https://aws.amazon.com/marketplace/pp/prodview-t22mebxzwjhu6)のDify Premiumをチェックして、ワンクリックで自のAWS VPCにデプロイできます。さらに、手頃な価格のAMIオファリングどして、ロゴブランディングをカスタマイズしてアプリケーションを作成するオプションがあります。
- **エンタープライズ/組織向けのDify</br>**
追加のエンタープライズ向け機能を提供しています。[こちらからミーティングを予約](https://cal.com/guchenhe/30min)したり、[メールを送信](mailto:business@dify.ai?subject=[GitHub]Business%20License%20Inquiry)してエンタープライズのニーズについて相談してください。 </br>
> AWSを使用しているスタートアップや中小企業の場合は、[AWS Marketplace](https://aws.amazon.com/marketplace/pp/prodview-t22mebxzwjhu6)のDify Premiumをチェックして、ワンクリックで自のAWS VPCにデプロイできます。カスタムロゴブランディングでアプリを作成するオプションを備えた手頃な価格のAMIオファリングです。
## 最新の情報を入手
@@ -179,7 +178,6 @@ Difyサーバーを起動する最も簡単な方法は、[docker-compose.yml](d
```bash
cd docker
cp .env.example .env
docker compose up -d
```
@@ -189,19 +187,12 @@ docker compose up -d
## 次のステップ
設定をカスタマイズする必要がある場合は、[.env.example](docker/.env.example) ファイルのコメントを参照し、`.env` ファイルの対応する値を更新してください。さらに、デプロイ環境や要件に応じて、`docker-compose.yaml` ファイル自体を調整する必要がある場合があります。たとえば、イメージのバージョン、ポートのマッピング、ボリュームのマウントなどを変更します。変更を加えた後は、`docker-compose up -d` を再実行してください。利用可能な環境変数の全一覧は、[こちら](https://docs.dify.ai/getting-started/install-self-hosted/environments)で確認できます。
環境設定をカスタマイズする場合は、[docker-compose.yml](docker/docker-compose.yaml)ファイル内のコメントを参照して、環境設定を手動で設定してください。変更を加えた後は、再び `docker-compose up -d` を実行してください。環境変数の完全なリストは[こちら](https://docs.dify.ai/getting-started/install-self-hosted/environments)をご覧ください。
高可用性設定を設定する必要がある場合、コミュニティ[Helm Charts](https://helm.sh/)とYAMLファイルにより、DifyをKubernetesにデプロイすることができます。
高可用性のセットアップを構成する場合、コミュニティによって提供されている[Helm Charts](https://helm.sh/)があり、これによりKubernetes上にDifyを展開できます。
- [Helm Chart by @LeoQuote](https://github.com/douban/charts/tree/master/charts/dify)
- [Helm Chart by @BorisPolonsky](https://github.com/BorisPolonsky/dify-helm)
- [YAML file by @Winson-030](https://github.com/Winson-030/dify-kubernetes)
#### Terraformを使用したデプロイ
##### Azure Global
[terraform](https://www.terraform.io/) を使用して、AzureにDifyをワンクリックでデプロイします。
- [nikawangのAzure Terraform](https://github.com/nikawang/dify-azure-terraform)
## 貢献
@@ -221,7 +212,8 @@ docker compose up -d
## コミュニティ & お問い合わせ
* [Github Discussion](https://github.com/langgenius/dify/discussions). 主に: フィードバックの共有や質問。
* [GitHub Issues](https://github.com/langgenius/dify/issues). 主に: Dify.AIを使用する際に発生するエラーや問題については、[貢献ガイド](CONTRIBUTING_JA.md)を参照してください
* [GitHub Issues](https://github.com/langgenius/dify/issues). 主に: Dify.AIを使用中に遭遇したバグや機能提案。
* [Email](mailto:support@dify.ai?subject=[GitHub]Questions%20About%20Dify). 主に: Dify.AIの使用に関する質問。
* [Discord](https://discord.gg/FngNHpbcY7). 主に: アプリケーションの共有やコミュニティとの交流。
* [Twitter](https://twitter.com/dify_ai). 主に: アプリケーションの共有やコミュニティとの交流。
@@ -239,7 +231,7 @@ docker compose up -d
<td>無料の30分間のミーティングをスケジュール</td>
</tr>
<tr>
<td><a href='https://github.com/langgenius/dify/issues'>技術サポート</a></td>
<td><a href='mailto:support@dify.ai?subject=[GitHub]Technical%20Support'>技術サポート</a></td>
<td>技術的な問題やサポートに関する質問</td>
</tr>
<tr>

View File

@@ -36,7 +36,6 @@
<a href="./README_KL.md"><img alt="Commits last month" src="https://img.shields.io/badge/Français-d9d9d9"></a>
<a href="./README_FR.md"><img alt="Commits last month" src="https://img.shields.io/badge/Klingon-d9d9d9"></a>
<a href="./README_KR.md"><img alt="Commits last month" src="https://img.shields.io/badge/한국어-d9d9d9"></a>
<a href="./README_TR.md"><img alt="Türkçe README" src="https://img.shields.io/badge/Türkçe-d9d9d9"></a>
</p>
#
@@ -180,7 +179,6 @@ The easiest way to start the Dify server is to run our [docker-compose.yml](dock
```bash
cd docker
cp .env.example .env
docker compose up -d
```
@@ -190,19 +188,12 @@ After running, you can access the Dify dashboard in your browser at [http://loca
## Next steps
If you need to customize the configuration, please refer to the comments in our [.env.example](docker/.env.example) file and update the corresponding values in your `.env` file. Additionally, you might need to make adjustments to the `docker-compose.yaml` file itself, such as changing image versions, port mappings, or volume mounts, based on your specific deployment environment and requirements. After making any changes, please re-run `docker-compose up -d`. You can find the full list of available environment variables [here](https://docs.dify.ai/getting-started/install-self-hosted/environments).
If you need to customize the configuration, please refer to the comments in our [docker-compose.yml](docker/docker-compose.yaml) file and manually set the environment configuration. After making the changes, please run `docker-compose up -d` again. You can see the full list of environment variables [here](https://docs.dify.ai/getting-started/install-self-hosted/environments).
If you'd like to configure a highly-available setup, there are community-contributed [Helm Charts](https://helm.sh/) and YAML files which allow Dify to be deployed on Kubernetes.
If you'd like to configure a highly-available setup, there are community-contributed [Helm Charts](https://helm.sh/) which allow Dify to be deployed on Kubernetes.
- [Helm Chart by @LeoQuote](https://github.com/douban/charts/tree/master/charts/dify)
- [Helm Chart by @BorisPolonsky](https://github.com/BorisPolonsky/dify-helm)
- [YAML file by @Winson-030](https://github.com/Winson-030/dify-kubernetes)
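As a rough sketch, deploying with one of these charts follows the standard Helm flow; the repository URL and release values below are placeholders, so consult the chosen chart's own README for the real ones:

```bash
# placeholder repo URL and release values -- see the chart's README for actual instructions
helm repo add dify-community https://example.com/charts
helm repo update
helm install dify dify-community/dify --namespace dify --create-namespace
```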
#### Terraform atorlugu pilersitsineq
##### Azure Global
Atoruk [terraform](https://www.terraform.io/) Dify-mik Azure-mut ataatsikkut ikkussuilluarlugu.
- [Azure Terraform atorlugu @nikawang](https://github.com/nikawang/dify-azure-terraform)
## Contributing
@@ -225,6 +216,7 @@ At the same time, please consider supporting Dify by sharing it on social media
). Best for: sharing feedback and asking questions.
* [GitHub Issues](https://github.com/langgenius/dify/issues). Best for: bugs you encounter using Dify.AI, and feature proposals. See our [Contribution Guide](https://github.com/langgenius/dify/blob/main/CONTRIBUTING.md).
* [Email](mailto:support@dify.ai?subject=[GitHub]Questions%20About%20Dify). Best for: questions you have about using Dify.AI.
* [Discord](https://discord.gg/FngNHpbcY7). Best for: sharing your applications and hanging out with the community.
* [Twitter](https://twitter.com/dify_ai). Best for: sharing your applications and hanging out with the community.

View File

@@ -36,7 +36,6 @@
<a href="./README_FR.md"><img alt="README en Français" src="https://img.shields.io/badge/Français-d9d9d9"></a>
<a href="./README_KL.md"><img alt="README tlhIngan Hol" src="https://img.shields.io/badge/Klingon-d9d9d9"></a>
<a href="./README_KR.md"><img alt="한국어 README" src="https://img.shields.io/badge/한국어-d9d9d9"></a>
<a href="./README_TR.md"><img alt="Türkçe README" src="https://img.shields.io/badge/Türkçe-d9d9d9"></a>
</p>
@@ -173,7 +172,6 @@ Dify 서버를 시작하는 가장 쉬운 방법은 [docker-compose.yml](docker/
```bash
cd docker
cp .env.example .env
docker compose up -d
```
@@ -183,19 +181,14 @@ docker compose up -d
## 다음 단계
구성을 사용자 정의해야 하는 경우 [.env.example](docker/.env.example) 파일의 주석을 참조하고 `.env` 파일에서 해당 값을 업데이트하십시오. 또한 특정 배포 환경 및 요구 사항에 따라 `docker-compose.yaml` 파일 자체를 조정해야 할 수도 있습니다. 예를 들어 이미지 버전, 포트 매핑 또는 볼륨 마운트를 변경합니다. 변경 후 `docker-compose up -d`를 다시 실행하십시오. 사용 가능한 환경 변수의 전체 목록은 [여기](https://docs.dify.ai/getting-started/install-self-hosted/environments)에서 찾을 수 있습니다.
구성 커스터마이징이 필요한 경우, [docker-compose.yml](docker/docker-compose.yaml) 파일의 코멘트를 참조하여 환경 구성을 수동으로 설정하십시오. 변경 후 `docker-compose up -d` 를 다시 실행하십시오. 환경 변수의 전체 목록은 [여기](https://docs.dify.ai/getting-started/install-self-hosted/environments)에서 확인할 수 있습니다.
Dify를 Kubernetes에 배포하고 고가용성 설정을 구성할 수 있도록 커뮤니티가 제공하는 [Helm Charts](https://helm.sh/)와 YAML 파일이 있습니다.
고가용성 설정을 구성하려면 Dify를 Kubernetes에 배포할 수 있는 커뮤니티 제공 [Helm Charts](https://helm.sh/)가 있습니다.
- [Helm Chart by @LeoQuote](https://github.com/douban/charts/tree/master/charts/dify)
- [Helm Chart by @BorisPolonsky](https://github.com/BorisPolonsky/dify-helm)
- [YAML file by @Winson-030](https://github.com/Winson-030/dify-kubernetes)
#### Terraform을 사용한 배포
##### Azure Global
[terraform](https://www.terraform.io/)을 사용하여 Azure에 Dify를 원클릭으로 배포하세요.
- [nikawang의 Azure Terraform](https://github.com/nikawang/dify-azure-terraform)
## 기여
@@ -215,6 +208,7 @@ Dify를 Kubernetes에 배포하고 프리미엄 스케일링 설정을 구성했
* [Github 토론](https://github.com/langgenius/dify/discussions). 피드백 공유 및 질문하기에 적합합니다.
* [GitHub 이슈](https://github.com/langgenius/dify/issues). Dify.AI 사용 중 발견한 버그와 기능 제안에 적합합니다. [기여 가이드](https://github.com/langgenius/dify/blob/main/CONTRIBUTING.md)를 참조하세요.
* [이메일](mailto:support@dify.ai?subject=[GitHub]Questions%20About%20Dify). Dify.AI 사용에 대한 질문하기에 적합합니다.
* [디스코드](https://discord.gg/FngNHpbcY7). 애플리케이션 공유 및 커뮤니티와 소통하기에 적합합니다.
* [트위터](https://twitter.com/dify_ai). 애플리케이션 공유 및 커뮤니티와 소통하기에 적합합니다.

View File

@@ -1,253 +0,0 @@
![cover-v5-optimized](https://github.com/langgenius/dify/assets/13230914/f9e19af5-61ba-4119-b926-d10c4c06ebab)
<p align="center">
<a href="https://cloud.dify.ai">Dify Bulut</a> ·
<a href="https://docs.dify.ai/getting-started/install-self-hosted">Kendi Sunucunuzda Barındırma</a> ·
<a href="https://docs.dify.ai">Dokümantasyon</a> ·
<a href="https://cal.com/guchenhe/60-min-meeting">Kurumsal Sorgu</a>
</p>
<p align="center">
<a href="https://dify.ai" target="_blank">
<img alt="Statik Rozet" src="https://img.shields.io/badge/Ürün-F04438"></a>
<a href="https://dify.ai/pricing" target="_blank">
<img alt="Statik Rozet" src="https://img.shields.io/badge/ücretsiz-fiyatlandırma?logo=free&color=%20%23155EEF&label=fiyatlandirma&labelColor=%20%23528bff"></a>
<a href="https://discord.gg/FngNHpbcY7" target="_blank">
<img src="https://img.shields.io/discord/1082486657678311454?logo=discord&labelColor=%20%235462eb&logoColor=%20%23f5f5f5&color=%20%235462eb"
alt="Discord'da sohbet et"></a>
<a href="https://twitter.com/intent/follow?screen_name=dify_ai" target="_blank">
<img src="https://img.shields.io/twitter/follow/dify_ai?logo=X&color=%20%23f5f5f5"
alt="Twitter'da takip et"></a>
<a href="https://hub.docker.com/u/langgenius" target="_blank">
<img alt="Docker Çekmeleri" src="https://img.shields.io/docker/pulls/langgenius/dify-web?labelColor=%20%23FDB062&color=%20%23f79009"></a>
<a href="https://github.com/langgenius/dify/graphs/commit-activity" target="_blank">
<img alt="Geçen ay yapılan commitler" src="https://img.shields.io/github/commit-activity/m/langgenius/dify?labelColor=%20%2332b583&color=%20%2312b76a"></a>
<a href="https://github.com/langgenius/dify/" target="_blank">
<img alt="Kapatılan sorunlar" src="https://img.shields.io/github/issues-search?query=repo%3Alanggenius%2Fdify%20is%3Aclosed&label=kapatilan%20sorunlar&labelColor=%20%237d89b0&color=%20%235d6b98"></a>
<a href="https://github.com/langgenius/dify/discussions/" target="_blank">
<img alt="Tartışma gönderileri" src="https://img.shields.io/github/discussions/langgenius/dify?labelColor=%20%239b8afb&color=%20%237a5af8"></a>
</p>
<p align="center">
<a href="./README.md"><img alt="README in English" src="https://img.shields.io/badge/English-d9d9d9"></a>
<a href="./README_CN.md"><img alt="简体中文版自述文件" src="https://img.shields.io/badge/简体中文-d9d9d9"></a>
<a href="./README_JA.md"><img alt="日本語のREADME" src="https://img.shields.io/badge/日本語-d9d9d9"></a>
<a href="./README_ES.md"><img alt="README en Español" src="https://img.shields.io/badge/Español-d9d9d9"></a>
<a href="./README_FR.md"><img alt="README en Français" src="https://img.shields.io/badge/Français-d9d9d9"></a>
<a href="./README_KL.md"><img alt="README tlhIngan Hol" src="https://img.shields.io/badge/Klingon-d9d9d9"></a>
<a href="./README_KR.md"><img alt="README in Korean" src="https://img.shields.io/badge/한국어-d9d9d9"></a>
<a href="./README_AR.md"><img alt="README بالعربية" src="https://img.shields.io/badge/العربية-d9d9d9"></a>
<a href="./README_TR.md"><img alt="Türkçe README" src="https://img.shields.io/badge/Türkçe-d9d9d9"></a>
</p>
Dify, açık kaynaklı bir LLM uygulama geliştirme platformudur. Sezgisel arayüzü, AI iş akışı, RAG pipeline'ı, ajan yetenekleri, model yönetimi, gözlemlenebilirlik özellikleri ve daha fazlasını birleştirerek, prototipten üretime hızlıca geçmenizi sağlar. İşte temel özelliklerin bir listesi:
</br> </br>
**1. Workflow**:
Görsel bir arayüz üzerinde güçlü AI iş akışları oluşturun ve test edin, aşağıdaki tüm özellikleri ve daha fazlasını kullanarak.
https://github.com/langgenius/dify/assets/13230914/356df23e-1604-483d-80a6-9517ece318aa
**2. Kapsamlı model desteği**:
Çok sayıda çıkarım sağlayıcısı ve kendi kendine barındırılan çözümlerden yüzlerce özel / açık kaynaklı LLM ile sorunsuz entegrasyon sağlar. GPT, Mistral, Llama3 ve OpenAI API uyumlu tüm modelleri kapsar. Desteklenen model sağlayıcılarının tam listesine [buradan](https://docs.dify.ai/getting-started/readme/model-providers) ulaşabilirsiniz.
![providers-v5](https://github.com/langgenius/dify/assets/13230914/5a17bdbe-097a-4100-8363-40255b70f6e3)
**3. Prompt IDE**:
Komut istemlerini oluşturmak, model performansını karşılaştırmak ve sohbet tabanlı uygulamalara metin-konuşma gibi ek özellikler eklemek için kullanıcı dostu bir arayüz.
**4. RAG Pipeline**:
Belge alımından bilgi çekmeye kadar geniş kapsamlı RAG yetenekleri. PDF'ler, PPT'ler ve diğer yaygın belge formatlarından metin çıkarma için hazır destek sunar.
**5. Ajan yetenekleri**:
LLM Fonksiyon Çağırma veya ReAct'a dayalı ajanlar tanımlayabilir ve bu ajanlara önceden hazırlanmış veya özel araçlar ekleyebilirsiniz. Dify, AI ajanları için Google Arama, DALL·E, Stable Diffusion ve WolframAlpha gibi 50'den fazla yerleşik araç sağlar.
**6. LLMOps**:
Uygulama loglarını ve performans metriklerini zaman içinde izleme ve analiz etme imkanı. Üretim ortamından elde edilen verilere ve kullanıcı geri bildirimlerine dayanarak, prompt'ları, veri setlerini ve modelleri sürekli olarak optimize edebilirsiniz. Bu sayede, AI uygulamanızın performansını ve doğruluğunu sürekli olarak artırabilirsiniz.
**7. Hizmet Olarak Backend**:
Dify'ın tüm özellikleri ilgili API'lerle birlikte gelir, böylece Dify'ı kendi iş mantığınıza kolayca entegre edebilirsiniz.
## Özellik karşılaştırması
<table style="width: 100%;">
<tr>
<th align="center">Özellik</th>
<th align="center">Dify.AI</th>
<th align="center">LangChain</th>
<th align="center">Flowise</th>
<th align="center">OpenAI Assistants API</th>
</tr>
<tr>
<td align="center">Programlama Yaklaşımı</td>
<td align="center">API + Uygulama odaklı</td>
<td align="center">Python Kodu</td>
<td align="center">Uygulama odaklı</td>
<td align="center">API odaklı</td>
</tr>
<tr>
<td align="center">Desteklenen LLM'ler</td>
<td align="center">Zengin Çeşitlilik</td>
<td align="center">Zengin Çeşitlilik</td>
<td align="center">Zengin Çeşitlilik</td>
<td align="center">Yalnızca OpenAI</td>
</tr>
<tr>
<td align="center">RAG Motoru</td>
<td align="center">✅</td>
<td align="center">✅</td>
<td align="center">✅</td>
<td align="center">✅</td>
</tr>
<tr>
<td align="center">Ajan</td>
<td align="center">✅</td>
<td align="center">✅</td>
<td align="center">❌</td>
<td align="center">✅</td>
</tr>
<tr>
<td align="center">İş Akışı</td>
<td align="center">✅</td>
<td align="center">❌</td>
<td align="center">✅</td>
<td align="center">❌</td>
</tr>
<tr>
<td align="center">Gözlemlenebilirlik</td>
<td align="center">✅</td>
<td align="center">✅</td>
<td align="center">❌</td>
<td align="center">❌</td>
</tr>
<tr>
<td align="center">Kurumsal Özellikler (SSO/Erişim kontrolü)</td>
<td align="center">✅</td>
<td align="center">❌</td>
<td align="center">❌</td>
<td align="center">❌</td>
</tr>
<tr>
<td align="center">Yerel Dağıtım</td>
<td align="center">✅</td>
<td align="center">✅</td>
<td align="center">✅</td>
<td align="center">❌</td>
</tr>
</table>
## Dify'ı Kullanma
- **Cloud </br>**
Herkesin sıfır kurulumla denemesi için bir [Dify Cloud](https://dify.ai) hizmeti sunuyoruz. Bu hizmet, kendi kendine dağıtılan versiyonun tüm yeteneklerini sağlar ve sandbox planında 200 ücretsiz GPT-4 çağrısı içerir.
- **Dify Topluluk Sürümünü Kendi Sunucunuzda Barındırma</br>**
Bu [başlangıç kılavuzu](#quick-start) ile Dify'ı kendi ortamınızda hızlıca çalıştırın.
Daha fazla referans ve detaylı talimatlar için [dokümantasyonumuzu](https://docs.dify.ai) kullanın.
- **Kurumlar / organizasyonlar için Dify</br>**
Ek kurumsal odaklı özellikler sunuyoruz. Kurumsal ihtiyaçları görüşmek için [bizimle bir toplantı planlayın](https://cal.com/guchenhe/30min) veya [bize bir e-posta gönderin](mailto:business@dify.ai?subject=[GitHub]Business%20License%20Inquiry). </br>
> AWS kullanan startuplar ve küçük işletmeler için, [AWS Marketplace'deki Dify Premium'a](https://aws.amazon.com/marketplace/pp/prodview-t22mebxzwjhu6) göz atın ve tek tıklamayla kendi AWS VPC'nize dağıtın. Bu, özel logo ve marka ile uygulamalar oluşturma seçeneğine sahip uygun fiyatlı bir AMI teklifidir.
## Güncel Kalma
GitHub'da Dify'a yıldız verin ve yeni sürümlerden anında haberdar olun.
![bizi-yıldızlayın](https://github.com/langgenius/dify/assets/13230914/b823edc1-6388-4e25-ad45-2f6b187adbb4)
## Hızlı başlangıç
> Dify'ı kurmadan önce, makinenizin aşağıdaki minimum sistem gereksinimlerini karşıladığından emin olun:
>
>- CPU >= 2 Çekirdek
>- RAM >= 4GB
</br>
Dify sunucusunu başlatmanın en kolay yolu, [docker-compose.yml](docker/docker-compose.yaml) dosyamızı çalıştırmaktır. Kurulum komutunu çalıştırmadan önce, makinenizde [Docker](https://docs.docker.com/get-docker/) ve [Docker Compose](https://docs.docker.com/compose/install/)'un kurulu olduğundan emin olun:
```bash
cd docker
cp .env.example .env
docker compose up -d
```
Çalıştırdıktan sonra, tarayıcınızda [http://localhost/install](http://localhost/install) adresinden Dify kontrol paneline erişebilir ve başlangıç ayarları sürecini başlatabilirsiniz.
> Eğer Dify'a katkıda bulunmak veya ek geliştirmeler yapmak isterseniz, [kaynak koddan dağıtım kılavuzumuza](https://docs.dify.ai/getting-started/install-self-hosted/local-source-code) başvurun.
## Sonraki adımlar
Yapılandırmayı özelleştirmeniz gerekiyorsa, lütfen [.env.example](docker/.env.example) dosyamızdaki yorumlara bakın ve `.env` dosyanızdaki ilgili değerleri güncelleyin. Ayrıca, spesifik dağıtım ortamınıza ve gereksinimlerinize bağlı olarak `docker-compose.yaml` dosyasının kendisinde de, imaj sürümlerini, port eşlemelerini veya hacim bağlantılarını değiştirmek gibi ayarlamalar yapmanız gerekebilir. Herhangi bir değişiklik yaptıktan sonra, lütfen `docker-compose up -d` komutunu tekrar çalıştırın. Kullanılabilir tüm ortam değişkenlerinin tam listesini [burada](https://docs.dify.ai/getting-started/install-self-hosted/environments) bulabilirsiniz.
Yüksek kullanılabilirliğe sahip bir kurulum yapılandırmak isterseniz, Dify'ın Kubernetes üzerine dağıtılmasına olanak tanıyan topluluk katkılı [Helm Charts](https://helm.sh/) ve YAML dosyaları mevcuttur.
- [@LeoQuote tarafından Helm Chart](https://github.com/douban/charts/tree/master/charts/dify)
- [@BorisPolonsky tarafından Helm Chart](https://github.com/BorisPolonsky/dify-helm)
- [@Winson-030 tarafından YAML dosyası](https://github.com/Winson-030/dify-kubernetes)
#### Dağıtım için Terraform Kullanımı
##### Azure Global
[Terraform](https://www.terraform.io/) kullanarak Dify'ı Azure'a tek tıklamayla dağıtın.
- [@nikawang tarafından Azure Terraform](https://github.com/nikawang/dify-azure-terraform)
## Katkıda Bulunma
Kod katkısında bulunmak isteyenler için [Katkı Kılavuzumuza](https://github.com/langgenius/dify/blob/main/CONTRIBUTING.md) bakabilirsiniz.
Aynı zamanda, lütfen Dify'ı sosyal medyada, etkinliklerde ve konferanslarda paylaşarak desteklemeyi düşünün.
> Dify'ı Mandarin veya İngilizce dışındaki dillere çevirmemize yardımcı olacak katkıda bulunanlara ihtiyacımız var. Yardımcı olmakla ilgileniyorsanız, lütfen daha fazla bilgi için [i18n README](https://github.com/langgenius/dify/blob/main/web/i18n/README.md) dosyasına bakın ve [Discord Topluluk Sunucumuzdaki](https://discord.gg/8Tpq4AcN9c) `global-users` kanalında bize bir yorum bırakın.
**Katkıda Bulunanlar**
<a href="https://github.com/langgenius/dify/graphs/contributors">
<img src="https://contrib.rocks/image?repo=langgenius/dify" />
</a>
## Topluluk & iletişim
* [Github Tartışmaları](https://github.com/langgenius/dify/discussions). En uygun: geri bildirim paylaşmak ve soru sormak için.
* [GitHub Sorunları](https://github.com/langgenius/dify/issues). En uygun: Dify.AI kullanırken karşılaştığınız hatalar ve özellik önerileri için. [Katkı Kılavuzumuza](https://github.com/langgenius/dify/blob/main/CONTRIBUTING.md) bakın.
* [Discord](https://discord.gg/FngNHpbcY7). En uygun: uygulamalarınızı paylaşmak ve toplulukla vakit geçirmek için.
* [Twitter](https://twitter.com/dify_ai). En uygun: uygulamalarınızı paylaşmak ve toplulukla vakit geçirmek için.
Veya doğrudan bir ekip üyesiyle toplantı planlayın:
<table>
<tr>
<th>İletişim Noktası</th>
<th>Amaç</th>
</tr>
<tr>
<td><a href='https://cal.com/guchenhe/15min' target='_blank'><img class="schedule-button" src='https://github.com/langgenius/dify/assets/13230914/9ebcd111-1205-4d71-83d5-948d70b809f5' alt='Git-Hub-README-Button-3x' style="width: 180px; height: auto; object-fit: contain;"/></a></td>
<td>İş sorgulamaları & ürün geri bildirimleri</td>
</tr>
<tr>
<td><a href='https://cal.com/pinkbanana' target='_blank'><img class="schedule-button" src='https://github.com/langgenius/dify/assets/13230914/d1edd00a-d7e4-4513-be6c-e57038e143fd' alt='Git-Hub-README-Button-2x' style="width: 180px; height: auto; object-fit: contain;"/></a></td>
<td>Katkılar, sorunlar & özellik istekleri</td>
</tr>
</table>
## Star history
[![Star History Chart](https://api.star-history.com/svg?repos=langgenius/dify&type=Date)](https://star-history.com/#langgenius/dify&Date)
## Güvenlik açıklaması
Gizliliğinizi korumak için, lütfen güvenlik sorunlarını GitHub'da paylaşmaktan kaçının. Bunun yerine, sorularınızı security@dify.ai adresine gönderin ve size daha detaylı bir cevap vereceğiz.
## Lisans
Bu depo, temel olarak Apache 2.0 lisansı ve birkaç ek kısıtlama içeren [Dify Açık Kaynak Lisansı](LICENSE) altında kullanıma sunulmuştur.

View File

@@ -8,7 +8,4 @@ logs
*.log*
# jetbrains
.idea
# venv
.venv
.idea

View File

@@ -39,10 +39,9 @@ DB_DATABASE=dify
# Storage configuration
# use for store upload files, private keys...
# storage type: local, s3, azure-blob, google-storage
# storage type: local, s3, azure-blob
STORAGE_TYPE=local
STORAGE_LOCAL_PATH=storage
S3_USE_AWS_MANAGED_IAM=false
S3_ENDPOINT=https://your-bucket-name.storage.s3.cloudflare.com
S3_BUCKET_NAME=your-bucket-name
S3_ACCESS_KEY=your-access-key
@@ -63,27 +62,13 @@ ALIYUN_OSS_REGION=your-region
# Google Storage configuration
GOOGLE_STORAGE_BUCKET_NAME=your-bucket-name
GOOGLE_STORAGE_SERVICE_ACCOUNT_JSON_BASE64=your-google-service-account-json-base64-string
# Tencent COS Storage configuration
TENCENT_COS_BUCKET_NAME=your-bucket-name
TENCENT_COS_SECRET_KEY=your-secret-key
TENCENT_COS_SECRET_ID=your-secret-id
TENCENT_COS_REGION=your-region
TENCENT_COS_SCHEME=your-scheme
# OCI Storage configuration
OCI_ENDPOINT=your-endpoint
OCI_BUCKET_NAME=your-bucket-name
OCI_ACCESS_KEY=your-access-key
OCI_SECRET_KEY=your-secret-key
OCI_REGION=your-region
GOOGLE_STORAGE_SERVICE_ACCOUNT_JSON=your-google-service-account-json-base64-string
# CORS configuration
WEB_API_CORS_ALLOW_ORIGINS=http://127.0.0.1:3000,*
CONSOLE_CORS_ALLOW_ORIGINS=http://127.0.0.1:3000,*
# Vector database configuration, support: weaviate, qdrant, milvus, myscale, relyt, pgvecto_rs, pgvector, chroma, opensearch, tidb_vector
# Vector database configuration, support: weaviate, qdrant, milvus, relyt, pgvecto_rs, pgvector
VECTOR_STORE=weaviate
# Weaviate configuration
@@ -106,14 +91,6 @@ MILVUS_USER=root
MILVUS_PASSWORD=Milvus
MILVUS_SECURE=false
# MyScale configuration
MYSCALE_HOST=127.0.0.1
MYSCALE_PORT=8123
MYSCALE_USER=default
MYSCALE_PASSWORD=
MYSCALE_DATABASE=default
MYSCALE_FTS_PARAMS=
# Relyt configuration
RELYT_HOST=127.0.0.1
RELYT_PORT=5432
@@ -121,15 +98,6 @@ RELYT_USER=postgres
RELYT_PASSWORD=postgres
RELYT_DATABASE=postgres
# Tencent configuration
TENCENT_VECTOR_DB_URL=http://127.0.0.1
TENCENT_VECTOR_DB_API_KEY=dify
TENCENT_VECTOR_DB_TIMEOUT=30
TENCENT_VECTOR_DB_USERNAME=dify
TENCENT_VECTOR_DB_DATABASE=dify
TENCENT_VECTOR_DB_SHARD=1
TENCENT_VECTOR_DB_REPLICAS=2
# PGVECTO_RS configuration
PGVECTO_RS_HOST=localhost
PGVECTO_RS_PORT=5431
@@ -159,23 +127,6 @@ CHROMA_DATABASE=default_database
CHROMA_AUTH_PROVIDER=chromadb.auth.token_authn.TokenAuthenticationServerProvider
CHROMA_AUTH_CREDENTIALS=difyai123456
# AnalyticDB configuration
ANALYTICDB_KEY_ID=your-ak
ANALYTICDB_KEY_SECRET=your-sk
ANALYTICDB_REGION_ID=cn-hangzhou
ANALYTICDB_INSTANCE_ID=gp-ab123456
ANALYTICDB_ACCOUNT=testaccount
ANALYTICDB_PASSWORD=testpassword
ANALYTICDB_NAMESPACE=dify
ANALYTICDB_NAMESPACE_PASSWORD=difypassword
# OpenSearch configuration
OPENSEARCH_HOST=127.0.0.1
OPENSEARCH_PORT=9200
OPENSEARCH_USER=admin
OPENSEARCH_PASSWORD=admin
OPENSEARCH_SECURE=true
# Upload configuration
UPLOAD_FILE_SIZE_LIMIT=15
UPLOAD_FILE_BATCH_LIMIT=5
@@ -183,7 +134,6 @@ UPLOAD_IMAGE_FILE_SIZE_LIMIT=10
# Model Configuration
MULTIMODAL_SEND_IMAGE_FORMAT=base64
PROMPT_GENERATION_MAX_TOKENS=512
# Mail configuration, support: resend, smtp
MAIL_TYPE=
@@ -217,7 +167,6 @@ UNSTRUCTURED_API_KEY=
SSRF_PROXY_HTTP_URL=
SSRF_PROXY_HTTPS_URL=
SSRF_DEFAULT_MAX_RETRIES=3
BATCH_UPLOAD_LIMIT=10
KEYWORD_DATA_SOURCE_TYPE=database
@@ -254,11 +203,3 @@ INDEXING_MAX_SEGMENTATION_TOKENS_LENGTH=1000
WORKFLOW_MAX_EXECUTION_STEPS=500
WORKFLOW_MAX_EXECUTION_TIME=1200
WORKFLOW_CALL_MAX_DEPTH=5
# App configuration
APP_MAX_EXECUTION_TIME=1200
APP_MAX_ACTIVE_REQUESTS=0
# Celery beat configuration
CELERY_BEAT_SCHEDULER_TIME=1

View File

@@ -1,54 +1,44 @@
{
// Use IntelliSense to learn about possible attributes.
// Hover to view descriptions of existing attributes.
// For more information, visit: https://go.microsoft.com/fwlink/?linkid=830387
"version": "0.2.0",
"configurations": [
{
"name": "Python: Flask",
"type": "debugpy",
"request": "launch",
"python": "${workspaceFolder}/api/.venv/bin/python",
"cwd": "${workspaceFolder}/api",
"envFile": ".env",
"module": "flask",
"justMyCode": true,
"jinja": true,
"env": {
"FLASK_APP": "app.py",
"GEVENT_SUPPORT": "True"
},
"args": [
"run",
"--host=0.0.0.0",
"--port=5001",
]
},
{
"name": "Python: Celery",
"type": "debugpy",
"request": "launch",
"python": "${workspaceFolder}/api/.venv/bin/python",
"cwd": "${workspaceFolder}/api",
"module": "celery",
"justMyCode": true,
"envFile": ".env",
"args": ["-A", "app.celery", "worker", "-P", "gevent", "-c", "1", "--loglevel", "info", "-Q", "dataset,generation,mail"],
"envFile": "${workspaceFolder}/.env",
"env": {
"FLASK_APP": "app.py",
"FLASK_DEBUG": "1",
"GEVENT_SUPPORT": "True"
},
"console": "integratedTerminal",
"python": "${command:python.interpreterPath}"
},
{
"name": "Python: Flask",
"type": "debugpy",
"request": "launch",
"module": "flask",
"env": {
"FLASK_APP": "app.py",
"FLASK_DEBUG": "1",
"GEVENT_SUPPORT": "True"
},
"args": [
"-A",
"app.celery",
"worker",
"-P",
"gevent",
"-c",
"1",
"--loglevel",
"info",
"-Q",
"dataset,generation,mail,ops_trace,app_deletion"
]
},
"run",
"--host=0.0.0.0",
"--port=5001",
"--debug"
],
"jinja": true,
"justMyCode": true,
"python": "${command:python.interpreterPath}"
}
]
}

View File

@@ -1,42 +1,34 @@
# base image
FROM python:3.10-slim-bookworm AS base
WORKDIR /app/api
LABEL maintainer="takatost@gmail.com"
# Install Poetry
ENV POETRY_VERSION=1.8.3
RUN pip install --no-cache-dir poetry==${POETRY_VERSION}
# Configure Poetry
ENV POETRY_CACHE_DIR=/tmp/poetry_cache
ENV POETRY_NO_INTERACTION=1
ENV POETRY_VIRTUALENVS_IN_PROJECT=true
ENV POETRY_VIRTUALENVS_CREATE=true
FROM base AS packages
# install packages
FROM base as packages
RUN apt-get update \
&& apt-get install -y --no-install-recommends gcc g++ libc-dev libffi-dev libgmp-dev libmpfr-dev libmpc-dev
# Install Python dependencies
COPY pyproject.toml poetry.lock ./
RUN poetry install --sync --no-cache --no-root
COPY requirements.txt /requirements.txt
RUN --mount=type=cache,target=/root/.cache/pip \
pip install --prefix=/pkg -r requirements.txt
# production stage
FROM base AS production
ENV FLASK_APP=app.py
ENV EDITION=SELF_HOSTED
ENV DEPLOY_ENV=PRODUCTION
ENV CONSOLE_API_URL=http://127.0.0.1:5001
ENV CONSOLE_WEB_URL=http://127.0.0.1:3000
ENV SERVICE_API_URL=http://127.0.0.1:5001
ENV APP_WEB_URL=http://127.0.0.1:3000
ENV FLASK_APP app.py
ENV EDITION SELF_HOSTED
ENV DEPLOY_ENV PRODUCTION
ENV CONSOLE_API_URL http://127.0.0.1:5001
ENV CONSOLE_WEB_URL http://127.0.0.1:3000
ENV SERVICE_API_URL http://127.0.0.1:5001
ENV APP_WEB_URL http://127.0.0.1:3000
EXPOSE 5001
# set timezone
ENV TZ=UTC
ENV TZ UTC
WORKDIR /app/api
@@ -45,20 +37,13 @@ RUN apt-get update \
&& apt-get autoremove \
&& rm -rf /var/lib/apt/lists/*
# Copy Python environment and packages
ENV VIRTUAL_ENV=/app/api/.venv
COPY --from=packages ${VIRTUAL_ENV} ${VIRTUAL_ENV}
ENV PATH="${VIRTUAL_ENV}/bin:${PATH}"
# Copy source code
COPY --from=packages /pkg /usr/local
COPY . /app/api/
# Copy entrypoint
COPY docker/entrypoint.sh /entrypoint.sh
RUN chmod +x /entrypoint.sh
ARG COMMIT_SHA
ENV COMMIT_SHA=${COMMIT_SHA}
ENV COMMIT_SHA ${COMMIT_SHA}
ENTRYPOINT ["/bin/bash", "/entrypoint.sh"]
ENTRYPOINT ["/bin/bash", "/entrypoint.sh"]

View File

@@ -2,88 +2,84 @@
## Usage
> [!IMPORTANT]
> In the v0.6.12 release, we deprecated `pip` as the package management tool for Dify API Backend service and replaced it with `poetry`.
1. Start the docker-compose stack
The backend requires some middleware, including PostgreSQL, Redis, and Weaviate, which can be started together using `docker-compose`.
```bash
cd ../docker
cp middleware.env.example middleware.env
# change the profile to other vector database if you are not using weaviate
docker compose -f docker-compose.middleware.yaml --profile weaviate -p dify up -d
docker-compose -f docker-compose.middleware.yaml -p dify up -d
cd ../api
```
2. Copy `.env.example` to `.env`
3. Generate a `SECRET_KEY` in the `.env` file.
```bash for Linux
```bash
sed -i "/^SECRET_KEY=/c\SECRET_KEY=$(openssl rand -base64 42)" .env
```
```bash for Mac
secret_key=$(openssl rand -base64 42)
sed -i '' "/^SECRET_KEY=/c\\
SECRET_KEY=${secret_key}" .env
```
4. Create environment.
Dify API service uses [Poetry](https://python-poetry.org/docs/) to manage dependencies. You can execute `poetry shell` to activate the environment.
5. Install dependencies
- Anaconda
If you use Anaconda, create a new environment and activate it
```bash
conda create --name dify python=3.10
conda activate dify
```
- Poetry
If you use Poetry, you don't need to manually create the environment. You can execute `poetry shell` to activate the environment.
5. Install dependencies
- Anaconda
```bash
pip install -r requirements.txt
```
- Poetry
```bash
poetry env use 3.10
poetry install
```
If contributors forget to update the dependencies in `pyproject.toml`, you can run the following shell commands instead.
```bash
```bash
poetry shell # activate current environment
poetry add $(cat requirements.txt) # install dependencies of production and update pyproject.toml
poetry add $(cat requirements-dev.txt) --group dev # install dependencies of development and update pyproject.toml
```
6. Run migrate
Before the first launch, migrate the database to the latest version.
```bash
poetry run python -m flask db upgrade
flask db upgrade
```
7. Start backend
⚠️ If you encounter problems with jieba, for example
```
> flask db upgrade
Error: While importing 'app', an ImportError was raised:
```
Please run the following command instead.
```
pip install -r requirements.txt --upgrade --force-reinstall
```
7. Start backend:
```bash
poetry run python -m flask run --host 0.0.0.0 --port=5001 --debug
flask run --host 0.0.0.0 --port=5001 --debug
```
8. Set up your application by visiting http://localhost:5001/console/api/setup or other APIs...
9. If you need to debug local async processing, please start the worker service by running
`celery -A app.celery worker -P gevent -c 1 --loglevel INFO -Q dataset,generation,mail`.
The started celery app handles the async tasks, e.g. dataset importing and documents indexing.
8. Start Dify [web](../web) service.
9. Set up your application by visiting `http://localhost:3000`...
10. If you need to debug local async processing, please start the worker service.
```bash
poetry run python -m celery -A app.celery worker -P gevent -c 1 --loglevel INFO -Q dataset,generation,mail,ops_trace,app_deletion
```
The started celery app handles the async tasks, e.g. dataset importing and documents indexing.
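To verify the worker is actually consuming from those queues, Celery's built-in `inspect ping` can be used (a minimal check, assuming the worker above is already running):

```bash
poetry run python -m celery -A app.celery inspect ping
# a healthy worker replies with something like: celery@<hostname>: OK pong
```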
## Testing
1. Install dependencies for both the backend and the test environment
```bash
poetry install --with dev
```
pip install -r requirements.txt -r requirements-dev.txt
```
2. Run the tests locally with mocked system environment variables in `tool.pytest_env` section in `pyproject.toml`
```bash
cd ../
poetry run -C api bash dev/pytest/pytest_all_tests.sh
dev/pytest/pytest_all_tests.sh
```

View File

@@ -1,6 +1,6 @@
import os
if os.environ.get("DEBUG", "false").lower() != 'true':
if not os.environ.get("DEBUG") or os.environ.get("DEBUG").lower() != 'true':
from gevent import monkey
monkey.patch_all()
@@ -21,9 +21,8 @@ from flask import Flask, Response, request
from flask_cors import CORS
from werkzeug.exceptions import Unauthorized
import contexts
from commands import register_commands
from configs import dify_config
from config import Config
# DO NOT REMOVE BELOW
from events import event_handlers
@@ -43,8 +42,6 @@ from extensions import (
from extensions.ext_database import db
from extensions.ext_login import login_manager
from libs.passport import PassportService
# TODO: Find a way to avoid importing models here
from models import account, dataset, model, source, task, tool, tools, web
from services.account_service import AccountService
@@ -77,28 +74,10 @@ config_type = os.getenv('EDITION', default='SELF_HOSTED') # ce edition first
# Application Factory Function
# ----------------------------
def create_flask_app_with_configs() -> Flask:
"""
create a raw flask app
with configs loaded from .env file
"""
dify_app = DifyApp(__name__)
dify_app.config.from_mapping(dify_config.model_dump())
# populate configs into system environment variables
for key, value in dify_app.config.items():
if isinstance(value, str):
os.environ[key] = value
elif isinstance(value, int | float | bool):
os.environ[key] = str(value)
elif value is None:
os.environ[key] = ''
return dify_app
def create_app() -> Flask:
app = create_flask_app_with_configs()
app = DifyApp(__name__)
app.config.from_object(Config())
app.secret_key = app.config['SECRET_KEY']
@@ -120,21 +99,9 @@ def create_app() -> Flask:
level=app.config.get('LOG_LEVEL'),
format=app.config.get('LOG_FORMAT'),
datefmt=app.config.get('LOG_DATEFORMAT'),
handlers=log_handlers,
force=True
handlers=log_handlers
)
log_tz = app.config.get('LOG_TZ')
if log_tz:
from datetime import datetime
import pytz
timezone = pytz.timezone(log_tz)
def time_converter(seconds):
return datetime.utcfromtimestamp(seconds).astimezone(timezone).timetuple()
for handler in logging.root.handlers:
handler.formatter.converter = time_converter
initialize_extensions(app)
register_blueprints(app)
register_commands(app)
@@ -162,29 +129,27 @@ def initialize_extensions(app):
@login_manager.request_loader
def load_user_from_request(request_from_flask_login):
"""Load user based on the request."""
if request.blueprint not in ['console', 'inner_api']:
return None
# Check if the user_id contains a dot, indicating the old format
auth_header = request.headers.get('Authorization', '')
if not auth_header:
auth_token = request.args.get('_token')
if not auth_token:
raise Unauthorized('Invalid Authorization token.')
if request.blueprint in ['console', 'inner_api']:
# Check if the user_id contains a dot, indicating the old format
auth_header = request.headers.get('Authorization', '')
if not auth_header:
auth_token = request.args.get('_token')
if not auth_token:
raise Unauthorized('Invalid Authorization token.')
else:
if ' ' not in auth_header:
raise Unauthorized('Invalid Authorization header format. Expected \'Bearer <api-key>\' format.')
auth_scheme, auth_token = auth_header.split(None, 1)
auth_scheme = auth_scheme.lower()
if auth_scheme != 'bearer':
raise Unauthorized('Invalid Authorization header format. Expected \'Bearer <api-key>\' format.')
decoded = PassportService().verify(auth_token)
user_id = decoded.get('user_id')
return AccountService.load_user(user_id)
else:
if ' ' not in auth_header:
raise Unauthorized('Invalid Authorization header format. Expected \'Bearer <api-key>\' format.')
auth_scheme, auth_token = auth_header.split(None, 1)
auth_scheme = auth_scheme.lower()
if auth_scheme != 'bearer':
raise Unauthorized('Invalid Authorization header format. Expected \'Bearer <api-key>\' format.')
decoded = PassportService().verify(auth_token)
user_id = decoded.get('user_id')
account = AccountService.load_logged_in_account(account_id=user_id, token=auth_token)
if account:
contexts.tenant_id.set(account.current_tenant_id)
return account
return None
@login_manager.unauthorized_handler
@@ -245,7 +210,7 @@ def register_blueprints(app):
app = create_app()
celery = app.extensions["celery"]
if app.config.get('TESTING'):
if app.config['TESTING']:
print("App is running in TESTING mode")
@@ -261,7 +226,6 @@ def after_request(response):
@app.route('/health')
def health():
return Response(json.dumps({
'pid': os.getpid(),
'status': 'ok',
'version': app.config['CURRENT_VERSION']
}), status=200, content_type="application/json")
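For a quick smoke test, the `/health` route above can be queried directly (a sketch, assuming the API is listening on the default port 5001):

```bash
curl -s http://localhost:5001/health
# expected shape (values will vary): {"pid": 1234, "status": "ok", "version": "0.6.10"}
```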
@@ -285,7 +249,6 @@ def threads():
})
return {
'pid': os.getpid(),
'thread_num': num_threads,
'threads': thread_list
}
@@ -295,7 +258,6 @@ def threads():
def pool_stat():
engine = db.engine
return {
'pid': os.getpid(),
'pool_size': engine.pool.size(),
'checked_in_connections': engine.pool.checkedin(),
'checked_out_connections': engine.pool.checkedout(),

View File

@@ -1,6 +1,5 @@
import base64
import json
import logging
import secrets
from typing import Optional
@@ -8,14 +7,11 @@ import click
from flask import current_app
from werkzeug.exceptions import NotFound
from configs import dify_config
from constants.languages import languages
from core.rag.datasource.vdb.vector_factory import Vector
from core.rag.datasource.vdb.vector_type import VectorType
from core.rag.models.document import Document
from events.app_event import app_was_created
from extensions.ext_database import db
from extensions.ext_redis import redis_client
from libs.helper import email as email_validate
from libs.password import hash_password, password_pattern, valid_password
from libs.rsa import generate_key_pair
@@ -113,7 +109,7 @@ def reset_encrypt_key_pair():
After the reset, all LLM credentials will become invalid, requiring re-entry.
Only support SELF_HOSTED mode.
"""
if dify_config.EDITION != 'SELF_HOSTED':
if current_app.config['EDITION'] != 'SELF_HOSTED':
click.echo(click.style('Sorry, only support SELF_HOSTED mode.', fg='red'))
return
@@ -249,7 +245,8 @@ def migrate_knowledge_vector_database():
create_count = 0
skipped_count = 0
total_count = 0
vector_type = dify_config.VECTOR_STORE
config = current_app.config
vector_type = config.get('VECTOR_STORE')
page = 1
while True:
try:
@@ -312,14 +309,6 @@ def migrate_knowledge_vector_database():
"vector_store": {"class_prefix": collection_name}
}
dataset.index_struct = json.dumps(index_struct_dict)
elif vector_type == VectorType.TENCENT:
dataset_id = dataset.id
collection_name = Dataset.gen_collection_name_by_id(dataset_id)
index_struct_dict = {
"type": VectorType.TENCENT,
"vector_store": {"class_prefix": collection_name}
}
dataset.index_struct = json.dumps(index_struct_dict)
elif vector_type == VectorType.PGVECTOR:
dataset_id = dataset.id
collection_name = Dataset.gen_collection_name_by_id(dataset_id)
@@ -328,22 +317,6 @@ def migrate_knowledge_vector_database():
"vector_store": {"class_prefix": collection_name}
}
dataset.index_struct = json.dumps(index_struct_dict)
elif vector_type == VectorType.OPENSEARCH:
dataset_id = dataset.id
collection_name = Dataset.gen_collection_name_by_id(dataset_id)
index_struct_dict = {
"type": VectorType.OPENSEARCH,
"vector_store": {"class_prefix": collection_name}
}
dataset.index_struct = json.dumps(index_struct_dict)
elif vector_type == VectorType.ANALYTICDB:
dataset_id = dataset.id
collection_name = Dataset.gen_collection_name_by_id(dataset_id)
index_struct_dict = {
"type": VectorType.ANALYTICDB,
"vector_store": {"class_prefix": collection_name}
}
dataset.index_struct = json.dumps(index_struct_dict)
else:
raise ValueError(f"Vector store {vector_type} is not supported.")
@@ -483,7 +456,8 @@ def convert_to_agent_apps():
@click.option('--field', default='metadata.doc_id', prompt=False, help='index field , default is metadata.doc_id.')
def add_qdrant_doc_id_index(field: str):
click.echo(click.style('Start add qdrant doc_id index.', fg='green'))
vector_type = dify_config.VECTOR_STORE
config = current_app.config
vector_type = config.get('VECTOR_STORE')
if vector_type != "qdrant":
click.echo(click.style('Sorry, only support qdrant vector store.', fg='red'))
return
@@ -500,15 +474,13 @@ def add_qdrant_doc_id_index(field: str):
from core.rag.datasource.vdb.qdrant.qdrant_vector import QdrantConfig
for binding in bindings:
if dify_config.QDRANT_URL is None:
raise ValueError('Qdrant url is required.')
qdrant_config = QdrantConfig(
endpoint=dify_config.QDRANT_URL,
api_key=dify_config.QDRANT_API_KEY,
endpoint=config.get('QDRANT_URL'),
api_key=config.get('QDRANT_API_KEY'),
root_path=current_app.root_path,
timeout=dify_config.QDRANT_CLIENT_TIMEOUT,
grpc_port=dify_config.QDRANT_GRPC_PORT,
prefer_grpc=dify_config.QDRANT_GRPC_ENABLED
timeout=config.get('QDRANT_CLIENT_TIMEOUT'),
grpc_port=config.get('QDRANT_GRPC_PORT'),
prefer_grpc=config.get('QDRANT_GRPC_ENABLED')
)
try:
client = qdrant_client.QdrantClient(**qdrant_config.to_qdrant_params())
@@ -573,75 +545,6 @@ def create_tenant(email: str, language: Optional[str] = None):
'Account: {}\nPassword: {}'.format(email, new_password), fg='green'))
@click.command('upgrade-db', help='upgrade the database')
def upgrade_db():
click.echo('Preparing database migration...')
lock = redis_client.lock(name='db_upgrade_lock', timeout=60)
if lock.acquire(blocking=False):
try:
click.echo(click.style('Start database migration.', fg='green'))
# run db migration
import flask_migrate
flask_migrate.upgrade()
click.echo(click.style('Database migration successful!', fg='green'))
except Exception as e:
logging.exception(f'Database migration failed, error: {e}')
finally:
lock.release()
else:
click.echo('Database migration skipped')
@click.command('fix-app-site-missing', help='Fix app related site missing issue.')
def fix_app_site_missing():
"""
Fix app related site missing issue.
"""
click.echo(click.style('Start fix app related site missing issue.', fg='green'))
failed_app_ids = []
while True:
sql = """select apps.id as id from apps left join sites on sites.app_id=apps.id
where sites.id is null limit 1000"""
with db.engine.begin() as conn:
rs = conn.execute(db.text(sql))
processed_count = 0
for i in rs:
processed_count += 1
app_id = str(i.id)
if app_id in failed_app_ids:
continue
try:
app = db.session.query(App).filter(App.id == app_id).first()
tenant = app.tenant
if tenant:
accounts = tenant.get_accounts()
if not accounts:
print("Fix app {} failed.".format(app.id))
continue
account = accounts[0]
print("Fix app {} related site missing issue.".format(app.id))
app_was_created.send(app, account=account)
except Exception as e:
failed_app_ids.append(app_id)
click.echo(click.style('Fix app {} related site missing issue failed!'.format(app_id), fg='red'))
logging.exception(f'Fix app related site missing issue failed, error: {e}')
continue
if not processed_count:
break
click.echo(click.style('Congratulations! Fix app related site missing issue successful!', fg='green'))
def register_commands(app):
app.cli.add_command(reset_password)
app.cli.add_command(reset_email)
@@ -650,5 +553,4 @@ def register_commands(app):
app.cli.add_command(convert_to_agent_apps)
app.cli.add_command(add_qdrant_doc_id_index)
app.cli.add_command(create_tenant)
app.cli.add_command(upgrade_db)
app.cli.add_command(fix_app_site_missing)

437
api/config.py Normal file
View File

@@ -0,0 +1,437 @@
import os
import dotenv
dotenv.load_dotenv()
DEFAULTS = {
'EDITION': 'SELF_HOSTED',
'DB_USERNAME': 'postgres',
'DB_PASSWORD': '',
'DB_HOST': 'localhost',
'DB_PORT': '5432',
'DB_DATABASE': 'dify',
'DB_CHARSET': '',
'REDIS_HOST': 'localhost',
'REDIS_PORT': '6379',
'REDIS_DB': '0',
'REDIS_USE_SSL': 'False',
'OAUTH_REDIRECT_PATH': '/console/api/oauth/authorize',
'OAUTH_REDIRECT_INDEX_PATH': '/',
'CONSOLE_WEB_URL': 'https://cloud.dify.ai',
'CONSOLE_API_URL': 'https://cloud.dify.ai',
'SERVICE_API_URL': 'https://api.dify.ai',
'APP_WEB_URL': 'https://udify.app',
'FILES_URL': '',
'FILES_ACCESS_TIMEOUT': 300,
'S3_ADDRESS_STYLE': 'auto',
'STORAGE_TYPE': 'local',
'STORAGE_LOCAL_PATH': 'storage',
'CHECK_UPDATE_URL': 'https://updates.dify.ai',
'DEPLOY_ENV': 'PRODUCTION',
'SQLALCHEMY_DATABASE_URI_SCHEME': 'postgresql',
'SQLALCHEMY_POOL_SIZE': 30,
'SQLALCHEMY_MAX_OVERFLOW': 10,
'SQLALCHEMY_POOL_RECYCLE': 3600,
'SQLALCHEMY_POOL_PRE_PING': 'False',
'SQLALCHEMY_ECHO': 'False',
'SENTRY_TRACES_SAMPLE_RATE': 1.0,
'SENTRY_PROFILES_SAMPLE_RATE': 1.0,
'WEAVIATE_GRPC_ENABLED': 'True',
'WEAVIATE_BATCH_SIZE': 100,
'QDRANT_CLIENT_TIMEOUT': 20,
'QDRANT_GRPC_ENABLED': 'False',
'QDRANT_GRPC_PORT': '6334',
'CELERY_BACKEND': 'database',
'LOG_LEVEL': 'INFO',
'LOG_FILE': '',
'LOG_FORMAT': '%(asctime)s.%(msecs)03d %(levelname)s [%(threadName)s] [%(filename)s:%(lineno)d] - %(message)s',
'LOG_DATEFORMAT': '%Y-%m-%d %H:%M:%S',
'HOSTED_OPENAI_QUOTA_LIMIT': 200,
'HOSTED_OPENAI_TRIAL_ENABLED': 'False',
'HOSTED_OPENAI_TRIAL_MODELS': 'gpt-3.5-turbo,gpt-3.5-turbo-1106,gpt-3.5-turbo-instruct,gpt-3.5-turbo-16k,gpt-3.5-turbo-16k-0613,gpt-3.5-turbo-0613,gpt-3.5-turbo-0125,text-davinci-003',
'HOSTED_OPENAI_PAID_ENABLED': 'False',
'HOSTED_OPENAI_PAID_MODELS': 'gpt-4,gpt-4-turbo-preview,gpt-4-turbo-2024-04-09,gpt-4-1106-preview,gpt-4-0125-preview,gpt-3.5-turbo,gpt-3.5-turbo-16k,gpt-3.5-turbo-16k-0613,gpt-3.5-turbo-1106,gpt-3.5-turbo-0613,gpt-3.5-turbo-0125,gpt-3.5-turbo-instruct,text-davinci-003',
'HOSTED_AZURE_OPENAI_ENABLED': 'False',
'HOSTED_AZURE_OPENAI_QUOTA_LIMIT': 200,
'HOSTED_ANTHROPIC_QUOTA_LIMIT': 600000,
'HOSTED_ANTHROPIC_TRIAL_ENABLED': 'False',
'HOSTED_ANTHROPIC_PAID_ENABLED': 'False',
'HOSTED_MODERATION_ENABLED': 'False',
'HOSTED_MODERATION_PROVIDERS': '',
'HOSTED_FETCH_APP_TEMPLATES_MODE': 'remote',
'HOSTED_FETCH_APP_TEMPLATES_REMOTE_DOMAIN': 'https://tmpl.dify.ai',
'CLEAN_DAY_SETTING': 30,
'UPLOAD_FILE_SIZE_LIMIT': 15,
'UPLOAD_FILE_BATCH_LIMIT': 5,
'UPLOAD_IMAGE_FILE_SIZE_LIMIT': 10,
'OUTPUT_MODERATION_BUFFER_SIZE': 300,
'MULTIMODAL_SEND_IMAGE_FORMAT': 'base64',
'INVITE_EXPIRY_HOURS': 72,
'BILLING_ENABLED': 'False',
'CAN_REPLACE_LOGO': 'False',
'MODEL_LB_ENABLED': 'False',
'ETL_TYPE': 'dify',
'KEYWORD_STORE': 'jieba',
'BATCH_UPLOAD_LIMIT': 20,
'CODE_EXECUTION_ENDPOINT': 'http://sandbox:8194',
'CODE_EXECUTION_API_KEY': 'dify-sandbox',
'TOOL_ICON_CACHE_MAX_AGE': 3600,
'MILVUS_DATABASE': 'default',
'KEYWORD_DATA_SOURCE_TYPE': 'database',
'INNER_API': 'False',
'ENTERPRISE_ENABLED': 'False',
'INDEXING_MAX_SEGMENTATION_TOKENS_LENGTH': 1000,
'WORKFLOW_MAX_EXECUTION_STEPS': 500,
'WORKFLOW_MAX_EXECUTION_TIME': 1200,
'WORKFLOW_CALL_MAX_DEPTH': 5,
}
def get_env(key):
return os.environ.get(key, DEFAULTS.get(key))
def get_bool_env(key):
value = get_env(key)
return value.lower() == 'true' if value is not None else False
def get_cors_allow_origins(env, default):
cors_allow_origins = []
if get_env(env):
for origin in get_env(env).split(','):
cors_allow_origins.append(origin)
else:
cors_allow_origins = [default]
return cors_allow_origins
class Config:
"""Application configuration class."""
def __init__(self):
# ------------------------
# General Configurations.
# ------------------------
self.CURRENT_VERSION = "0.6.10"
self.COMMIT_SHA = get_env('COMMIT_SHA')
self.EDITION = get_env('EDITION')
self.DEPLOY_ENV = get_env('DEPLOY_ENV')
self.TESTING = False
self.LOG_LEVEL = get_env('LOG_LEVEL')
self.LOG_FILE = get_env('LOG_FILE')
self.LOG_FORMAT = get_env('LOG_FORMAT')
self.LOG_DATEFORMAT = get_env('LOG_DATEFORMAT')
self.API_COMPRESSION_ENABLED = get_bool_env('API_COMPRESSION_ENABLED')
# The backend URL prefix of the console API.
# used to concatenate the login authorization callback or notion integration callback.
self.CONSOLE_API_URL = get_env('CONSOLE_API_URL')
# The front-end URL prefix of the console web.
# used to concatenate some front-end addresses and for CORS configuration use.
self.CONSOLE_WEB_URL = get_env('CONSOLE_WEB_URL')
# WebApp Url prefix.
# used to display WebAPP API Base Url to the front-end.
self.APP_WEB_URL = get_env('APP_WEB_URL')
# Service API Url prefix.
# used to display Service API Base Url to the front-end.
self.SERVICE_API_URL = get_env('SERVICE_API_URL')
# File preview or download Url prefix.
# used to display File preview or download Url to the front-end or as Multi-model inputs;
# Url is signed and has expiration time.
self.FILES_URL = get_env('FILES_URL') if get_env('FILES_URL') else self.CONSOLE_API_URL
# File Access Time specifies a time interval in seconds for the file to be accessed.
# The default value is 300 seconds.
self.FILES_ACCESS_TIMEOUT = int(get_env('FILES_ACCESS_TIMEOUT'))
# Your App secret key will be used for securely signing the session cookie
# Make sure you are changing this key for your deployment with a strong key.
# You can generate a strong key using `openssl rand -base64 42`.
# Alternatively you can set it with `SECRET_KEY` environment variable.
self.SECRET_KEY = get_env('SECRET_KEY')
# Enable or disable the inner API.
self.INNER_API = get_bool_env('INNER_API')
# The inner API key is used to authenticate the inner API.
self.INNER_API_KEY = get_env('INNER_API_KEY')
# cors settings
self.CONSOLE_CORS_ALLOW_ORIGINS = get_cors_allow_origins(
'CONSOLE_CORS_ALLOW_ORIGINS', self.CONSOLE_WEB_URL)
self.WEB_API_CORS_ALLOW_ORIGINS = get_cors_allow_origins(
'WEB_API_CORS_ALLOW_ORIGINS', '*')
# check update url
self.CHECK_UPDATE_URL = get_env('CHECK_UPDATE_URL')
# ------------------------
# Database Configurations.
# ------------------------
db_credentials = {
key: get_env(key) for key in
['DB_USERNAME', 'DB_PASSWORD', 'DB_HOST', 'DB_PORT', 'DB_DATABASE', 'DB_CHARSET']
}
self.SQLALCHEMY_DATABASE_URI_SCHEME = get_env('SQLALCHEMY_DATABASE_URI_SCHEME')
db_extras = f"?client_encoding={db_credentials['DB_CHARSET']}" if db_credentials['DB_CHARSET'] else ""
self.SQLALCHEMY_DATABASE_URI = f"{self.SQLALCHEMY_DATABASE_URI_SCHEME}://{db_credentials['DB_USERNAME']}:{db_credentials['DB_PASSWORD']}@{db_credentials['DB_HOST']}:{db_credentials['DB_PORT']}/{db_credentials['DB_DATABASE']}{db_extras}"
self.SQLALCHEMY_ENGINE_OPTIONS = {
'pool_size': int(get_env('SQLALCHEMY_POOL_SIZE')),
'max_overflow': int(get_env('SQLALCHEMY_MAX_OVERFLOW')),
'pool_recycle': int(get_env('SQLALCHEMY_POOL_RECYCLE')),
'pool_pre_ping': get_bool_env('SQLALCHEMY_POOL_PRE_PING'),
'connect_args': {'options': '-c timezone=UTC'},
}
self.SQLALCHEMY_ECHO = get_bool_env('SQLALCHEMY_ECHO')
# ------------------------
# Redis Configurations.
# ------------------------
self.REDIS_HOST = get_env('REDIS_HOST')
self.REDIS_PORT = get_env('REDIS_PORT')
self.REDIS_USERNAME = get_env('REDIS_USERNAME')
self.REDIS_PASSWORD = get_env('REDIS_PASSWORD')
self.REDIS_DB = get_env('REDIS_DB')
self.REDIS_USE_SSL = get_bool_env('REDIS_USE_SSL')
# ------------------------
# Celery worker Configurations.
# ------------------------
self.CELERY_BROKER_URL = get_env('CELERY_BROKER_URL')
self.CELERY_BACKEND = get_env('CELERY_BACKEND')
self.CELERY_RESULT_BACKEND = 'db+{}'.format(self.SQLALCHEMY_DATABASE_URI) \
if self.CELERY_BACKEND == 'database' else self.CELERY_BROKER_URL
self.BROKER_USE_SSL = self.CELERY_BROKER_URL.startswith('rediss://')
# ------------------------
# Code Execution Sandbox Configurations.
# ------------------------
self.CODE_EXECUTION_ENDPOINT = get_env('CODE_EXECUTION_ENDPOINT')
self.CODE_EXECUTION_API_KEY = get_env('CODE_EXECUTION_API_KEY')
# ------------------------
# File Storage Configurations.
# ------------------------
self.STORAGE_TYPE = get_env('STORAGE_TYPE')
self.STORAGE_LOCAL_PATH = get_env('STORAGE_LOCAL_PATH')
# S3 Storage settings
self.S3_ENDPOINT = get_env('S3_ENDPOINT')
self.S3_BUCKET_NAME = get_env('S3_BUCKET_NAME')
self.S3_ACCESS_KEY = get_env('S3_ACCESS_KEY')
self.S3_SECRET_KEY = get_env('S3_SECRET_KEY')
self.S3_REGION = get_env('S3_REGION')
self.S3_ADDRESS_STYLE = get_env('S3_ADDRESS_STYLE')
# Azure Blob Storage settings
self.AZURE_BLOB_ACCOUNT_NAME = get_env('AZURE_BLOB_ACCOUNT_NAME')
self.AZURE_BLOB_ACCOUNT_KEY = get_env('AZURE_BLOB_ACCOUNT_KEY')
self.AZURE_BLOB_CONTAINER_NAME = get_env('AZURE_BLOB_CONTAINER_NAME')
self.AZURE_BLOB_ACCOUNT_URL = get_env('AZURE_BLOB_ACCOUNT_URL')
# Aliyun Storage settings
self.ALIYUN_OSS_BUCKET_NAME = get_env('ALIYUN_OSS_BUCKET_NAME')
self.ALIYUN_OSS_ACCESS_KEY = get_env('ALIYUN_OSS_ACCESS_KEY')
self.ALIYUN_OSS_SECRET_KEY = get_env('ALIYUN_OSS_SECRET_KEY')
self.ALIYUN_OSS_ENDPOINT = get_env('ALIYUN_OSS_ENDPOINT')
self.ALIYUN_OSS_REGION = get_env('ALIYUN_OSS_REGION')
self.ALIYUN_OSS_AUTH_VERSION = get_env('ALIYUN_OSS_AUTH_VERSION')
# Google Cloud Storage settings
self.GOOGLE_STORAGE_BUCKET_NAME = get_env('GOOGLE_STORAGE_BUCKET_NAME')
self.GOOGLE_STORAGE_SERVICE_ACCOUNT_JSON_BASE64 = get_env('GOOGLE_STORAGE_SERVICE_ACCOUNT_JSON_BASE64')
# ------------------------
# Vector Store Configurations.
# Currently, only support: qdrant, milvus, zilliz, weaviate, relyt, pgvector
# ------------------------
self.VECTOR_STORE = get_env('VECTOR_STORE')
self.KEYWORD_STORE = get_env('KEYWORD_STORE')
# qdrant settings
self.QDRANT_URL = get_env('QDRANT_URL')
self.QDRANT_API_KEY = get_env('QDRANT_API_KEY')
self.QDRANT_CLIENT_TIMEOUT = get_env('QDRANT_CLIENT_TIMEOUT')
self.QDRANT_GRPC_ENABLED = get_env('QDRANT_GRPC_ENABLED')
self.QDRANT_GRPC_PORT = get_env('QDRANT_GRPC_PORT')
# milvus / zilliz setting
self.MILVUS_HOST = get_env('MILVUS_HOST')
self.MILVUS_PORT = get_env('MILVUS_PORT')
self.MILVUS_USER = get_env('MILVUS_USER')
self.MILVUS_PASSWORD = get_env('MILVUS_PASSWORD')
self.MILVUS_SECURE = get_env('MILVUS_SECURE')
self.MILVUS_DATABASE = get_env('MILVUS_DATABASE')
# weaviate settings
self.WEAVIATE_ENDPOINT = get_env('WEAVIATE_ENDPOINT')
self.WEAVIATE_API_KEY = get_env('WEAVIATE_API_KEY')
self.WEAVIATE_GRPC_ENABLED = get_bool_env('WEAVIATE_GRPC_ENABLED')
self.WEAVIATE_BATCH_SIZE = int(get_env('WEAVIATE_BATCH_SIZE'))
# relyt settings
self.RELYT_HOST = get_env('RELYT_HOST')
self.RELYT_PORT = get_env('RELYT_PORT')
self.RELYT_USER = get_env('RELYT_USER')
self.RELYT_PASSWORD = get_env('RELYT_PASSWORD')
self.RELYT_DATABASE = get_env('RELYT_DATABASE')
# pgvecto rs settings
self.PGVECTO_RS_HOST = get_env('PGVECTO_RS_HOST')
self.PGVECTO_RS_PORT = get_env('PGVECTO_RS_PORT')
self.PGVECTO_RS_USER = get_env('PGVECTO_RS_USER')
self.PGVECTO_RS_PASSWORD = get_env('PGVECTO_RS_PASSWORD')
self.PGVECTO_RS_DATABASE = get_env('PGVECTO_RS_DATABASE')
# pgvector settings
self.PGVECTOR_HOST = get_env('PGVECTOR_HOST')
self.PGVECTOR_PORT = get_env('PGVECTOR_PORT')
self.PGVECTOR_USER = get_env('PGVECTOR_USER')
self.PGVECTOR_PASSWORD = get_env('PGVECTOR_PASSWORD')
self.PGVECTOR_DATABASE = get_env('PGVECTOR_DATABASE')
# tidb-vector settings
self.TIDB_VECTOR_HOST = get_env('TIDB_VECTOR_HOST')
self.TIDB_VECTOR_PORT = get_env('TIDB_VECTOR_PORT')
self.TIDB_VECTOR_USER = get_env('TIDB_VECTOR_USER')
self.TIDB_VECTOR_PASSWORD = get_env('TIDB_VECTOR_PASSWORD')
self.TIDB_VECTOR_DATABASE = get_env('TIDB_VECTOR_DATABASE')
# chroma settings
self.CHROMA_HOST = get_env('CHROMA_HOST')
self.CHROMA_PORT = get_env('CHROMA_PORT')
self.CHROMA_TENANT = get_env('CHROMA_TENANT')
self.CHROMA_DATABASE = get_env('CHROMA_DATABASE')
self.CHROMA_AUTH_PROVIDER = get_env('CHROMA_AUTH_PROVIDER')
self.CHROMA_AUTH_CREDENTIALS = get_env('CHROMA_AUTH_CREDENTIALS')
# ------------------------
# Mail Configurations.
# ------------------------
self.MAIL_TYPE = get_env('MAIL_TYPE')
self.MAIL_DEFAULT_SEND_FROM = get_env('MAIL_DEFAULT_SEND_FROM')
self.RESEND_API_KEY = get_env('RESEND_API_KEY')
self.RESEND_API_URL = get_env('RESEND_API_URL')
# SMTP settings
self.SMTP_SERVER = get_env('SMTP_SERVER')
self.SMTP_PORT = get_env('SMTP_PORT')
self.SMTP_USERNAME = get_env('SMTP_USERNAME')
self.SMTP_PASSWORD = get_env('SMTP_PASSWORD')
self.SMTP_USE_TLS = get_bool_env('SMTP_USE_TLS')
self.SMTP_OPPORTUNISTIC_TLS = get_bool_env('SMTP_OPPORTUNISTIC_TLS')
# ------------------------
# Workspace Configurations.
# ------------------------
self.INVITE_EXPIRY_HOURS = int(get_env('INVITE_EXPIRY_HOURS'))
# ------------------------
# Sentry Configurations.
# ------------------------
self.SENTRY_DSN = get_env('SENTRY_DSN')
self.SENTRY_TRACES_SAMPLE_RATE = float(get_env('SENTRY_TRACES_SAMPLE_RATE'))
self.SENTRY_PROFILES_SAMPLE_RATE = float(get_env('SENTRY_PROFILES_SAMPLE_RATE'))
# ------------------------
# Business Configurations.
# ------------------------
# multi model send image format, support base64, url, default is base64
self.MULTIMODAL_SEND_IMAGE_FORMAT = get_env('MULTIMODAL_SEND_IMAGE_FORMAT')
# Dataset Configurations.
self.CLEAN_DAY_SETTING = get_env('CLEAN_DAY_SETTING')
# File upload Configurations.
self.UPLOAD_FILE_SIZE_LIMIT = int(get_env('UPLOAD_FILE_SIZE_LIMIT'))
self.UPLOAD_FILE_BATCH_LIMIT = int(get_env('UPLOAD_FILE_BATCH_LIMIT'))
self.UPLOAD_IMAGE_FILE_SIZE_LIMIT = int(get_env('UPLOAD_IMAGE_FILE_SIZE_LIMIT'))
self.BATCH_UPLOAD_LIMIT = get_env('BATCH_UPLOAD_LIMIT')
# RAG ETL Configurations.
self.ETL_TYPE = get_env('ETL_TYPE')
self.UNSTRUCTURED_API_URL = get_env('UNSTRUCTURED_API_URL')
self.UNSTRUCTURED_API_KEY = get_env('UNSTRUCTURED_API_KEY')
self.KEYWORD_DATA_SOURCE_TYPE = get_env('KEYWORD_DATA_SOURCE_TYPE')
# Indexing Configurations.
self.INDEXING_MAX_SEGMENTATION_TOKENS_LENGTH = get_env('INDEXING_MAX_SEGMENTATION_TOKENS_LENGTH')
# Tool Configurations.
self.TOOL_ICON_CACHE_MAX_AGE = get_env('TOOL_ICON_CACHE_MAX_AGE')
self.WORKFLOW_MAX_EXECUTION_STEPS = int(get_env('WORKFLOW_MAX_EXECUTION_STEPS'))
self.WORKFLOW_MAX_EXECUTION_TIME = int(get_env('WORKFLOW_MAX_EXECUTION_TIME'))
self.WORKFLOW_CALL_MAX_DEPTH = int(get_env('WORKFLOW_CALL_MAX_DEPTH'))
# Moderation in app Configurations.
self.OUTPUT_MODERATION_BUFFER_SIZE = int(get_env('OUTPUT_MODERATION_BUFFER_SIZE'))
# Notion integration setting
self.NOTION_CLIENT_ID = get_env('NOTION_CLIENT_ID')
self.NOTION_CLIENT_SECRET = get_env('NOTION_CLIENT_SECRET')
self.NOTION_INTEGRATION_TYPE = get_env('NOTION_INTEGRATION_TYPE')
self.NOTION_INTERNAL_SECRET = get_env('NOTION_INTERNAL_SECRET')
self.NOTION_INTEGRATION_TOKEN = get_env('NOTION_INTEGRATION_TOKEN')
# ------------------------
# Platform Configurations.
# ------------------------
self.GITHUB_CLIENT_ID = get_env('GITHUB_CLIENT_ID')
self.GITHUB_CLIENT_SECRET = get_env('GITHUB_CLIENT_SECRET')
self.GOOGLE_CLIENT_ID = get_env('GOOGLE_CLIENT_ID')
self.GOOGLE_CLIENT_SECRET = get_env('GOOGLE_CLIENT_SECRET')
self.OAUTH_REDIRECT_PATH = get_env('OAUTH_REDIRECT_PATH')
self.HOSTED_OPENAI_API_KEY = get_env('HOSTED_OPENAI_API_KEY')
self.HOSTED_OPENAI_API_BASE = get_env('HOSTED_OPENAI_API_BASE')
self.HOSTED_OPENAI_API_ORGANIZATION = get_env('HOSTED_OPENAI_API_ORGANIZATION')
self.HOSTED_OPENAI_TRIAL_ENABLED = get_bool_env('HOSTED_OPENAI_TRIAL_ENABLED')
self.HOSTED_OPENAI_TRIAL_MODELS = get_env('HOSTED_OPENAI_TRIAL_MODELS')
self.HOSTED_OPENAI_QUOTA_LIMIT = int(get_env('HOSTED_OPENAI_QUOTA_LIMIT'))
self.HOSTED_OPENAI_PAID_ENABLED = get_bool_env('HOSTED_OPENAI_PAID_ENABLED')
self.HOSTED_OPENAI_PAID_MODELS = get_env('HOSTED_OPENAI_PAID_MODELS')
self.HOSTED_AZURE_OPENAI_ENABLED = get_bool_env('HOSTED_AZURE_OPENAI_ENABLED')
self.HOSTED_AZURE_OPENAI_API_KEY = get_env('HOSTED_AZURE_OPENAI_API_KEY')
self.HOSTED_AZURE_OPENAI_API_BASE = get_env('HOSTED_AZURE_OPENAI_API_BASE')
self.HOSTED_AZURE_OPENAI_QUOTA_LIMIT = int(get_env('HOSTED_AZURE_OPENAI_QUOTA_LIMIT'))
self.HOSTED_ANTHROPIC_API_BASE = get_env('HOSTED_ANTHROPIC_API_BASE')
self.HOSTED_ANTHROPIC_API_KEY = get_env('HOSTED_ANTHROPIC_API_KEY')
self.HOSTED_ANTHROPIC_TRIAL_ENABLED = get_bool_env('HOSTED_ANTHROPIC_TRIAL_ENABLED')
self.HOSTED_ANTHROPIC_QUOTA_LIMIT = int(get_env('HOSTED_ANTHROPIC_QUOTA_LIMIT'))
self.HOSTED_ANTHROPIC_PAID_ENABLED = get_bool_env('HOSTED_ANTHROPIC_PAID_ENABLED')
self.HOSTED_MINIMAX_ENABLED = get_bool_env('HOSTED_MINIMAX_ENABLED')
self.HOSTED_SPARK_ENABLED = get_bool_env('HOSTED_SPARK_ENABLED')
self.HOSTED_ZHIPUAI_ENABLED = get_bool_env('HOSTED_ZHIPUAI_ENABLED')
self.HOSTED_MODERATION_ENABLED = get_bool_env('HOSTED_MODERATION_ENABLED')
self.HOSTED_MODERATION_PROVIDERS = get_env('HOSTED_MODERATION_PROVIDERS')
# Fetch app templates mode: remote, builtin, or db (db is only for Dify SaaS); default: remote
self.HOSTED_FETCH_APP_TEMPLATES_MODE = get_env('HOSTED_FETCH_APP_TEMPLATES_MODE')
self.HOSTED_FETCH_APP_TEMPLATES_REMOTE_DOMAIN = get_env('HOSTED_FETCH_APP_TEMPLATES_REMOTE_DOMAIN')
# Model Load Balancing Configurations.
self.MODEL_LB_ENABLED = get_bool_env('MODEL_LB_ENABLED')
# Platform Billing Configurations.
self.BILLING_ENABLED = get_bool_env('BILLING_ENABLED')
# ------------------------
# Enterprise feature Configurations.
# **Before using, please contact business@dify.ai by email to inquire about licensing matters.**
# ------------------------
self.ENTERPRISE_ENABLED = get_bool_env('ENTERPRISE_ENABLED')
self.CAN_REPLACE_LOGO = get_bool_env('CAN_REPLACE_LOGO')
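For orientation, a hedged sketch of the helper semantics these assignments assume; the real get_env/get_bool_env definitions appear earlier in this file and may differ in detail:

import os

DEFAULTS = {}  # placeholder; the real fallback mapping is defined earlier in config.py

def get_env(key):
    # assumption: environment variable first, then the DEFAULTS fallback
    return os.environ.get(key, DEFAULTS.get(key))

def get_bool_env(key):
    # assumption: only the literal string 'true' (case-insensitively) enables a flag
    value = get_env(key)
    return value.lower() == 'true' if value is not None else False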

View File

@@ -1,3 +0,0 @@
from .app_config import DifyConfig
dify_config = DifyConfig()

View File

@@ -1,69 +0,0 @@
from pydantic import Field, computed_field
from pydantic_settings import SettingsConfigDict
from configs.deploy import DeploymentConfig
from configs.enterprise import EnterpriseFeatureConfig
from configs.extra import ExtraServiceConfig
from configs.feature import FeatureConfig
from configs.middleware import MiddlewareConfig
from configs.packaging import PackagingInfo
class DifyConfig(
# Packaging info
PackagingInfo,
# Deployment configs
DeploymentConfig,
# Feature configs
FeatureConfig,
# Middleware configs
MiddlewareConfig,
# Extra service configs
ExtraServiceConfig,
# Enterprise feature configs
# **Before using, please contact business@dify.ai by email to inquire about licensing matters.**
EnterpriseFeatureConfig,
):
DEBUG: bool = Field(default=False, description='whether to enable debug mode.')
model_config = SettingsConfigDict(
# read from dotenv format config file
env_file='.env',
env_file_encoding='utf-8',
frozen=True,
# ignore extra attributes
extra='ignore',
)
CODE_MAX_NUMBER: int = 9223372036854775807
CODE_MIN_NUMBER: int = -9223372036854775808
CODE_MAX_STRING_LENGTH: int = 80000
CODE_MAX_STRING_ARRAY_LENGTH: int = 30
CODE_MAX_OBJECT_ARRAY_LENGTH: int = 30
CODE_MAX_NUMBER_ARRAY_LENGTH: int = 1000
HTTP_REQUEST_MAX_CONNECT_TIMEOUT: int = 300
HTTP_REQUEST_MAX_READ_TIMEOUT: int = 600
HTTP_REQUEST_MAX_WRITE_TIMEOUT: int = 600
HTTP_REQUEST_NODE_MAX_BINARY_SIZE: int = 1024 * 1024 * 10
@computed_field
def HTTP_REQUEST_NODE_READABLE_MAX_BINARY_SIZE(self) -> str:
return f'{self.HTTP_REQUEST_NODE_MAX_BINARY_SIZE / 1024 / 1024:.2f}MB'
HTTP_REQUEST_NODE_MAX_TEXT_SIZE: int = 1024 * 1024
@computed_field
def HTTP_REQUEST_NODE_READABLE_MAX_TEXT_SIZE(self) -> str:
return f'{self.HTTP_REQUEST_NODE_MAX_TEXT_SIZE / 1024 / 1024:.2f}MB'
SSRF_PROXY_HTTP_URL: str | None = None
SSRF_PROXY_HTTPS_URL: str | None = None
MODERATION_BUFFER_SIZE: int = Field(default=300, description='The buffer size for moderation.')
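A minimal usage sketch, assuming pydantic-settings is installed and `configs/__init__.py` instantiates the class once as shown above; field names map one-to-one to environment variables, with `.env` as a fallback:

import os
os.environ['DEBUG'] = 'true'  # real environment variables take precedence over .env entries

from configs import dify_config

print(dify_config.DEBUG)  # True -- pydantic coerces the string 'true' to bool
print(dify_config.HTTP_REQUEST_NODE_READABLE_MAX_BINARY_SIZE)  # '10.00MB'
# dify_config.DEBUG = False  # would raise: frozen=True makes the settings immutable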

View File

@@ -1,27 +0,0 @@
from pydantic import Field
from pydantic_settings import BaseSettings
class DeploymentConfig(BaseSettings):
"""
Deployment configs
"""
APPLICATION_NAME: str = Field(
description='application name',
default='langgenius/dify',
)
TESTING: bool = Field(
description='',
default=False,
)
EDITION: str = Field(
description='deployment edition',
default='SELF_HOSTED',
)
DEPLOY_ENV: str = Field(
description='deployment environment, default to PRODUCTION.',
default='PRODUCTION',
)

View File

@@ -1,19 +0,0 @@
from pydantic import Field
from pydantic_settings import BaseSettings
class EnterpriseFeatureConfig(BaseSettings):
"""
Enterprise feature configs.
**Before using, please contact business@dify.ai by email to inquire about licensing matters.**
"""
ENTERPRISE_ENABLED: bool = Field(
description='whether to enable enterprise features. '
'Before using, please contact business@dify.ai by email to inquire about licensing matters.',
default=False,
)
CAN_REPLACE_LOGO: bool = Field(
description='whether to allow replacing enterprise logo.',
default=False,
)

View File

@@ -1,10 +0,0 @@
from configs.extra.notion_config import NotionConfig
from configs.extra.sentry_config import SentryConfig
class ExtraServiceConfig(
# place the configs in alphabet order
NotionConfig,
SentryConfig,
):
pass

View File

@@ -1,34 +0,0 @@
from typing import Optional
from pydantic import Field
from pydantic_settings import BaseSettings
class NotionConfig(BaseSettings):
"""
Notion integration configs
"""
NOTION_CLIENT_ID: Optional[str] = Field(
description='Notion client ID',
default=None,
)
NOTION_CLIENT_SECRET: Optional[str] = Field(
description='Notion client secret key',
default=None,
)
NOTION_INTEGRATION_TYPE: Optional[str] = Field(
description='Notion integration type, default to None, available values: internal.',
default=None,
)
NOTION_INTERNAL_SECRET: Optional[str] = Field(
description='Notion internal secret key',
default=None,
)
NOTION_INTEGRATION_TOKEN: Optional[str] = Field(
description='Notion integration token',
default=None,
)

View File

@@ -1,24 +0,0 @@
from typing import Optional
from pydantic import Field, NonNegativeFloat
from pydantic_settings import BaseSettings
class SentryConfig(BaseSettings):
"""
Sentry configs
"""
SENTRY_DSN: Optional[str] = Field(
description='Sentry DSN',
default=None,
)
SENTRY_TRACES_SAMPLE_RATE: NonNegativeFloat = Field(
description='Sentry trace sample rate',
default=1.0,
)
SENTRY_PROFILES_SAMPLE_RATE: NonNegativeFloat = Field(
description='Sentry profiles sample rate',
default=1.0,
)

View File

@@ -1,474 +0,0 @@
from typing import Optional
from pydantic import AliasChoices, Field, NonNegativeInt, PositiveInt, computed_field
from pydantic_settings import BaseSettings
from configs.feature.hosted_service import HostedServiceConfig
class SecurityConfig(BaseSettings):
"""
Secret Key configs
"""
SECRET_KEY: Optional[str] = Field(
description='Your app secret key, used for securely signing the session cookie. '
'Make sure you change this key for your deployment with a strong key. '
'You can generate a strong key using `openssl rand -base64 42`. '
'Alternatively you can set it with the `SECRET_KEY` environment variable.',
default=None,
)
RESET_PASSWORD_TOKEN_EXPIRY_HOURS: PositiveInt = Field(
description='Expiry time in hours for reset token',
default=24,
)
class AppExecutionConfig(BaseSettings):
"""
App Execution configs
"""
APP_MAX_EXECUTION_TIME: PositiveInt = Field(
description='execution timeout in seconds for app execution',
default=1200,
)
APP_MAX_ACTIVE_REQUESTS: NonNegativeInt = Field(
description='max active request per app, 0 means unlimited',
default=0,
)
class CodeExecutionSandboxConfig(BaseSettings):
"""
Code Execution Sandbox configs
"""
CODE_EXECUTION_ENDPOINT: str = Field(
description='endpoint URL of code execution service',
default='http://sandbox:8194',
)
CODE_EXECUTION_API_KEY: str = Field(
description='API key for code execution service',
default='dify-sandbox',
)
class EndpointConfig(BaseSettings):
"""
Module URL configs
"""
CONSOLE_API_URL: str = Field(
description='The backend URL prefix of the console API, '
'used to concatenate the login authorization callback or Notion integration callback.',
default='',
)
CONSOLE_WEB_URL: str = Field(
description='The front-end URL prefix of the console web, '
'used to concatenate some front-end addresses and for CORS configuration.',
default='',
)
SERVICE_API_URL: str = Field(
description='Service API URL prefix, '
'used to display the Service API base URL to the front-end.',
default='',
)
APP_WEB_URL: str = Field(
description='WebApp URL prefix, '
'used to display the WebApp base URL to the front-end.',
default='',
)
class FileAccessConfig(BaseSettings):
"""
File Access configs
"""
FILES_URL: str = Field(
description='File preview or download URL prefix, '
'used to display file preview or download URLs to the front-end or as multimodal inputs; '
'the URL is signed and has an expiration time.',
validation_alias=AliasChoices('FILES_URL', 'CONSOLE_API_URL'),
alias_priority=1,
default='',
)
FILES_ACCESS_TIMEOUT: int = Field(
description='timeout in seconds for file accessing',
default=300,
)
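A sketch of the AliasChoices fallback above, with hypothetical URLs: when FILES_URL is unset, the field inherits the console API prefix:

import os
os.environ.pop('FILES_URL', None)
os.environ['CONSOLE_API_URL'] = 'https://console.example.com'  # hypothetical value

cfg = FileAccessConfig()
print(cfg.FILES_URL)  # 'https://console.example.com', read via the CONSOLE_API_URL alias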
class FileUploadConfig(BaseSettings):
"""
File Uploading configs
"""
UPLOAD_FILE_SIZE_LIMIT: NonNegativeInt = Field(
description='size limit in Megabytes for uploading files',
default=15,
)
UPLOAD_FILE_BATCH_LIMIT: NonNegativeInt = Field(
description='batch size limit for uploading files',
default=5,
)
UPLOAD_IMAGE_FILE_SIZE_LIMIT: NonNegativeInt = Field(
description='image file size limit in Megabytes for uploading files',
default=10,
)
BATCH_UPLOAD_LIMIT: NonNegativeInt = Field(
description='', # todo: to be clarified
default=20,
)
class HttpConfig(BaseSettings):
"""
HTTP configs
"""
API_COMPRESSION_ENABLED: bool = Field(
description='whether to enable HTTP response compression of gzip',
default=False,
)
inner_CONSOLE_CORS_ALLOW_ORIGINS: str = Field(
description='comma-separated list of allowed origins for console CORS, falling back to CONSOLE_WEB_URL',
validation_alias=AliasChoices('CONSOLE_CORS_ALLOW_ORIGINS', 'CONSOLE_WEB_URL'),
default='',
)
@computed_field
@property
def CONSOLE_CORS_ALLOW_ORIGINS(self) -> list[str]:
return self.inner_CONSOLE_CORS_ALLOW_ORIGINS.split(',')
inner_WEB_API_CORS_ALLOW_ORIGINS: str = Field(
description='comma-separated list of allowed origins for web API CORS',
validation_alias=AliasChoices('WEB_API_CORS_ALLOW_ORIGINS'),
default='*',
)
@computed_field
@property
def WEB_API_CORS_ALLOW_ORIGINS(self) -> list[str]:
return self.inner_WEB_API_CORS_ALLOW_ORIGINS.split(',')
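A sketch of how the comma-separated values above surface as lists (hypothetical origins):

import os
os.environ['WEB_API_CORS_ALLOW_ORIGINS'] = 'https://a.example,https://b.example'

cfg = HttpConfig()
print(cfg.WEB_API_CORS_ALLOW_ORIGINS)  # ['https://a.example', 'https://b.example']
print(cfg.CONSOLE_CORS_ALLOW_ORIGINS)  # [''] -- an unset value splits to one empty entry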
class InnerAPIConfig(BaseSettings):
"""
Inner API configs
"""
INNER_API: bool = Field(
description='whether to enable the inner API',
default=False,
)
INNER_API_KEY: Optional[str] = Field(
description='The inner API key is used to authenticate the inner API',
default=None,
)
class LoggingConfig(BaseSettings):
"""
Logging configs
"""
LOG_LEVEL: str = Field(
description='Log output level, default to INFO. '
'It is recommended to set it to ERROR for production.',
default='INFO',
)
LOG_FILE: Optional[str] = Field(
description='logging output file path',
default=None,
)
LOG_FORMAT: str = Field(
description='log format',
default='%(asctime)s.%(msecs)03d %(levelname)s [%(threadName)s] [%(filename)s:%(lineno)d] - %(message)s',
)
LOG_DATEFORMAT: Optional[str] = Field(
description='log date format',
default=None,
)
LOG_TZ: Optional[str] = Field(
description='specify log timezone, e.g. America/New_York',
default=None,
)
class ModelLoadBalanceConfig(BaseSettings):
"""
Model load balance configs
"""
MODEL_LB_ENABLED: bool = Field(
description='whether to enable model load balancing',
default=False,
)
class BillingConfig(BaseSettings):
"""
Platform Billing Configurations
"""
BILLING_ENABLED: bool = Field(
description='whether to enable billing',
default=False,
)
class UpdateConfig(BaseSettings):
"""
Update configs
"""
CHECK_UPDATE_URL: str = Field(
description='url for checking updates',
default='https://updates.dify.ai',
)
class WorkflowConfig(BaseSettings):
"""
Workflow feature configs
"""
WORKFLOW_MAX_EXECUTION_STEPS: PositiveInt = Field(
description='max execution steps in single workflow execution',
default=500,
)
WORKFLOW_MAX_EXECUTION_TIME: PositiveInt = Field(
description='max execution time in seconds in single workflow execution',
default=1200,
)
WORKFLOW_CALL_MAX_DEPTH: PositiveInt = Field(
description='max depth of calling in single workflow execution',
default=5,
)
class OAuthConfig(BaseSettings):
"""
oauth configs
"""
OAUTH_REDIRECT_PATH: str = Field(
description='redirect path for OAuth',
default='/console/api/oauth/authorize',
)
GITHUB_CLIENT_ID: Optional[str] = Field(
description='GitHub client id for OAuth',
default=None,
)
GITHUB_CLIENT_SECRET: Optional[str] = Field(
description='GitHub client secret key for OAuth',
default=None,
)
GOOGLE_CLIENT_ID: Optional[str] = Field(
description='Google client id for OAuth',
default=None,
)
GOOGLE_CLIENT_SECRET: Optional[str] = Field(
description='Google client secret key for OAuth',
default=None,
)
class ModerationConfig(BaseSettings):
"""
Moderation in app configs.
"""
# todo: to be clarified in usage and unit
OUTPUT_MODERATION_BUFFER_SIZE: PositiveInt = Field(
description='buffer size for moderation',
default=300,
)
class ToolConfig(BaseSettings):
"""
Tool configs
"""
TOOL_ICON_CACHE_MAX_AGE: PositiveInt = Field(
description='max age in seconds for tool icon caching',
default=3600,
)
class MailConfig(BaseSettings):
"""
Mail Configurations
"""
MAIL_TYPE: Optional[str] = Field(
description='Mail provider type name, default to None, available values are `smtp` and `resend`.',
default=None,
)
MAIL_DEFAULT_SEND_FROM: Optional[str] = Field(
description='default email address for sending from',
default=None,
)
RESEND_API_KEY: Optional[str] = Field(
description='API key for Resend',
default=None,
)
RESEND_API_URL: Optional[str] = Field(
description='API URL for Resend',
default=None,
)
SMTP_SERVER: Optional[str] = Field(
description='smtp server host',
default=None,
)
SMTP_PORT: Optional[int] = Field(
description='smtp server port',
default=465,
)
SMTP_USERNAME: Optional[str] = Field(
description='smtp server username',
default=None,
)
SMTP_PASSWORD: Optional[str] = Field(
description='smtp server password',
default=None,
)
SMTP_USE_TLS: bool = Field(
description='whether to use TLS connection to smtp server',
default=False,
)
SMTP_OPPORTUNISTIC_TLS: bool = Field(
description='whether to use opportunistic TLS connection to smtp server',
default=False,
)
class RagEtlConfig(BaseSettings):
"""
RAG ETL Configurations.
"""
ETL_TYPE: str = Field(
description='RAG ETL type name, default to `dify`, available values are `dify` and `Unstructured`.',
default='dify',
)
KEYWORD_DATA_SOURCE_TYPE: str = Field(
description='source type for keyword data, default to `database`, available values are `database`.',
default='database',
)
UNSTRUCTURED_API_URL: Optional[str] = Field(
description='API URL for Unstructured',
default=None,
)
UNSTRUCTURED_API_KEY: Optional[str] = Field(
description='API key for Unstructured',
default=None,
)
class DataSetConfig(BaseSettings):
"""
Dataset configs
"""
CLEAN_DAY_SETTING: PositiveInt = Field(
description='interval in days for cleaning up dataset',
default=30,
)
DATASET_OPERATOR_ENABLED: bool = Field(
description='whether to enable dataset operator',
default=False,
)
class WorkspaceConfig(BaseSettings):
"""
Workspace configs
"""
INVITE_EXPIRY_HOURS: PositiveInt = Field(
description='workspace invitation expiration in hours',
default=72,
)
class IndexingConfig(BaseSettings):
"""
Indexing configs.
"""
INDEXING_MAX_SEGMENTATION_TOKENS_LENGTH: PositiveInt = Field(
description='max segmentation token length for indexing',
default=1000,
)
class ImageFormatConfig(BaseSettings):
MULTIMODAL_SEND_IMAGE_FORMAT: str = Field(
description='multimodal image sending format, supports base64 and url; default is base64',
default='base64',
)
class CeleryBeatConfig(BaseSettings):
CELERY_BEAT_SCHEDULER_TIME: int = Field(
description='the time of the celery scheduler, default to 1 day',
default=1,
)
class FeatureConfig(
# place the configs in alphabet order
AppExecutionConfig,
BillingConfig,
CodeExecutionSandboxConfig,
DataSetConfig,
EndpointConfig,
FileAccessConfig,
FileUploadConfig,
HttpConfig,
ImageFormatConfig,
InnerAPIConfig,
IndexingConfig,
LoggingConfig,
MailConfig,
ModelLoadBalanceConfig,
ModerationConfig,
OAuthConfig,
RagEtlConfig,
SecurityConfig,
ToolConfig,
UpdateConfig,
WorkflowConfig,
WorkspaceConfig,
# hosted services config
HostedServiceConfig,
CeleryBeatConfig,
):
pass

View File

@@ -1,209 +0,0 @@
from typing import Optional
from pydantic import Field, NonNegativeInt
from pydantic_settings import BaseSettings
class HostedOpenAiConfig(BaseSettings):
"""
Hosted OpenAI service config
"""
HOSTED_OPENAI_API_KEY: Optional[str] = Field(
description='',
default=None,
)
HOSTED_OPENAI_API_BASE: Optional[str] = Field(
description='',
default=None,
)
HOSTED_OPENAI_API_ORGANIZATION: Optional[str] = Field(
description='',
default=None,
)
HOSTED_OPENAI_TRIAL_ENABLED: bool = Field(
description='',
default=False,
)
HOSTED_OPENAI_TRIAL_MODELS: str = Field(
description='',
default='gpt-3.5-turbo,'
'gpt-3.5-turbo-1106,'
'gpt-3.5-turbo-instruct,'
'gpt-3.5-turbo-16k,'
'gpt-3.5-turbo-16k-0613,'
'gpt-3.5-turbo-0613,'
'gpt-3.5-turbo-0125,'
'text-davinci-003',
)
HOSTED_OPENAI_QUOTA_LIMIT: NonNegativeInt = Field(
description='',
default=200,
)
HOSTED_OPENAI_PAID_ENABLED: bool = Field(
description='',
default=False,
)
HOSTED_OPENAI_PAID_MODELS: str = Field(
description='',
default='gpt-4,'
'gpt-4-turbo-preview,'
'gpt-4-turbo-2024-04-09,'
'gpt-4-1106-preview,'
'gpt-4-0125-preview,'
'gpt-3.5-turbo,'
'gpt-3.5-turbo-16k,'
'gpt-3.5-turbo-16k-0613,'
'gpt-3.5-turbo-1106,'
'gpt-3.5-turbo-0613,'
'gpt-3.5-turbo-0125,'
'gpt-3.5-turbo-instruct,'
'text-davinci-003',
)
class HostedAzureOpenAiConfig(BaseSettings):
"""
Hosted Azure OpenAI service config
"""
HOSTED_AZURE_OPENAI_ENABLED: bool = Field(
description='',
default=False,
)
HOSTED_AZURE_OPENAI_API_KEY: Optional[str] = Field(
description='',
default=None,
)
HOSTED_AZURE_OPENAI_API_BASE: Optional[str] = Field(
description='',
default=None,
)
HOSTED_AZURE_OPENAI_QUOTA_LIMIT: NonNegativeInt = Field(
description='',
default=200,
)
class HostedAnthropicConfig(BaseSettings):
"""
Hosted Anthropic service config
"""
HOSTED_ANTHROPIC_API_BASE: Optional[str] = Field(
description='',
default=None,
)
HOSTED_ANTHROPIC_API_KEY: Optional[str] = Field(
description='',
default=None,
)
HOSTED_ANTHROPIC_TRIAL_ENABLED: bool = Field(
description='',
default=False,
)
HOSTED_ANTHROPIC_QUOTA_LIMIT: NonNegativeInt = Field(
description='',
default=600000,
)
HOSTED_ANTHROPIC_PAID_ENABLED: bool = Field(
description='',
default=False,
)
class HostedMinmaxConfig(BaseSettings):
"""
Hosted Minimax service config
"""
HOSTED_MINIMAX_ENABLED: bool = Field(
description='',
default=False,
)
class HostedSparkConfig(BaseSettings):
"""
Hosted Spark service config
"""
HOSTED_SPARK_ENABLED: bool = Field(
description='',
default=False,
)
class HostedZhipuAIConfig(BaseSettings):
"""
Hosted ZhipuAI service config
"""
HOSTED_ZHIPUAI_ENABLED: bool = Field(
description='',
default=False,
)
class HostedModerationConfig(BaseSettings):
"""
Hosted Moderation service config
"""
HOSTED_MODERATION_ENABLED: bool = Field(
description='',
default=False,
)
HOSTED_MODERATION_PROVIDERS: str = Field(
description='',
default='',
)
class HostedFetchAppTemplateConfig(BaseSettings):
"""
Hosted app template fetching config
"""
HOSTED_FETCH_APP_TEMPLATES_MODE: str = Field(
description='the mode for fetching app templates,'
' default to remote,'
' available values: remote, db, builtin',
default='remote',
)
HOSTED_FETCH_APP_TEMPLATES_REMOTE_DOMAIN: str = Field(
description='the domain for fetching remote app templates',
default='https://tmpl.dify.ai',
)
class HostedServiceConfig(
# place the configs in alphabet order
HostedAnthropicConfig,
HostedAzureOpenAiConfig,
HostedFetchAppTemplateConfig,
HostedMinmaxConfig,
HostedOpenAiConfig,
HostedSparkConfig,
HostedZhipuAIConfig,
# moderation
HostedModerationConfig,
):
pass

View File

@@ -1,203 +0,0 @@
from typing import Any, Optional
from urllib.parse import quote_plus
from pydantic import Field, NonNegativeInt, PositiveInt, computed_field
from pydantic_settings import BaseSettings
from configs.middleware.cache.redis_config import RedisConfig
from configs.middleware.storage.aliyun_oss_storage_config import AliyunOSSStorageConfig
from configs.middleware.storage.amazon_s3_storage_config import S3StorageConfig
from configs.middleware.storage.azure_blob_storage_config import AzureBlobStorageConfig
from configs.middleware.storage.google_cloud_storage_config import GoogleCloudStorageConfig
from configs.middleware.storage.oci_storage_config import OCIStorageConfig
from configs.middleware.storage.tencent_cos_storage_config import TencentCloudCOSStorageConfig
from configs.middleware.vdb.analyticdb_config import AnalyticdbConfig
from configs.middleware.vdb.chroma_config import ChromaConfig
from configs.middleware.vdb.milvus_config import MilvusConfig
from configs.middleware.vdb.myscale_config import MyScaleConfig
from configs.middleware.vdb.opensearch_config import OpenSearchConfig
from configs.middleware.vdb.oracle_config import OracleConfig
from configs.middleware.vdb.pgvector_config import PGVectorConfig
from configs.middleware.vdb.pgvectors_config import PGVectoRSConfig
from configs.middleware.vdb.qdrant_config import QdrantConfig
from configs.middleware.vdb.relyt_config import RelytConfig
from configs.middleware.vdb.tencent_vector_config import TencentVectorDBConfig
from configs.middleware.vdb.tidb_vector_config import TiDBVectorConfig
from configs.middleware.vdb.weaviate_config import WeaviateConfig
class StorageConfig(BaseSettings):
STORAGE_TYPE: str = Field(
description='storage type,'
' default to `local`,'
' available values are `local`, `s3`, `azure-blob`, `aliyun-oss`, `google-storage`.',
default='local',
)
STORAGE_LOCAL_PATH: str = Field(
description='local storage path',
default='storage',
)
class VectorStoreConfig(BaseSettings):
VECTOR_STORE: Optional[str] = Field(
description='vector store type',
default=None,
)
class KeywordStoreConfig(BaseSettings):
KEYWORD_STORE: str = Field(
description='keyword store type',
default='jieba',
)
class DatabaseConfig(BaseSettings):
DB_HOST: str = Field(
description='db host',
default='localhost',
)
DB_PORT: PositiveInt = Field(
description='db port',
default=5432,
)
DB_USERNAME: str = Field(
description='db username',
default='postgres',
)
DB_PASSWORD: str = Field(
description='db password',
default='',
)
DB_DATABASE: str = Field(
description='db database',
default='dify',
)
DB_CHARSET: str = Field(
description='db charset',
default='',
)
DB_EXTRAS: str = Field(
description='db extras options. Example: keepalives_idle=60&keepalives=1',
default='',
)
SQLALCHEMY_DATABASE_URI_SCHEME: str = Field(
description='db uri scheme',
default='postgresql',
)
@computed_field
@property
def SQLALCHEMY_DATABASE_URI(self) -> str:
db_extras = (
f"{self.DB_EXTRAS}&client_encoding={self.DB_CHARSET}"
if self.DB_CHARSET
else self.DB_EXTRAS
).strip("&")
db_extras = f"?{db_extras}" if db_extras else ""
return (f"{self.SQLALCHEMY_DATABASE_URI_SCHEME}://"
f"{quote_plus(self.DB_USERNAME)}:{quote_plus(self.DB_PASSWORD)}@{self.DB_HOST}:{self.DB_PORT}/{self.DB_DATABASE}"
f"{db_extras}")
SQLALCHEMY_POOL_SIZE: NonNegativeInt = Field(
description='pool size of SqlAlchemy',
default=30,
)
SQLALCHEMY_MAX_OVERFLOW: NonNegativeInt = Field(
description='max overflows for SqlAlchemy',
default=10,
)
SQLALCHEMY_POOL_RECYCLE: NonNegativeInt = Field(
description='SqlAlchemy pool recycle',
default=3600,
)
SQLALCHEMY_POOL_PRE_PING: bool = Field(
description='whether to enable pool pre-ping in SqlAlchemy',
default=False,
)
SQLALCHEMY_ECHO: bool | str = Field(
description='whether to enable SqlAlchemy echo',
default=False,
)
@computed_field
@property
def SQLALCHEMY_ENGINE_OPTIONS(self) -> dict[str, Any]:
return {
'pool_size': self.SQLALCHEMY_POOL_SIZE,
'max_overflow': self.SQLALCHEMY_MAX_OVERFLOW,
'pool_recycle': self.SQLALCHEMY_POOL_RECYCLE,
'pool_pre_ping': self.SQLALCHEMY_POOL_PRE_PING,
'connect_args': {'options': '-c timezone=UTC'},
}
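A worked sketch of the URI assembly above, with hypothetical credentials; note the quote_plus escaping of the password and the charset folded into the query string:

cfg = DatabaseConfig(DB_PASSWORD='p@ss:word', DB_CHARSET='utf8')  # hypothetical values
print(cfg.SQLALCHEMY_DATABASE_URI)
# postgresql://postgres:p%40ss%3Aword@localhost:5432/dify?client_encoding=utf8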
class CeleryConfig(DatabaseConfig):
CELERY_BACKEND: str = Field(
description='Celery backend, available values are `database`, `redis`',
default='database',
)
CELERY_BROKER_URL: Optional[str] = Field(
description='CELERY_BROKER_URL',
default=None,
)
@computed_field
@property
def CELERY_RESULT_BACKEND(self) -> str | None:
return 'db+{}'.format(self.SQLALCHEMY_DATABASE_URI) \
if self.CELERY_BACKEND == 'database' else self.CELERY_BROKER_URL
@computed_field
@property
def BROKER_USE_SSL(self) -> bool:
return self.CELERY_BROKER_URL.startswith('rediss://') if self.CELERY_BROKER_URL else False
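A sketch of the two computed branches above: the default `database` backend piggybacks on the SQLAlchemy URI, and a rediss:// broker URL (hypothetical here) switches SSL on:

cfg = CeleryConfig(CELERY_BROKER_URL='rediss://:secret@redis.example:6379/1')
print(cfg.CELERY_RESULT_BACKEND)  # 'db+postgresql://...' since CELERY_BACKEND defaults to 'database'
print(cfg.BROKER_USE_SSL)         # True, inferred from the rediss:// scheme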
class MiddlewareConfig(
# place the configs in alphabet order
CeleryConfig,
DatabaseConfig,
KeywordStoreConfig,
RedisConfig,
# configs of storage and storage providers
StorageConfig,
AliyunOSSStorageConfig,
AzureBlobStorageConfig,
GoogleCloudStorageConfig,
TencentCloudCOSStorageConfig,
S3StorageConfig,
OCIStorageConfig,
# configs of vdb and vdb providers
VectorStoreConfig,
AnalyticdbConfig,
ChromaConfig,
MilvusConfig,
MyScaleConfig,
OpenSearchConfig,
OracleConfig,
PGVectorConfig,
PGVectoRSConfig,
QdrantConfig,
RelytConfig,
TencentVectorDBConfig,
TiDBVectorConfig,
WeaviateConfig,
):
pass

View File

@@ -1,39 +0,0 @@
from typing import Optional
from pydantic import Field, NonNegativeInt, PositiveInt
from pydantic_settings import BaseSettings
class RedisConfig(BaseSettings):
"""
Redis configs
"""
REDIS_HOST: str = Field(
description='Redis host',
default='localhost',
)
REDIS_PORT: PositiveInt = Field(
description='Redis port',
default=6379,
)
REDIS_USERNAME: Optional[str] = Field(
description='Redis username',
default=None,
)
REDIS_PASSWORD: Optional[str] = Field(
description='Redis password',
default=None,
)
REDIS_DB: NonNegativeInt = Field(
description='Redis database id, default to 0',
default=0,
)
REDIS_USE_SSL: bool = Field(
description='whether to use SSL for Redis connection',
default=False,
)
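A sketch of wiring these fields into a redis-py client; the `redis` package and its 4.x parameter names are assumptions, not part of this module:

import redis

cfg = RedisConfig()
client = redis.Redis(
    host=cfg.REDIS_HOST,
    port=cfg.REDIS_PORT,
    username=cfg.REDIS_USERNAME,
    password=cfg.REDIS_PASSWORD,
    db=cfg.REDIS_DB,
    ssl=cfg.REDIS_USE_SSL,
)
client.ping()  # raises if the connection settings are wrong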

View File

@@ -1,40 +0,0 @@
from typing import Optional
from pydantic import Field
from pydantic_settings import BaseSettings
class AliyunOSSStorageConfig(BaseSettings):
"""
Aliyun storage configs
"""
ALIYUN_OSS_BUCKET_NAME: Optional[str] = Field(
description='Aliyun OSS bucket name',
default=None,
)
ALIYUN_OSS_ACCESS_KEY: Optional[str] = Field(
description='Aliyun OSS access key',
default=None,
)
ALIYUN_OSS_SECRET_KEY: Optional[str] = Field(
description='Aliyun OSS secret key',
default=None,
)
ALIYUN_OSS_ENDPOINT: Optional[str] = Field(
description='Aliyun OSS endpoint URL',
default=None,
)
ALIYUN_OSS_REGION: Optional[str] = Field(
description='Aliyun OSS region',
default=None,
)
ALIYUN_OSS_AUTH_VERSION: Optional[str] = Field(
description='Aliyun OSS authentication version',
default=None,
)

View File

@@ -1,45 +0,0 @@
from typing import Optional
from pydantic import Field
from pydantic_settings import BaseSettings
class S3StorageConfig(BaseSettings):
"""
S3 storage configs
"""
S3_ENDPOINT: Optional[str] = Field(
description='S3 storage endpoint',
default=None,
)
S3_REGION: Optional[str] = Field(
description='S3 storage region',
default=None,
)
S3_BUCKET_NAME: Optional[str] = Field(
description='S3 storage bucket name',
default=None,
)
S3_ACCESS_KEY: Optional[str] = Field(
description='S3 storage access key',
default=None,
)
S3_SECRET_KEY: Optional[str] = Field(
description='S3 storage secret key',
default=None,
)
S3_ADDRESS_STYLE: str = Field(
description='S3 storage address style',
default='auto',
)
S3_USE_AWS_MANAGED_IAM: bool = Field(
description='whether to use aws managed IAM for S3',
default=False,
)

View File

@@ -1,30 +0,0 @@
from typing import Optional
from pydantic import Field
from pydantic_settings import BaseSettings
class AzureBlobStorageConfig(BaseSettings):
"""
Azure Blob storage configs
"""
AZURE_BLOB_ACCOUNT_NAME: Optional[str] = Field(
description='Azure Blob account name',
default=None,
)
AZURE_BLOB_ACCOUNT_KEY: Optional[str] = Field(
description='Azure Blob account key',
default=None,
)
AZURE_BLOB_CONTAINER_NAME: Optional[str] = Field(
description='Azure Blob container name',
default=None,
)
AZURE_BLOB_ACCOUNT_URL: Optional[str] = Field(
description='Azure Blob account URL',
default=None,
)

View File

@@ -1,20 +0,0 @@
from typing import Optional
from pydantic import Field
from pydantic_settings import BaseSettings
class GoogleCloudStorageConfig(BaseSettings):
"""
Google Cloud storage configs
"""
GOOGLE_STORAGE_BUCKET_NAME: Optional[str] = Field(
description='Google Cloud storage bucket name',
default=None,
)
GOOGLE_STORAGE_SERVICE_ACCOUNT_JSON_BASE64: Optional[str] = Field(
description='Google Cloud storage service account json base64',
default=None,
)

View File

@@ -1,36 +0,0 @@
from typing import Optional
from pydantic import Field
from pydantic_settings import BaseSettings
class OCIStorageConfig(BaseSettings):
"""
OCI storage configs
"""
OCI_ENDPOINT: Optional[str] = Field(
description='OCI storage endpoint',
default=None,
)
OCI_REGION: Optional[str] = Field(
description='OCI storage region',
default=None,
)
OCI_BUCKET_NAME: Optional[str] = Field(
description='OCI storage bucket name',
default=None,
)
OCI_ACCESS_KEY: Optional[str] = Field(
description='OCI storage access key',
default=None,
)
OCI_SECRET_KEY: Optional[str] = Field(
description='OCI storage secret key',
default=None,
)

View File

@@ -1,35 +0,0 @@
from typing import Optional
from pydantic import Field
from pydantic_settings import BaseSettings
class TencentCloudCOSStorageConfig(BaseSettings):
"""
Tencent Cloud COS storage configs
"""
TENCENT_COS_BUCKET_NAME: Optional[str] = Field(
description='Tencent Cloud COS bucket name',
default=None,
)
TENCENT_COS_REGION: Optional[str] = Field(
description='Tencent Cloud COS region',
default=None,
)
TENCENT_COS_SECRET_ID: Optional[str] = Field(
description='Tencent Cloud COS secret id',
default=None,
)
TENCENT_COS_SECRET_KEY: Optional[str] = Field(
description='Tencent Cloud COS secret key',
default=None,
)
TENCENT_COS_SCHEME: Optional[str] = Field(
description='Tencent Cloud COS scheme',
default=None,
)

View File

@@ -1,44 +0,0 @@
from typing import Optional
from pydantic import BaseModel, Field
class AnalyticdbConfig(BaseModel):
"""
Configuration for connecting to AnalyticDB.
Refer to the following documentation for details on obtaining credentials:
https://www.alibabacloud.com/help/en/analyticdb-for-postgresql/getting-started/create-an-instance-instances-with-vector-engine-optimization-enabled
"""
ANALYTICDB_KEY_ID : Optional[str] = Field(
default=None,
description="The Access Key ID provided by Alibaba Cloud for authentication."
)
ANALYTICDB_KEY_SECRET : Optional[str] = Field(
default=None,
description="The Secret Access Key corresponding to the Access Key ID for secure access."
)
ANALYTICDB_REGION_ID : Optional[str] = Field(
default=None,
description="The region where the AnalyticDB instance is deployed (e.g., 'cn-hangzhou')."
)
ANALYTICDB_INSTANCE_ID : Optional[str] = Field(
default=None,
description="The unique identifier of the AnalyticDB instance you want to connect to (e.g., 'gp-ab123456').."
)
ANALYTICDB_ACCOUNT : Optional[str] = Field(
default=None,
description="The account name used to log in to the AnalyticDB instance."
)
ANALYTICDB_PASSWORD : Optional[str] = Field(
default=None,
description="The password associated with the AnalyticDB account for authentication."
)
ANALYTICDB_NAMESPACE : Optional[str] = Field(
default=None,
description="The namespace within AnalyticDB for schema isolation."
)
ANALYTICDB_NAMESPACE_PASSWORD : Optional[str] = Field(
default=None,
description="The password for accessing the specified namespace within the AnalyticDB instance."
)

View File

@@ -1,40 +0,0 @@
from typing import Optional
from pydantic import Field, PositiveInt
from pydantic_settings import BaseSettings
class ChromaConfig(BaseSettings):
"""
Chroma configs
"""
CHROMA_HOST: Optional[str] = Field(
description='Chroma host',
default=None,
)
CHROMA_PORT: PositiveInt = Field(
description='Chroma port',
default=8000,
)
CHROMA_TENANT: Optional[str] = Field(
description='Chroma tenant',
default=None,
)
CHROMA_DATABASE: Optional[str] = Field(
description='Chroma database',
default=None,
)
CHROMA_AUTH_PROVIDER: Optional[str] = Field(
description='Chroma authentication provider',
default=None,
)
CHROMA_AUTH_CREDENTIALS: Optional[str] = Field(
description='Chroma authentication credentials',
default=None,
)

View File

@@ -1,40 +0,0 @@
from typing import Optional
from pydantic import Field, PositiveInt
from pydantic_settings import BaseSettings
class MilvusConfig(BaseSettings):
"""
Milvus configs
"""
MILVUS_HOST: Optional[str] = Field(
description='Milvus host',
default=None,
)
MILVUS_PORT: PositiveInt = Field(
description='Milvus RESTful API port',
default=9091,
)
MILVUS_USER: Optional[str] = Field(
description='Milvus user',
default=None,
)
MILVUS_PASSWORD: Optional[str] = Field(
description='Milvus password',
default=None,
)
MILVUS_SECURE: bool = Field(
description='whether to use SSL connection for Milvus',
default=False,
)
MILVUS_DATABASE: str = Field(
description='Milvus database, default to `default`',
default='default',
)

View File

@@ -1,38 +0,0 @@
from pydantic import BaseModel, Field, PositiveInt
class MyScaleConfig(BaseModel):
"""
MyScale configs
"""
MYSCALE_HOST: str = Field(
description='MyScale host',
default='localhost',
)
MYSCALE_PORT: PositiveInt = Field(
description='MyScale port',
default=8123,
)
MYSCALE_USER: str = Field(
description='MyScale user',
default='default',
)
MYSCALE_PASSWORD: str = Field(
description='MyScale password',
default='',
)
MYSCALE_DATABASE: str = Field(
description='MyScale database name',
default='default',
)
MYSCALE_FTS_PARAMS: str = Field(
description='MyScale fts index parameters',
default='',
)

View File

@@ -1,35 +0,0 @@
from typing import Optional
from pydantic import Field, PositiveInt
from pydantic_settings import BaseSettings
class OpenSearchConfig(BaseSettings):
"""
OpenSearch configs
"""
OPENSEARCH_HOST: Optional[str] = Field(
description='OpenSearch host',
default=None,
)
OPENSEARCH_PORT: PositiveInt = Field(
description='OpenSearch port',
default=9200,
)
OPENSEARCH_USER: Optional[str] = Field(
description='OpenSearch user',
default=None,
)
OPENSEARCH_PASSWORD: Optional[str] = Field(
description='OpenSearch password',
default=None,
)
OPENSEARCH_SECURE: bool = Field(
description='whether to use SSL connection for OpenSearch',
default=False,
)

View File

@@ -1,35 +0,0 @@
from typing import Optional
from pydantic import Field, PositiveInt
from pydantic_settings import BaseSettings
class OracleConfig(BaseSettings):
"""
ORACLE configs
"""
ORACLE_HOST: Optional[str] = Field(
description='ORACLE host',
default=None,
)
ORACLE_PORT: Optional[PositiveInt] = Field(
description='ORACLE port',
default=1521,
)
ORACLE_USER: Optional[str] = Field(
description='ORACLE user',
default=None,
)
ORACLE_PASSWORD: Optional[str] = Field(
description='ORACLE password',
default=None,
)
ORACLE_DATABASE: Optional[str] = Field(
description='ORACLE database',
default=None,
)

View File

@@ -1,35 +0,0 @@
from typing import Optional
from pydantic import Field, PositiveInt
from pydantic_settings import BaseSettings
class PGVectorConfig(BaseSettings):
"""
PGVector configs
"""
PGVECTOR_HOST: Optional[str] = Field(
description='PGVector host',
default=None,
)
PGVECTOR_PORT: Optional[PositiveInt] = Field(
description='PGVector port',
default=5433,
)
PGVECTOR_USER: Optional[str] = Field(
description='PGVector user',
default=None,
)
PGVECTOR_PASSWORD: Optional[str] = Field(
description='PGVector password',
default=None,
)
PGVECTOR_DATABASE: Optional[str] = Field(
description='PGVector database',
default=None,
)

View File

@@ -1,35 +0,0 @@
from typing import Optional
from pydantic import Field, PositiveInt
from pydantic_settings import BaseSettings
class PGVectoRSConfig(BaseSettings):
"""
PGVectoRS configs
"""
PGVECTO_RS_HOST: Optional[str] = Field(
description='PGVectoRS host',
default=None,
)
PGVECTO_RS_PORT: Optional[PositiveInt] = Field(
description='PGVectoRS port',
default=5431,
)
PGVECTO_RS_USER: Optional[str] = Field(
description='PGVectoRS user',
default=None,
)
PGVECTO_RS_PASSWORD: Optional[str] = Field(
description='PGVectoRS password',
default=None,
)
PGVECTO_RS_DATABASE: Optional[str] = Field(
description='PGVectoRS database',
default=None,
)

View File

@@ -1,35 +0,0 @@
from typing import Optional
from pydantic import Field, NonNegativeInt, PositiveInt
from pydantic_settings import BaseSettings
class QdrantConfig(BaseSettings):
"""
Qdrant configs
"""
QDRANT_URL: Optional[str] = Field(
description='Qdrant url',
default=None,
)
QDRANT_API_KEY: Optional[str] = Field(
description='Qdrant api key',
default=None,
)
QDRANT_CLIENT_TIMEOUT: NonNegativeInt = Field(
description='Qdrant client timeout in seconds',
default=20,
)
QDRANT_GRPC_ENABLED: bool = Field(
description='whether enable grpc support for Qdrant connection',
default=False,
)
QDRANT_GRPC_PORT: PositiveInt = Field(
description='Qdrant grpc port',
default=6334,
)

View File

@@ -1,35 +0,0 @@
from typing import Optional
from pydantic import Field, PositiveInt
from pydantic_settings import BaseSettings
class RelytConfig(BaseSettings):
"""
Relyt configs
"""
RELYT_HOST: Optional[str] = Field(
description='Relyt host',
default=None,
)
RELYT_PORT: PositiveInt = Field(
description='Relyt port',
default=9200,
)
RELYT_USER: Optional[str] = Field(
description='Relyt user',
default=None,
)
RELYT_PASSWORD: Optional[str] = Field(
description='Relyt password',
default=None,
)
RELYT_DATABASE: Optional[str] = Field(
description='Relyt database',
default='default',
)

View File

@@ -1,50 +0,0 @@
from typing import Optional
from pydantic import Field, NonNegativeInt, PositiveInt
from pydantic_settings import BaseSettings
class TencentVectorDBConfig(BaseSettings):
"""
Tencent Vector configs
"""
TENCENT_VECTOR_DB_URL: Optional[str] = Field(
description='Tencent Vector URL',
default=None,
)
TENCENT_VECTOR_DB_API_KEY: Optional[str] = Field(
description='Tencent Vector API key',
default=None,
)
TENCENT_VECTOR_DB_TIMEOUT: PositiveInt = Field(
description='Tencent Vector timeout in seconds',
default=30,
)
TENCENT_VECTOR_DB_USERNAME: Optional[str] = Field(
description='Tencent Vector username',
default=None,
)
TENCENT_VECTOR_DB_PASSWORD: Optional[str] = Field(
description='Tencent Vector password',
default=None,
)
TENCENT_VECTOR_DB_SHARD: PositiveInt = Field(
description='Tencent Vector sharding number',
default=1,
)
TENCENT_VECTOR_DB_REPLICAS: NonNegativeInt = Field(
description='Tencent Vector replicas',
default=2,
)
TENCENT_VECTOR_DB_DATABASE: Optional[str] = Field(
description='Tencent Vector Database',
default=None,
)

View File

@@ -1,35 +0,0 @@
from typing import Optional
from pydantic import Field, PositiveInt
from pydantic_settings import BaseSettings
class TiDBVectorConfig(BaseSettings):
"""
TiDB Vector configs
"""
TIDB_VECTOR_HOST: Optional[str] = Field(
description='TiDB Vector host',
default=None,
)
TIDB_VECTOR_PORT: Optional[PositiveInt] = Field(
description='TiDB Vector port',
default=4000,
)
TIDB_VECTOR_USER: Optional[str] = Field(
description='TiDB Vector user',
default=None,
)
TIDB_VECTOR_PASSWORD: Optional[str] = Field(
description='TiDB Vector password',
default=None,
)
TIDB_VECTOR_DATABASE: Optional[str] = Field(
description='TiDB Vector database',
default=None,
)

View File

@@ -1,30 +0,0 @@
from typing import Optional
from pydantic import Field, PositiveInt
from pydantic_settings import BaseSettings
class WeaviateConfig(BaseSettings):
"""
Weaviate configs
"""
WEAVIATE_ENDPOINT: Optional[str] = Field(
description='Weaviate endpoint URL',
default=None,
)
WEAVIATE_API_KEY: Optional[str] = Field(
description='Weaviate API key',
default=None,
)
WEAVIATE_GRPC_ENABLED: bool = Field(
description='whether to enable gRPC for Weaviate connection',
default=True,
)
WEAVIATE_BATCH_SIZE: PositiveInt = Field(
description='Weaviate batch size',
default=100,
)

View File

@@ -1,18 +0,0 @@
from pydantic import Field
from pydantic_settings import BaseSettings
class PackagingInfo(BaseSettings):
"""
Packaging build information
"""
CURRENT_VERSION: str = Field(
description='Dify version',
default='0.6.15',
)
COMMIT_SHA: str = Field(
description="SHA-1 checksum of the git commit used to build the app",
default='',
)

View File

@@ -1,2 +0,0 @@
# TODO: Update all string in code to use this constant
HIDDEN_VALUE = '[__HIDDEN__]'

View File

@@ -1,3 +1,7 @@
languages = ['en-US', 'zh-Hans', 'zh-Hant', 'pt-BR', 'es-ES', 'fr-FR', 'de-DE', 'ja-JP', 'ko-KR', 'ru-RU', 'it-IT', 'uk-UA', 'vi-VN', 'pl-PL']
language_timezone_mapping = {
'en-US': 'America/New_York',
'zh-Hans': 'Asia/Shanghai',
@@ -14,12 +18,8 @@ language_timezone_mapping = {
'vi-VN': 'Asia/Ho_Chi_Minh',
'ro-RO': 'Europe/Bucharest',
'pl-PL': 'Europe/Warsaw',
'hi-IN': 'Asia/Kolkata',
'tr-TR': 'Europe/Istanbul',
}
languages = list(language_timezone_mapping.keys())
def supported_language(lang):
if lang in languages:

View File

@@ -22,7 +22,7 @@ default_app_templates = {
'model_config': {
'model': {
"provider": "openai",
"name": "gpt-4o",
"name": "gpt-4",
"mode": "chat",
"completion_params": {}
},
@@ -51,7 +51,7 @@ default_app_templates = {
'model_config': {
'model': {
"provider": "openai",
"name": "gpt-4o",
"name": "gpt-4",
"mode": "chat",
"completion_params": {}
}
@@ -77,7 +77,7 @@ default_app_templates = {
'model_config': {
'model': {
"provider": "openai",
"name": "gpt-4o",
"name": "gpt-4",
"mode": "chat",
"completion_params": {}
}

File diff suppressed because one or more lines are too long

View File

@@ -1,4 +0,0 @@
TTS_AUTO_PLAY_TIMEOUT = 5
# sleep 20 ms ( 40ms => 1280 byte audio file,20ms => 640 byte audio file)
TTS_AUTO_PLAY_YIELD_CPU_TIME = 0.02

View File

@@ -1,3 +0,0 @@
from contextvars import ContextVar
tenant_id: ContextVar[str] = ContextVar('tenant_id')

View File

@@ -20,7 +20,6 @@ from .app import (
generator,
message,
model_config,
ops_trace,
site,
statistic,
workflow,
@@ -30,13 +29,13 @@ from .app import (
)
# Import auth controllers
from .auth import activate, data_source_bearer_auth, data_source_oauth, forgot_password, login, oauth
from .auth import activate, data_source_oauth, login, oauth
# Import billing controllers
from .billing import billing
# Import datasets controllers
from .datasets import data_source, datasets, datasets_document, datasets_segments, file, hit_testing, website
from .datasets import data_source, datasets, datasets_document, datasets_segments, file, hit_testing
# Import explore controllers
from .explore import (

View File

@@ -23,7 +23,8 @@ class AnnotationReplyActionApi(Resource):
@account_initialization_required
@cloud_edition_billing_resource_check('annotation')
def post(self, app_id, action):
if not current_user.is_editor:
# The role of the current user in the ta table must be admin or owner
if not current_user.is_admin_or_owner:
raise Forbidden()
app_id = str(app_id)
@@ -46,7 +47,8 @@ class AppAnnotationSettingDetailApi(Resource):
@login_required
@account_initialization_required
def get(self, app_id):
if not current_user.is_editor:
# The role of the current user in the ta table must be admin or owner
if not current_user.is_admin_or_owner:
raise Forbidden()
app_id = str(app_id)
@@ -59,7 +61,8 @@ class AppAnnotationSettingUpdateApi(Resource):
@login_required
@account_initialization_required
def post(self, app_id, annotation_setting_id):
if not current_user.is_editor:
# The role of the current user in the ta table must be admin or owner
if not current_user.is_admin_or_owner:
raise Forbidden()
app_id = str(app_id)
@@ -79,7 +82,8 @@ class AnnotationReplyActionStatusApi(Resource):
@account_initialization_required
@cloud_edition_billing_resource_check('annotation')
def get(self, app_id, job_id, action):
if not current_user.is_editor:
# The role of the current user in the ta table must be admin or owner
if not current_user.is_admin_or_owner:
raise Forbidden()
job_id = str(job_id)
@@ -106,7 +110,8 @@ class AnnotationListApi(Resource):
@login_required
@account_initialization_required
def get(self, app_id):
if not current_user.is_editor:
# The role of the current user in the ta table must be admin or owner
if not current_user.is_admin_or_owner:
raise Forbidden()
page = request.args.get('page', default=1, type=int)
@@ -130,7 +135,8 @@ class AnnotationExportApi(Resource):
@login_required
@account_initialization_required
def get(self, app_id):
if not current_user.is_editor:
# The role of the current user in the ta table must be admin or owner
if not current_user.is_admin_or_owner:
raise Forbidden()
app_id = str(app_id)
@@ -148,7 +154,8 @@ class AnnotationCreateApi(Resource):
@cloud_edition_billing_resource_check('annotation')
@marshal_with(annotation_fields)
def post(self, app_id):
if not current_user.is_editor:
# The role of the current user in the ta table must be admin or owner
if not current_user.is_admin_or_owner:
raise Forbidden()
app_id = str(app_id)
@@ -167,7 +174,8 @@ class AnnotationUpdateDeleteApi(Resource):
@cloud_edition_billing_resource_check('annotation')
@marshal_with(annotation_fields)
def post(self, app_id, annotation_id):
if not current_user.is_editor:
# The role of the current user in the ta table must be admin or owner
if not current_user.is_admin_or_owner:
raise Forbidden()
app_id = str(app_id)
@@ -183,7 +191,8 @@ class AnnotationUpdateDeleteApi(Resource):
@login_required
@account_initialization_required
def delete(self, app_id, annotation_id):
if not current_user.is_editor:
# The role of the current user in the ta table must be admin or owner
if not current_user.is_admin_or_owner:
raise Forbidden()
app_id = str(app_id)
@@ -198,7 +207,8 @@ class AnnotationBatchImportApi(Resource):
@account_initialization_required
@cloud_edition_billing_resource_check('annotation')
def post(self, app_id):
if not current_user.is_editor:
# The role of the current user in the ta table must be admin or owner
if not current_user.is_admin_or_owner:
raise Forbidden()
app_id = str(app_id)
@@ -222,7 +232,8 @@ class AnnotationBatchImportStatusApi(Resource):
@account_initialization_required
@cloud_edition_billing_resource_check('annotation')
def get(self, app_id, job_id):
if not current_user.is_editor:
# The role of the current user in the ta table must be admin or owner
if not current_user.is_admin_or_owner:
raise Forbidden()
job_id = str(job_id)
@@ -248,7 +259,8 @@ class AnnotationHitHistoryListApi(Resource):
@login_required
@account_initialization_required
def get(self, app_id, annotation_id):
if not current_user.is_editor:
# The role of the current user in the table must be admin or owner
if not current_user.is_admin_or_owner:
raise Forbidden()
page = request.args.get('page', default=1, type=int)

View File

@@ -1,3 +1,4 @@
import json
import uuid
from flask_login import current_user
@@ -8,16 +9,17 @@ from controllers.console import api
from controllers.console.app.wraps import get_app_model
from controllers.console.setup import setup_required
from controllers.console.wraps import account_initialization_required, cloud_edition_billing_resource_check
from core.ops.ops_trace_manager import OpsTraceManager
from core.tools.tool_manager import ToolManager
from core.tools.utils.configuration import ToolParameterConfigurationManager
from fields.app_fields import (
app_detail_fields,
app_detail_fields_with_site,
app_pagination_fields,
)
from libs.login import login_required
from services.app_dsl_service import AppDslService
from models.model import App, AppMode, AppModelConfig
from services.app_service import AppService
from services.feature_service import FeatureService
from services.tag_service import TagService
ALLOW_CREATE_APP_MODES = ['chat', 'agent-chat', 'advanced-chat', 'workflow', 'completion']
@@ -66,8 +68,8 @@ class AppListApi(Resource):
parser.add_argument('icon_background', type=str, location='json')
args = parser.parse_args()
# The role of the current user in the ta table must be admin, owner, or editor
if not current_user.is_editor:
# The role of the current user in the ta table must be admin or owner
if not current_user.is_admin_or_owner:
raise Forbidden()
if 'mode' not in args or args['mode'] is None:
@@ -87,8 +89,8 @@ class AppImportApi(Resource):
@cloud_edition_billing_resource_check('apps')
def post(self):
"""Import app"""
# The role of the current user in the ta table must be admin, owner, or editor
if not current_user.is_editor:
# The role of the current user in the ta table must be admin or owner
if not current_user.is_admin_or_owner:
raise Forbidden()
parser = reqparse.RequestParser()
@@ -99,42 +101,8 @@ class AppImportApi(Resource):
parser.add_argument('icon_background', type=str, location='json')
args = parser.parse_args()
app = AppDslService.import_and_create_new_app(
tenant_id=current_user.current_tenant_id,
data=args['data'],
args=args,
account=current_user
)
return app, 201
class AppImportFromUrlApi(Resource):
@setup_required
@login_required
@account_initialization_required
@marshal_with(app_detail_fields_with_site)
@cloud_edition_billing_resource_check('apps')
def post(self):
"""Import app from url"""
# The role of the current user in the ta table must be admin, owner, or editor
if not current_user.is_editor:
raise Forbidden()
parser = reqparse.RequestParser()
parser.add_argument('url', type=str, required=True, nullable=False, location='json')
parser.add_argument('name', type=str, location='json')
parser.add_argument('description', type=str, location='json')
parser.add_argument('icon', type=str, location='json')
parser.add_argument('icon_background', type=str, location='json')
args = parser.parse_args()
app = AppDslService.import_and_create_new_app_from_url(
tenant_id=current_user.current_tenant_id,
url=args['url'],
args=args,
account=current_user
)
app_service = AppService()
app = app_service.import_app(current_user.current_tenant_id, args['data'], args, current_user)
return app, 201
@@ -161,16 +129,11 @@ class AppApi(Resource):
@marshal_with(app_detail_fields_with_site)
def put(self, app_model):
"""Update app"""
# The role of the current user in the ta table must be admin, owner, or editor
if not current_user.is_editor:
raise Forbidden()
parser = reqparse.RequestParser()
parser.add_argument('name', type=str, required=True, nullable=False, location='json')
parser.add_argument('description', type=str, location='json')
parser.add_argument('icon', type=str, location='json')
parser.add_argument('icon_background', type=str, location='json')
parser.add_argument('max_active_requests', type=int, location='json')
args = parser.parse_args()
app_service = AppService()
@@ -184,8 +147,7 @@ class AppApi(Resource):
@get_app_model
def delete(self, app_model):
"""Delete app"""
# The role of the current user in the ta table must be admin, owner, or editor
if not current_user.is_editor:
if not current_user.is_admin_or_owner:
raise Forbidden()
app_service = AppService()
@@ -202,8 +164,8 @@ class AppCopyApi(Resource):
@marshal_with(app_detail_fields_with_site)
def post(self, app_model):
"""Copy app"""
# The role of the current user in the ta table must be admin, owner, or editor
if not current_user.is_editor:
# The role of the current user in the ta table must be admin or owner
if not current_user.is_admin_or_owner:
raise Forbidden()
parser = reqparse.RequestParser()
@@ -213,13 +175,9 @@ class AppCopyApi(Resource):
parser.add_argument('icon_background', type=str, location='json')
args = parser.parse_args()
data = AppDslService.export_dsl(app_model=app_model, include_secret=True)
app = AppDslService.import_and_create_new_app(
tenant_id=current_user.current_tenant_id,
data=data,
args=args,
account=current_user
)
app_service = AppService()
data = app_service.export_app(app_model)
app = app_service.import_app(current_user.current_tenant_id, data, args, current_user)
return app, 201
@@ -231,17 +189,10 @@ class AppExportApi(Resource):
@get_app_model
def get(self, app_model):
"""Export app"""
# The role of the current user in the ta table must be admin, owner, or editor
if not current_user.is_editor:
raise Forbidden()
# Add include_secret params
parser = reqparse.RequestParser()
parser.add_argument('include_secret', type=inputs.boolean, default=False, location='args')
args = parser.parse_args()
app_service = AppService()
return {
"data": AppDslService.export_dsl(app_model=app_model, include_secret=args['include_secret'])
"data": app_service.export_app(app_model)
}
@@ -252,10 +203,6 @@ class AppNameApi(Resource):
@get_app_model
@marshal_with(app_detail_fields)
def post(self, app_model):
# The role of the current user in the ta table must be admin, owner, or editor
if not current_user.is_editor:
raise Forbidden()
parser = reqparse.RequestParser()
parser.add_argument('name', type=str, required=True, location='json')
args = parser.parse_args()
@@ -273,10 +220,6 @@ class AppIconApi(Resource):
@get_app_model
@marshal_with(app_detail_fields)
def post(self, app_model):
# The role of the current user in the ta table must be admin, owner, or editor
if not current_user.is_editor:
raise Forbidden()
parser = reqparse.RequestParser()
parser.add_argument('icon', type=str, location='json')
parser.add_argument('icon_background', type=str, location='json')
@@ -295,10 +238,6 @@ class AppSiteStatus(Resource):
@get_app_model
@marshal_with(app_detail_fields)
def post(self, app_model):
# The role of the current user in the ta table must be admin, owner, or editor
if not current_user.is_editor:
raise Forbidden()
parser = reqparse.RequestParser()
parser.add_argument('enable_site', type=bool, required=True, location='json')
args = parser.parse_args()
@@ -316,10 +255,6 @@ class AppApiStatus(Resource):
@get_app_model
@marshal_with(app_detail_fields)
def post(self, app_model):
# The role of the current user in the ta table must be admin or owner
if not current_user.is_admin_or_owner:
raise Forbidden()
parser = reqparse.RequestParser()
parser.add_argument('enable_api', type=bool, required=True, location='json')
args = parser.parse_args()
@@ -330,68 +265,8 @@ class AppApiStatus(Resource):
return app_model
class AppTraceApi(Resource):
@setup_required
@login_required
@account_initialization_required
def get(self, app_id):
"""Get app trace"""
app_trace_config = OpsTraceManager.get_app_tracing_config(
app_id=app_id
)
return app_trace_config
@setup_required
@login_required
@account_initialization_required
def post(self, app_id):
# add app trace
if not current_user.is_admin_or_owner:
raise Forbidden()
parser = reqparse.RequestParser()
parser.add_argument('enabled', type=bool, required=True, location='json')
parser.add_argument('tracing_provider', type=str, required=True, location='json')
args = parser.parse_args()
OpsTraceManager.update_app_tracing_config(
app_id=app_id,
enabled=args['enabled'],
tracing_provider=args['tracing_provider'],
)
return {"result": "success"}
class AppSSOApi(Resource):
@setup_required
@login_required
@account_initialization_required
def get(self):
return FeatureService.get_system_features().model_dump()
@setup_required
@login_required
@account_initialization_required
def patch(self):
parser = reqparse.RequestParser()
parser.add_argument('exclude_app_id_list', type=list, location='json')
if not current_user.is_editor:
raise Forbidden()
args = parser.parse_args()
current_user_id = current_user.id
FeatureService.update_web_sso_exclude_apps(args['exclude_app_id_list'], current_user_id)
return {"result": "success"}
api.add_resource(AppListApi, '/apps')
api.add_resource(AppImportApi, '/apps/import')
api.add_resource(AppImportFromUrlApi, '/apps/import/url')
api.add_resource(AppApi, '/apps/<uuid:app_id>')
api.add_resource(AppCopyApi, '/apps/<uuid:app_id>/copy')
api.add_resource(AppExportApi, '/apps/<uuid:app_id>/export')
@@ -399,5 +274,3 @@ api.add_resource(AppNameApi, '/apps/<uuid:app_id>/name')
api.add_resource(AppIconApi, '/apps/<uuid:app_id>/icon')
api.add_resource(AppSiteStatus, '/apps/<uuid:app_id>/site-enable')
api.add_resource(AppApiStatus, '/apps/<uuid:app_id>/api-enable')
api.add_resource(AppTraceApi, '/apps/<uuid:app_id>/trace')
api.add_resource(AppSSOApi, '/apps/web-sso')

View File

@@ -81,36 +81,15 @@ class ChatMessageTextApi(Resource):
@account_initialization_required
@get_app_model
def post(self, app_model):
from werkzeug.exceptions import InternalServerError
try:
parser = reqparse.RequestParser()
parser.add_argument('message_id', type=str, location='json')
parser.add_argument('text', type=str, location='json')
parser.add_argument('voice', type=str, location='json')
parser.add_argument('streaming', type=bool, location='json')
args = parser.parse_args()
message_id = args.get('message_id', None)
text = args.get('text', None)
if (app_model.mode in [AppMode.ADVANCED_CHAT.value, AppMode.WORKFLOW.value]
and app_model.workflow
and app_model.workflow.features_dict):
text_to_speech = app_model.workflow.features_dict.get('text_to_speech')
voice = args.get('voice') if args.get('voice') else text_to_speech.get('voice')
else:
try:
voice = args.get('voice') if args.get('voice') else app_model.app_model_config.text_to_speech_dict.get(
'voice')
except Exception:
voice = None
response = AudioService.transcript_tts(
app_model=app_model,
text=text,
message_id=message_id,
voice=voice
text=request.form['text'],
voice=request.form['voice'],
streaming=False
)
return response
return {'data': response.data.decode('latin1')}
except services.errors.app_model_config.AppModelConfigBrokenError:
logging.exception("App model config broken.")
raise AppUnavailableError()
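
The branch above resolves the TTS voice: an explicit request value wins, then the workflow's text_to_speech features for advanced-chat/workflow apps, then the legacy app model config. The same fallback as a standalone sketch (AppMode as imported in this file; app_model is any object with the attributes used above):

def resolve_tts_voice(app_model, requested_voice):
    # Explicit request value wins.
    if requested_voice:
        return requested_voice
    # Workflow-backed apps keep TTS settings in workflow.features_dict.
    if (app_model.mode in (AppMode.ADVANCED_CHAT.value, AppMode.WORKFLOW.value)
            and app_model.workflow
            and app_model.workflow.features_dict):
        tts = app_model.workflow.features_dict.get('text_to_speech') or {}
        return tts.get('voice')
    # Legacy apps fall back to the app model config, tolerating missing config.
    try:
        return app_model.app_model_config.text_to_speech_dict.get('voice')
    except Exception:
        return None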

View File

@@ -19,12 +19,7 @@ from controllers.console.setup import setup_required
from controllers.console.wraps import account_initialization_required
from core.app.apps.base_app_queue_manager import AppQueueManager
from core.app.entities.app_invoke_entities import InvokeFrom
from core.errors.error import (
AppInvokeQuotaExceededError,
ModelCurrentlyNotSupportError,
ProviderTokenNotInitError,
QuotaExceededError,
)
from core.errors.error import ModelCurrentlyNotSupportError, ProviderTokenNotInitError, QuotaExceededError
from core.model_runtime.errors.invoke import InvokeError
from libs import helper
from libs.helper import uuid_value
@@ -80,7 +75,7 @@ class CompletionMessageApi(Resource):
raise ProviderModelCurrentlyNotSupportError()
except InvokeError as e:
raise CompletionRequestError(e.description)
except (ValueError, AppInvokeQuotaExceededError) as e:
except ValueError as e:
raise e
except Exception as e:
logging.exception("internal server error.")
@@ -146,7 +141,7 @@ class ChatMessageApi(Resource):
raise ProviderModelCurrentlyNotSupportError()
except InvokeError as e:
raise CompletionRequestError(e.description)
except (ValueError, AppInvokeQuotaExceededError) as e:
except ValueError as e:
raise e
except Exception as e:
logging.exception("internal server error.")

View File

@@ -6,7 +6,7 @@ from flask_restful import Resource, marshal_with, reqparse
from flask_restful.inputs import int_range
from sqlalchemy import func, or_
from sqlalchemy.orm import joinedload
from werkzeug.exceptions import Forbidden, NotFound
from werkzeug.exceptions import NotFound
from controllers.console import api
from controllers.console.app.wraps import get_app_model
@@ -22,7 +22,7 @@ from fields.conversation_fields import (
)
from libs.helper import datetime_string
from libs.login import login_required
from models.model import AppMode, Conversation, EndUser, Message, MessageAnnotation
from models.model import AppMode, Conversation, Message, MessageAnnotation
class CompletionConversationApi(Resource):
@@ -33,8 +33,6 @@ class CompletionConversationApi(Resource):
@get_app_model(mode=AppMode.COMPLETION)
@marshal_with(conversation_pagination_fields)
def get(self, app_model):
if not current_user.is_admin_or_owner:
raise Forbidden()
parser = reqparse.RequestParser()
parser.add_argument('keyword', type=str, location='args')
parser.add_argument('start', type=datetime_string('%Y-%m-%d %H:%M'), location='args')
@@ -108,8 +106,6 @@ class CompletionConversationDetailApi(Resource):
@get_app_model(mode=AppMode.COMPLETION)
@marshal_with(conversation_message_detail_fields)
def get(self, app_model, conversation_id):
if not current_user.is_admin_or_owner:
raise Forbidden()
conversation_id = str(conversation_id)
return _get_conversation(app_model, conversation_id)
@@ -119,8 +115,6 @@ class CompletionConversationDetailApi(Resource):
@account_initialization_required
@get_app_model(mode=[AppMode.CHAT, AppMode.AGENT_CHAT, AppMode.ADVANCED_CHAT])
def delete(self, app_model, conversation_id):
if not current_user.is_admin_or_owner:
raise Forbidden()
conversation_id = str(conversation_id)
conversation = db.session.query(Conversation) \
@@ -143,8 +137,6 @@ class ChatConversationApi(Resource):
@get_app_model(mode=[AppMode.CHAT, AppMode.AGENT_CHAT, AppMode.ADVANCED_CHAT])
@marshal_with(conversation_with_summary_pagination_fields)
def get(self, app_model):
if not current_user.is_editor:
raise Forbidden()
parser = reqparse.RequestParser()
parser.add_argument('keyword', type=str, location='args')
parser.add_argument('start', type=datetime_string('%Y-%m-%d %H:%M'), location='args')
@@ -156,31 +148,19 @@ class ChatConversationApi(Resource):
parser.add_argument('limit', type=int_range(1, 100), required=False, default=20, location='args')
args = parser.parse_args()
subquery = (
db.session.query(
Conversation.id.label('conversation_id'),
EndUser.session_id.label('from_end_user_session_id')
)
.outerjoin(EndUser, Conversation.from_end_user_id == EndUser.id)
.subquery()
)
query = db.select(Conversation).where(Conversation.app_id == app_model.id)
if args['keyword']:
keyword_filter = '%{}%'.format(args['keyword'])
query = query.join(
Message, Message.conversation_id == Conversation.id,
).join(
subquery, subquery.c.conversation_id == Conversation.id
Message, Message.conversation_id == Conversation.id
).filter(
or_(
Message.query.ilike(keyword_filter),
Message.answer.ilike(keyword_filter),
Conversation.name.ilike(keyword_filter),
Conversation.introduction.ilike(keyword_filter),
subquery.c.from_end_user_session_id.ilike(keyword_filter)
Message.query.ilike('%{}%'.format(args['keyword'])),
Message.answer.ilike('%{}%'.format(args['keyword'])),
Conversation.name.ilike('%{}%'.format(args['keyword'])),
Conversation.introduction.ilike('%{}%'.format(args['keyword'])),
),
)
account = current_user
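
The rewritten keyword filter above joins an extra subquery so a search term can also match the calling end user's session_id. Condensed, the pattern is (models as imported in this file; a sketch of the query construction, not the full handler):

pattern = '%{}%'.format(args['keyword'])
subquery = (
    db.session.query(
        Conversation.id.label('conversation_id'),
        EndUser.session_id.label('from_end_user_session_id'))
    .outerjoin(EndUser, Conversation.from_end_user_id == EndUser.id)
    .subquery()
)
query = (
    db.select(Conversation)
    .where(Conversation.app_id == app_model.id)
    .join(Message, Message.conversation_id == Conversation.id)
    .join(subquery, subquery.c.conversation_id == Conversation.id)
    .filter(or_(
        Message.query.ilike(pattern),
        Message.answer.ilike(pattern),
        Conversation.name.ilike(pattern),
        Conversation.introduction.ilike(pattern),
        subquery.c.from_end_user_session_id.ilike(pattern)))
)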
@@ -245,8 +225,6 @@ class ChatConversationDetailApi(Resource):
@get_app_model(mode=[AppMode.CHAT, AppMode.AGENT_CHAT, AppMode.ADVANCED_CHAT])
@marshal_with(conversation_detail_fields)
def get(self, app_model, conversation_id):
if not current_user.is_editor:
raise Forbidden()
conversation_id = str(conversation_id)
return _get_conversation(app_model, conversation_id)
@@ -256,8 +234,6 @@ class ChatConversationDetailApi(Resource):
@get_app_model(mode=[AppMode.CHAT, AppMode.AGENT_CHAT, AppMode.ADVANCED_CHAT])
@account_initialization_required
def delete(self, app_model, conversation_id):
if not current_user.is_admin_or_owner:
raise Forbidden()
conversation_id = str(conversation_id)
conversation = db.session.query(Conversation) \

View File

@@ -97,21 +97,3 @@ class DraftWorkflowNotSync(BaseHTTPException):
error_code = 'draft_workflow_not_sync'
description = "Workflow graph might have been modified, please refresh and resubmit."
code = 400
class TracingConfigNotExist(BaseHTTPException):
error_code = 'trace_config_not_exist'
description = "Trace config not exist."
code = 400
class TracingConfigIsExist(BaseHTTPException):
error_code = 'trace_config_is_exist'
description = "Trace config is exist."
code = 400
class TracingConfigCheckError(BaseHTTPException):
error_code = 'trace_config_check_error'
description = "Invalid Credentials."
code = 400

View File

@@ -1,5 +1,3 @@
import os
from flask_login import current_user
from flask_restful import Resource, reqparse
@@ -24,21 +22,17 @@ class RuleGenerateApi(Resource):
@account_initialization_required
def post(self):
parser = reqparse.RequestParser()
parser.add_argument('instruction', type=str, required=True, nullable=False, location='json')
parser.add_argument('model_config', type=dict, required=True, nullable=False, location='json')
parser.add_argument('no_variable', type=bool, required=True, default=False, location='json')
parser.add_argument('audiences', type=str, required=True, nullable=False, location='json')
parser.add_argument('hoping_to_solve', type=str, required=True, nullable=False, location='json')
args = parser.parse_args()
account = current_user
PROMPT_GENERATION_MAX_TOKENS = int(os.getenv('PROMPT_GENERATION_MAX_TOKENS', '512'))
try:
rules = LLMGenerator.generate_rule_config(
tenant_id=account.current_tenant_id,
instruction=args['instruction'],
model_config=args['model_config'],
no_variable=args['no_variable'],
rule_config_max_tokens=PROMPT_GENERATION_MAX_TOKENS
account.current_tenant_id,
args['audiences'],
args['hoping_to_solve']
)
except ProviderTokenNotInitError as ex:
raise ProviderNotInitializeError(ex.description)
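
RuleGenerateApi now takes instruction, model_config, and no_variable, and caps output with the PROMPT_GENERATION_MAX_TOKENS environment variable (default 512). A hedged request sketch — the route path, token, and model_config contents are illustrative assumptions; only the field names come from the parser above:

import requests

payload = {
    'instruction': 'Summarize customer feedback into three bullet points.',
    'model_config': {'provider': 'openai', 'name': 'gpt-4o', 'completion_params': {}},
    'no_variable': False,
}
resp = requests.post('http://localhost:5001/console/api/rule-generate',  # hypothetical route
                     headers={'Authorization': 'Bearer <console-session-token>'},
                     json=payload)
print(resp.json())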

View File

@@ -149,7 +149,8 @@ class MessageAnnotationApi(Resource):
@get_app_model
@marshal_with(annotation_fields)
def post(self, app_model):
if not current_user.is_editor:
# The role of the current user in the ta table must be admin or owner
if not current_user.is_admin_or_owner:
raise Forbidden()
parser = reqparse.RequestParser()

View File

@@ -25,7 +25,6 @@ class ModelConfigResource(Resource):
@account_initialization_required
@get_app_model(mode=[AppMode.AGENT_CHAT, AppMode.CHAT, AppMode.COMPLETION])
def post(self, app_model):
"""Modify app model config"""
# validate config
model_configuration = AppModelConfigService.validate_configuration(

View File

@@ -1,101 +0,0 @@
from flask_restful import Resource, reqparse
from controllers.console import api
from controllers.console.app.error import TracingConfigCheckError, TracingConfigIsExist, TracingConfigNotExist
from controllers.console.setup import setup_required
from controllers.console.wraps import account_initialization_required
from libs.login import login_required
from services.ops_service import OpsService
class TraceAppConfigApi(Resource):
"""
Manage trace app configurations
"""
@setup_required
@login_required
@account_initialization_required
def get(self, app_id):
parser = reqparse.RequestParser()
parser.add_argument('tracing_provider', type=str, required=True, location='args')
args = parser.parse_args()
try:
trace_config = OpsService.get_tracing_app_config(
app_id=app_id, tracing_provider=args['tracing_provider']
)
if not trace_config:
return {"has_not_configured": True}
return trace_config
except Exception as e:
raise e
@setup_required
@login_required
@account_initialization_required
def post(self, app_id):
"""Create a new trace app configuration"""
parser = reqparse.RequestParser()
parser.add_argument('tracing_provider', type=str, required=True, location='json')
parser.add_argument('tracing_config', type=dict, required=True, location='json')
args = parser.parse_args()
try:
result = OpsService.create_tracing_app_config(
app_id=app_id,
tracing_provider=args['tracing_provider'],
tracing_config=args['tracing_config']
)
if not result:
raise TracingConfigIsExist()
if result.get('error'):
raise TracingConfigCheckError()
return result
except Exception as e:
raise e
@setup_required
@login_required
@account_initialization_required
def patch(self, app_id):
"""Update an existing trace app configuration"""
parser = reqparse.RequestParser()
parser.add_argument('tracing_provider', type=str, required=True, location='json')
parser.add_argument('tracing_config', type=dict, required=True, location='json')
args = parser.parse_args()
try:
result = OpsService.update_tracing_app_config(
app_id=app_id,
tracing_provider=args['tracing_provider'],
tracing_config=args['tracing_config']
)
if not result:
raise TracingConfigNotExist()
return {"result": "success"}
except Exception as e:
raise e
@setup_required
@login_required
@account_initialization_required
def delete(self, app_id):
"""Delete an existing trace app configuration"""
parser = reqparse.RequestParser()
parser.add_argument('tracing_provider', type=str, required=True, location='args')
args = parser.parse_args()
try:
result = OpsService.delete_tracing_app_config(
app_id=app_id,
tracing_provider=args['tracing_provider']
)
if not result:
raise TracingConfigNotExist()
return {"result": "success"}
except Exception as e:
raise e
api.add_resource(TraceAppConfigApi, '/apps/<uuid:app_id>/trace-config')
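
TraceAppConfigApi gives each app a provider-scoped CRUD surface: GET and DELETE key on a tracing_provider query argument, while POST and PATCH expect tracing_provider plus a tracing_config dict in the JSON body. A client sketch — base URL, token, and the config keys are assumptions; routes, methods, and field names follow the resource above:

import requests

BASE = 'http://localhost:5001/console/api'     # hypothetical deployment URL
HEADERS = {'Authorization': 'Bearer <token>'}  # hypothetical auth
APP = '<app-uuid>'

# Create: provider name plus a provider-specific config dict.
requests.post(f'{BASE}/apps/{APP}/trace-config', headers=HEADERS,
              json={'tracing_provider': 'langfuse',
                    'tracing_config': {'public_key': '...', 'secret_key': '...'}})

# Read: keyed by provider via a query argument.
r = requests.get(f'{BASE}/apps/{APP}/trace-config', headers=HEADERS,
                 params={'tracing_provider': 'langfuse'})
print(r.json())  # {'has_not_configured': True} when nothing is stored yet

# Delete: also keyed by provider.
requests.delete(f'{BASE}/apps/{APP}/trace-config', headers=HEADERS,
                params={'tracing_provider': 'langfuse'})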

View File

@@ -20,8 +20,6 @@ def parse_app_site_args():
parser.add_argument('icon_background', type=str, required=False, location='json')
parser.add_argument('description', type=str, required=False, location='json')
parser.add_argument('default_language', type=supported_language, required=False, location='json')
parser.add_argument('chat_color_theme', type=str, required=False, location='json')
parser.add_argument('chat_color_theme_inverted', type=bool, required=False, location='json')
parser.add_argument('customize_domain', type=str, required=False, location='json')
parser.add_argument('copyright', type=str, required=False, location='json')
parser.add_argument('privacy_policy', type=str, required=False, location='json')
@@ -30,7 +28,6 @@ def parse_app_site_args():
required=False,
location='json')
parser.add_argument('prompt_public', type=bool, required=False, location='json')
parser.add_argument('show_workflow_steps', type=bool, required=False, location='json')
return parser.parse_args()
@@ -43,8 +40,8 @@ class AppSite(Resource):
def post(self, app_model):
args = parse_app_site_args()
# The role of the current user in the ta table must be editor, admin, or owner
if not current_user.is_editor:
# The role of the current user in the ta table must be admin or owner
if not current_user.is_admin_or_owner:
raise Forbidden()
site = db.session.query(Site). \
@@ -57,20 +54,24 @@ class AppSite(Resource):
'icon_background',
'description',
'default_language',
'chat_color_theme',
'chat_color_theme_inverted',
'customize_domain',
'copyright',
'privacy_policy',
'custom_disclaimer',
'customize_token_strategy',
'prompt_public',
'show_workflow_steps'
'prompt_public'
]:
value = args.get(attr_name)
if value is not None:
setattr(site, attr_name, value)
if attr_name == 'title':
app_model.name = value
elif attr_name == 'icon':
app_model.icon = value
elif attr_name == 'icon_background':
app_model.icon_background = value
db.session.commit()
return site

View File

@@ -281,7 +281,7 @@ class UserSatisfactionRateStatistic(Resource):
SELECT date(DATE_TRUNC('day', m.created_at AT TIME ZONE 'UTC' AT TIME ZONE :tz )) AS date,
COUNT(m.id) as message_count, COUNT(mf.id) as feedback_count
FROM messages m
LEFT JOIN message_feedbacks mf on mf.message_id=m.id and mf.rating='like'
LEFT JOIN message_feedbacks mf on mf.message_id=m.id
WHERE m.app_id = :app_id
'''
arg_dict = {'tz': account.timezone, 'app_id': app_model.id}
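
With the added rating filter, feedback_count now counts only 'like' feedback per day, so the satisfaction rate falls directly out of the two columns. A small post-processing sketch, assuming rows shaped like the SELECT above:

def satisfaction_rates(rows):
    # rows: iterable of (date, message_count, feedback_count) tuples
    # returns {date: share of messages that received a 'like'}
    return {date: (feedback / messages if messages else 0.0)
            for date, messages, feedback in rows}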

View File

@@ -3,7 +3,7 @@ import logging
from flask import abort, request
from flask_restful import Resource, marshal_with, reqparse
from werkzeug.exceptions import Forbidden, InternalServerError, NotFound
from werkzeug.exceptions import InternalServerError, NotFound
import services
from controllers.console import api
@@ -13,15 +13,12 @@ from controllers.console.setup import setup_required
from controllers.console.wraps import account_initialization_required
from core.app.apps.base_app_queue_manager import AppQueueManager
from core.app.entities.app_invoke_entities import InvokeFrom
from core.app.segments import factory
from core.errors.error import AppInvokeQuotaExceededError
from fields.workflow_fields import workflow_fields
from fields.workflow_run_fields import workflow_run_node_execution_fields
from libs import helper
from libs.helper import TimestampField, uuid_value
from libs.login import current_user, login_required
from models.model import App, AppMode
from services.app_dsl_service import AppDslService
from services.app_generate_service import AppGenerateService
from services.errors.app import WorkflowHashNotEqualError
from services.workflow_service import WorkflowService
@@ -39,10 +36,6 @@ class DraftWorkflowApi(Resource):
"""
Get draft workflow
"""
# The role of the current user in the ta table must be admin, owner, or editor
if not current_user.is_editor:
raise Forbidden()
# fetch draft workflow by app_model
workflow_service = WorkflowService()
workflow = workflow_service.get_draft_workflow(app_model=app_model)
@@ -61,19 +54,13 @@ class DraftWorkflowApi(Resource):
"""
Sync draft workflow
"""
# The role of the current user in the ta table must be admin, owner, or editor
if not current_user.is_editor:
raise Forbidden()
content_type = request.headers.get('Content-Type', '')
content_type = request.headers.get('Content-Type')
if 'application/json' in content_type:
parser = reqparse.RequestParser()
parser.add_argument('graph', type=dict, required=True, nullable=False, location='json')
parser.add_argument('features', type=dict, required=True, nullable=False, location='json')
parser.add_argument('hash', type=str, required=False, location='json')
# TODO: set this to required=True after frontend is updated
parser.add_argument('environment_variables', type=list, required=False, location='json')
args = parser.parse_args()
elif 'text/plain' in content_type:
try:
@@ -87,8 +74,7 @@ class DraftWorkflowApi(Resource):
args = {
'graph': data.get('graph'),
'features': data.get('features'),
'hash': data.get('hash'),
'environment_variables': data.get('environment_variables')
'hash': data.get('hash')
}
except json.JSONDecodeError:
return {'message': 'Invalid JSON data'}, 400
@@ -98,15 +84,12 @@ class DraftWorkflowApi(Resource):
workflow_service = WorkflowService()
try:
environment_variables_list = args.get('environment_variables') or []
environment_variables = [factory.build_variable_from_mapping(obj) for obj in environment_variables_list]
workflow = workflow_service.sync_draft_workflow(
app_model=app_model,
graph=args['graph'],
features=args['features'],
graph=args.get('graph'),
features=args.get('features'),
unique_hash=args.get('hash'),
account=current_user,
environment_variables=environment_variables,
account=current_user
)
except WorkflowHashNotEqualError:
raise DraftWorkflowNotSync()
@@ -118,33 +101,6 @@ class DraftWorkflowApi(Resource):
}
class DraftWorkflowImportApi(Resource):
@setup_required
@login_required
@account_initialization_required
@get_app_model(mode=[AppMode.ADVANCED_CHAT, AppMode.WORKFLOW])
@marshal_with(workflow_fields)
def post(self, app_model: App):
"""
Import draft workflow
"""
# The role of the current user in the ta table must be admin, owner, or editor
if not current_user.is_editor:
raise Forbidden()
parser = reqparse.RequestParser()
parser.add_argument('data', type=str, required=True, nullable=False, location='json')
args = parser.parse_args()
workflow = AppDslService.import_and_overwrite_workflow(
app_model=app_model,
data=args['data'],
account=current_user
)
return workflow
class AdvancedChatDraftWorkflowRunApi(Resource):
@setup_required
@login_required
@@ -154,10 +110,6 @@ class AdvancedChatDraftWorkflowRunApi(Resource):
"""
Run draft workflow
"""
# The role of the current user in the ta table must be admin, owner, or editor
if not current_user.is_editor:
raise Forbidden()
parser = reqparse.RequestParser()
parser.add_argument('inputs', type=dict, location='json')
parser.add_argument('query', type=str, required=True, location='json', default='')
@@ -194,10 +146,6 @@ class AdvancedChatDraftRunIterationNodeApi(Resource):
"""
Run draft workflow iteration node
"""
# The role of the current user in the ta table must be admin, owner, or editor
if not current_user.is_editor:
raise Forbidden()
parser = reqparse.RequestParser()
parser.add_argument('inputs', type=dict, location='json')
args = parser.parse_args()
@@ -231,10 +179,6 @@ class WorkflowDraftRunIterationNodeApi(Resource):
"""
Run draft workflow iteration node
"""
# The role of the current user in the ta table must be admin, owner, or editor
if not current_user.is_editor:
raise Forbidden()
parser = reqparse.RequestParser()
parser.add_argument('inputs', type=dict, location='json')
args = parser.parse_args()
@@ -268,10 +212,6 @@ class DraftWorkflowRunApi(Resource):
"""
Run draft workflow
"""
# The role of the current user in the ta table must be admin, owner, or editor
if not current_user.is_editor:
raise Forbidden()
parser = reqparse.RequestParser()
parser.add_argument('inputs', type=dict, required=True, nullable=False, location='json')
parser.add_argument('files', type=list, required=False, location='json')
@@ -287,7 +227,7 @@ class DraftWorkflowRunApi(Resource):
)
return helper.compact_generate_response(response)
except (ValueError, AppInvokeQuotaExceededError) as e:
except ValueError as e:
raise e
except Exception as e:
logging.exception("internal server error.")
@@ -303,10 +243,6 @@ class WorkflowTaskStopApi(Resource):
"""
Stop workflow task
"""
# The role of the current user in the ta table must be admin, owner, or editor
if not current_user.is_editor:
raise Forbidden()
AppQueueManager.set_stop_flag(task_id, InvokeFrom.DEBUGGER, current_user.id)
return {
@@ -324,10 +260,6 @@ class DraftWorkflowNodeRunApi(Resource):
"""
Run draft workflow node
"""
# The role of the current user in the ta table must be admin, owner, or editor
if not current_user.is_editor:
raise Forbidden()
parser = reqparse.RequestParser()
parser.add_argument('inputs', type=dict, required=True, nullable=False, location='json')
args = parser.parse_args()
@@ -354,10 +286,6 @@ class PublishedWorkflowApi(Resource):
"""
Get published workflow
"""
# The role of the current user in the ta table must be admin, owner, or editor
if not current_user.is_editor:
raise Forbidden()
# fetch published workflow by app_model
workflow_service = WorkflowService()
workflow = workflow_service.get_published_workflow(app_model=app_model)
@@ -373,10 +301,6 @@ class PublishedWorkflowApi(Resource):
"""
Publish workflow
"""
# The role of the current user in the ta table must be admin, owner, or editor
if not current_user.is_editor:
raise Forbidden()
workflow_service = WorkflowService()
workflow = workflow_service.publish_workflow(app_model=app_model, account=current_user)
@@ -395,10 +319,6 @@ class DefaultBlockConfigsApi(Resource):
"""
Get default block config
"""
# The role of the current user in the ta table must be admin, owner, or editor
if not current_user.is_editor:
raise Forbidden()
# Get default block configs
workflow_service = WorkflowService()
return workflow_service.get_default_block_configs()
@@ -413,10 +333,6 @@ class DefaultBlockConfigApi(Resource):
"""
Get default block config
"""
# The role of the current user in the ta table must be admin, owner, or editor
if not current_user.is_editor:
raise Forbidden()
parser = reqparse.RequestParser()
parser.add_argument('q', type=str, location='args')
args = parser.parse_args()
@@ -447,10 +363,6 @@ class ConvertToWorkflowApi(Resource):
Convert expert mode of chatbot app to workflow mode
Convert Completion App to Workflow App
"""
# The role of the current user in the ta table must be admin, owner, or editor
if not current_user.is_editor:
raise Forbidden()
if request.data:
parser = reqparse.RequestParser()
parser.add_argument('name', type=str, required=False, nullable=True, location='json')
@@ -475,7 +387,6 @@ class ConvertToWorkflowApi(Resource):
api.add_resource(DraftWorkflowApi, '/apps/<uuid:app_id>/workflows/draft')
api.add_resource(DraftWorkflowImportApi, '/apps/<uuid:app_id>/workflows/draft/import')
api.add_resource(AdvancedChatDraftWorkflowRunApi, '/apps/<uuid:app_id>/advanced-chat/workflows/draft/run')
api.add_resource(DraftWorkflowRunApi, '/apps/<uuid:app_id>/workflows/draft/run')
api.add_resource(WorkflowTaskStopApi, '/apps/<uuid:app_id>/workflow-runs/tasks/<string:task_id>/stop')
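
Draft-workflow sync now also accepts an environment_variables list; each mapping is rebuilt into a variable via factory.build_variable_from_mapping before the service call. A hedged JSON-body sketch — the route and the required graph/features fields follow DraftWorkflowApi above, while the variable mapping shown is an assumption:

import requests

body = {
    'graph': {'nodes': [], 'edges': []},
    'features': {},
    'hash': None,  # send the last-known hash to trigger DraftWorkflowNotSync on conflicts
    'environment_variables': [
        # illustrative mapping; the real schema is defined by core.app.segments.factory
        {'name': 'API_KEY', 'value': '...', 'value_type': 'secret'},
    ],
}
resp = requests.post('http://localhost:5001/console/api/apps/<app-uuid>/workflows/draft',
                     headers={'Authorization': 'Bearer <token>',
                              'Content-Type': 'application/json'},
                     json=body)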

View File

@@ -1,73 +0,0 @@
from flask_login import current_user
from flask_restful import Resource, reqparse
from werkzeug.exceptions import Forbidden
from controllers.console import api
from controllers.console.auth.error import ApiKeyAuthFailedError
from libs.login import login_required
from services.auth.api_key_auth_service import ApiKeyAuthService
from ..setup import setup_required
from ..wraps import account_initialization_required
class ApiKeyAuthDataSource(Resource):
@setup_required
@login_required
@account_initialization_required
def get(self):
data_source_api_key_bindings = ApiKeyAuthService.get_provider_auth_list(current_user.current_tenant_id)
if data_source_api_key_bindings:
return {
'sources': [{
'id': data_source_api_key_binding.id,
'category': data_source_api_key_binding.category,
'provider': data_source_api_key_binding.provider,
'disabled': data_source_api_key_binding.disabled,
'created_at': int(data_source_api_key_binding.created_at.timestamp()),
'updated_at': int(data_source_api_key_binding.updated_at.timestamp()),
}
for data_source_api_key_binding in
data_source_api_key_bindings]
}
return {'sources': []}
class ApiKeyAuthDataSourceBinding(Resource):
@setup_required
@login_required
@account_initialization_required
def post(self):
# The role of the current user in the table must be admin or owner
if not current_user.is_admin_or_owner:
raise Forbidden()
parser = reqparse.RequestParser()
parser.add_argument('category', type=str, required=True, nullable=False, location='json')
parser.add_argument('provider', type=str, required=True, nullable=False, location='json')
parser.add_argument('credentials', type=dict, required=True, nullable=False, location='json')
args = parser.parse_args()
ApiKeyAuthService.validate_api_key_auth_args(args)
try:
ApiKeyAuthService.create_provider_auth(current_user.current_tenant_id, args)
except Exception as e:
raise ApiKeyAuthFailedError(str(e))
return {'result': 'success'}, 200
class ApiKeyAuthDataSourceBindingDelete(Resource):
@setup_required
@login_required
@account_initialization_required
def delete(self, binding_id):
# The role of the current user in the table must be admin or owner
if not current_user.is_admin_or_owner:
raise Forbidden()
ApiKeyAuthService.delete_provider_auth(current_user.current_tenant_id, binding_id)
return {'result': 'success'}, 200
api.add_resource(ApiKeyAuthDataSource, '/api-key-auth/data-source')
api.add_resource(ApiKeyAuthDataSourceBinding, '/api-key-auth/data-source/binding')
api.add_resource(ApiKeyAuthDataSourceBindingDelete, '/api-key-auth/data-source/<uuid:binding_id>')

View File

@@ -6,7 +6,6 @@ from flask_login import current_user
from flask_restful import Resource
from werkzeug.exceptions import Forbidden
from configs import dify_config
from controllers.console import api
from libs.login import login_required
from libs.oauth_data_source import NotionOAuth
@@ -17,9 +16,11 @@ from ..wraps import account_initialization_required
def get_oauth_providers():
with current_app.app_context():
notion_oauth = NotionOAuth(client_id=dify_config.NOTION_CLIENT_ID,
client_secret=dify_config.NOTION_CLIENT_SECRET,
redirect_uri=dify_config.CONSOLE_API_URL + '/console/api/oauth/data-source/callback/notion')
notion_oauth = NotionOAuth(client_id=current_app.config.get('NOTION_CLIENT_ID'),
client_secret=current_app.config.get(
'NOTION_CLIENT_SECRET'),
redirect_uri=current_app.config.get(
'CONSOLE_API_URL') + '/console/api/oauth/data-source/callback/notion')
OAUTH_PROVIDERS = {
'notion': notion_oauth
@@ -38,10 +39,8 @@ class OAuthDataSource(Resource):
print(vars(oauth_provider))
if not oauth_provider:
return {'error': 'Invalid provider'}, 400
if dify_config.NOTION_INTEGRATION_TYPE == 'internal':
internal_secret = dify_config.NOTION_INTERNAL_SECRET
if not internal_secret:
return {'error': 'Internal secret is not set'}, 400
if current_app.config.get('NOTION_INTEGRATION_TYPE') == 'internal':
internal_secret = current_app.config.get('NOTION_INTERNAL_SECRET')
oauth_provider.save_internal_access_token(internal_secret)
return { 'data': '' }
else:
@@ -61,13 +60,13 @@ class OAuthDataSourceCallback(Resource):
if 'code' in request.args:
code = request.args.get('code')
return redirect(f'{dify_config.CONSOLE_WEB_URL}?type=notion&code={code}')
return redirect(f'{current_app.config.get("CONSOLE_WEB_URL")}?type=notion&code={code}')
elif 'error' in request.args:
error = request.args.get('error')
return redirect(f'{dify_config.CONSOLE_WEB_URL}?type=notion&error={error}')
return redirect(f'{current_app.config.get("CONSOLE_WEB_URL")}?type=notion&error={error}')
else:
return redirect(f'{dify_config.CONSOLE_WEB_URL}?type=notion&error=Access denied')
return redirect(f'{current_app.config.get("CONSOLE_WEB_URL")}?type=notion&error=Access denied')
class OAuthDataSourceBinding(Resource):

View File

@@ -1,32 +0,0 @@
from libs.exception import BaseHTTPException
class ApiKeyAuthFailedError(BaseHTTPException):
error_code = 'auth_failed'
description = "{message}"
code = 500
class InvalidEmailError(BaseHTTPException):
error_code = 'invalid_email'
description = "The email address is not valid."
code = 400
class PasswordMismatchError(BaseHTTPException):
error_code = 'password_mismatch'
description = "The passwords do not match."
code = 400
class InvalidTokenError(BaseHTTPException):
error_code = 'invalid_or_expired_token'
description = "The token is invalid or has expired."
code = 400
class PasswordResetRateLimitExceededError(BaseHTTPException):
error_code = 'password_reset_rate_limit_exceeded'
description = "Password reset rate limit exceeded. Try again later."
code = 429

View File

@@ -1,107 +0,0 @@
import base64
import logging
import secrets
from flask_restful import Resource, reqparse
from controllers.console import api
from controllers.console.auth.error import (
InvalidEmailError,
InvalidTokenError,
PasswordMismatchError,
PasswordResetRateLimitExceededError,
)
from controllers.console.setup import setup_required
from extensions.ext_database import db
from libs.helper import email as email_validate
from libs.password import hash_password, valid_password
from models.account import Account
from services.account_service import AccountService
from services.errors.account import RateLimitExceededError
class ForgotPasswordSendEmailApi(Resource):
@setup_required
def post(self):
parser = reqparse.RequestParser()
parser.add_argument('email', type=str, required=True, location='json')
args = parser.parse_args()
email = args['email']
if not email_validate(email):
raise InvalidEmailError()
account = Account.query.filter_by(email=email).first()
if account:
try:
AccountService.send_reset_password_email(account=account)
except RateLimitExceededError:
logging.warning(f"Rate limit exceeded for email: {account.email}")
raise PasswordResetRateLimitExceededError()
else:
# Return success to avoid revealing email registration status
logging.warning(f"Attempt to reset password for unregistered email: {email}")
return {"result": "success"}
class ForgotPasswordCheckApi(Resource):
@setup_required
def post(self):
parser = reqparse.RequestParser()
parser.add_argument('token', type=str, required=True, nullable=False, location='json')
args = parser.parse_args()
token = args['token']
reset_data = AccountService.get_reset_password_data(token)
if reset_data is None:
return {'is_valid': False, 'email': None}
return {'is_valid': True, 'email': reset_data.get('email')}
class ForgotPasswordResetApi(Resource):
@setup_required
def post(self):
parser = reqparse.RequestParser()
parser.add_argument('token', type=str, required=True, nullable=False, location='json')
parser.add_argument('new_password', type=valid_password, required=True, nullable=False, location='json')
parser.add_argument('password_confirm', type=valid_password, required=True, nullable=False, location='json')
args = parser.parse_args()
new_password = args['new_password']
password_confirm = args['password_confirm']
if str(new_password).strip() != str(password_confirm).strip():
raise PasswordMismatchError()
token = args['token']
reset_data = AccountService.get_reset_password_data(token)
if reset_data is None:
raise InvalidTokenError()
AccountService.revoke_reset_password_token(token)
salt = secrets.token_bytes(16)
base64_salt = base64.b64encode(salt).decode()
password_hashed = hash_password(new_password, salt)
base64_password_hashed = base64.b64encode(password_hashed).decode()
account = Account.query.filter_by(email=reset_data.get('email')).first()
account.password = base64_password_hashed
account.password_salt = base64_salt
db.session.commit()
return {'result': 'success'}
api.add_resource(ForgotPasswordSendEmailApi, '/forgot-password')
api.add_resource(ForgotPasswordCheckApi, '/forgot-password/validity')
api.add_resource(ForgotPasswordResetApi, '/forgot-password/resets')
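
ForgotPasswordResetApi stores a fresh 16-byte salt and the salted hash, both base64-encoded, on the account. The matching check at login time would decode both and recompute the hash; a sketch using the same libs.password helper, with hmac.compare_digest to avoid timing leaks:

import base64
import hmac

from libs.password import hash_password

def verify_password(candidate, stored_hash_b64, stored_salt_b64):
    # Recompute the salted hash and compare it to the stored digest.
    salt = base64.b64decode(stored_salt_b64)
    expected = base64.b64decode(stored_hash_b64)
    return hmac.compare_digest(hash_password(candidate, salt), expected)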

View File

@@ -1,15 +1,12 @@
from typing import cast
import flask_login
from flask import request
from flask import current_app, request
from flask_restful import Resource, reqparse
import services
from controllers.console import api
from controllers.console.setup import setup_required
from libs.helper import email, get_remote_ip
from libs.helper import email
from libs.password import valid_password
from models.account import Account
from services.account_service import AccountService, TenantService
@@ -37,7 +34,10 @@ class LoginApi(Resource):
if len(tenants) == 0:
return {'result': 'fail', 'data': 'workspace not found, please contact the system admin to invite you to join a workspace'}
token = AccountService.login(account, ip_address=get_remote_ip(request))
AccountService.update_last_login(account, request)
# todo: return the user info
token = AccountService.get_account_jwt_token(account)
return {'result': 'success', 'data': token}
@@ -46,9 +46,6 @@ class LogoutApi(Resource):
@setup_required
def get(self):
account = cast(Account, flask_login.current_user)
token = request.headers.get('Authorization', '').split(' ')[1]
AccountService.logout(account=account, token=token)
flask_login.logout_user()
return {'result': 'success'}
@@ -56,14 +53,14 @@ class LogoutApi(Resource):
class ResetPasswordApi(Resource):
@setup_required
def get(self):
# parser = reqparse.RequestParser()
# parser.add_argument('email', type=email, required=True, location='json')
# args = parser.parse_args()
parser = reqparse.RequestParser()
parser.add_argument('email', type=email, required=True, location='json')
args = parser.parse_args()
# import mailchimp_transactional as MailchimpTransactional
# from mailchimp_transactional.api_client import ApiClientError
# account = {'email': args['email']}
account = {'email': args['email']}
# account = AccountService.get_by_email(args['email'])
# if account is None:
# raise ValueError('Email not found')
@@ -71,28 +68,28 @@ class ResetPasswordApi(Resource):
# AccountService.update_password(account, new_password)
# todo: Send email
# MAILCHIMP_API_KEY = dify_config.MAILCHIMP_TRANSACTIONAL_API_KEY
MAILCHIMP_API_KEY = current_app.config['MAILCHIMP_TRANSACTIONAL_API_KEY']
# mailchimp = MailchimpTransactional(MAILCHIMP_API_KEY)
# message = {
# 'from_email': 'noreply@example.com',
# 'to': [{'email': account['email']}],
# 'subject': 'Reset your Dify password',
# 'html': """
# <p>Dear User,</p>
# <p>The Dify team has generated a new password for you, details as follows:</p>
# <p><strong>{new_password}</strong></p>
# <p>Please change your password to log in as soon as possible.</p>
# <p>Regards,</p>
# <p>The Dify Team</p>
# """
# }
message = {
'from_email': 'noreply@example.com',
'to': [{'email': account.email}],
'subject': 'Reset your Dify password',
'html': """
<p>Dear User,</p>
<p>The Dify team has generated a new password for you, details as follows:</p>
<p><strong>{new_password}</strong></p>
<p>Please change your password to log in as soon as possible.</p>
<p>Regards,</p>
<p>The Dify Team</p>
"""
}
# response = mailchimp.messages.send({
# 'message': message,
# # required for transactional email
# 'settings': {
# 'sandbox_mode': dify_config.MAILCHIMP_SANDBOX_MODE,
# 'sandbox_mode': current_app.config['MAILCHIMP_SANDBOX_MODE'],
# },
# })

View File

@@ -6,10 +6,8 @@ import requests
from flask import current_app, redirect, request
from flask_restful import Resource
from configs import dify_config
from constants.languages import languages
from extensions.ext_database import db
from libs.helper import get_remote_ip
from libs.oauth import GitHubOAuth, GoogleOAuth, OAuthUserInfo
from models.account import Account, AccountStatus
from services.account_service import AccountService, RegisterService, TenantService
@@ -19,24 +17,22 @@ from .. import api
def get_oauth_providers():
with current_app.app_context():
if not dify_config.GITHUB_CLIENT_ID or not dify_config.GITHUB_CLIENT_SECRET:
github_oauth = None
else:
github_oauth = GitHubOAuth(
client_id=dify_config.GITHUB_CLIENT_ID,
client_secret=dify_config.GITHUB_CLIENT_SECRET,
redirect_uri=dify_config.CONSOLE_API_URL + '/console/api/oauth/authorize/github',
)
if not dify_config.GOOGLE_CLIENT_ID or not dify_config.GOOGLE_CLIENT_SECRET:
google_oauth = None
else:
google_oauth = GoogleOAuth(
client_id=dify_config.GOOGLE_CLIENT_ID,
client_secret=dify_config.GOOGLE_CLIENT_SECRET,
redirect_uri=dify_config.CONSOLE_API_URL + '/console/api/oauth/authorize/google',
)
github_oauth = GitHubOAuth(client_id=current_app.config.get('GITHUB_CLIENT_ID'),
client_secret=current_app.config.get(
'GITHUB_CLIENT_SECRET'),
redirect_uri=current_app.config.get(
'CONSOLE_API_URL') + '/console/api/oauth/authorize/github')
OAUTH_PROVIDERS = {'github': github_oauth, 'google': google_oauth}
google_oauth = GoogleOAuth(client_id=current_app.config.get('GOOGLE_CLIENT_ID'),
client_secret=current_app.config.get(
'GOOGLE_CLIENT_SECRET'),
redirect_uri=current_app.config.get(
'CONSOLE_API_URL') + '/console/api/oauth/authorize/google')
OAUTH_PROVIDERS = {
'github': github_oauth,
'google': google_oauth
}
return OAUTH_PROVIDERS
@@ -66,7 +62,8 @@ class OAuthCallback(Resource):
token = oauth_provider.get_access_token(code)
user_info = oauth_provider.get_user_info(token)
except requests.exceptions.HTTPError as e:
logging.exception(f'An error occurred during the OAuth process with {provider}: {e.response.text}')
logging.exception(
f"An error occurred during the OAuth process with {provider}: {e.response.text}")
return {'error': 'OAuth process failed'}, 400
account = _generate_account(provider, user_info)
@@ -81,9 +78,11 @@ class OAuthCallback(Resource):
TenantService.create_owner_tenant_if_not_exist(account)
token = AccountService.login(account, ip_address=get_remote_ip(request))
AccountService.update_last_login(account, request)
return redirect(f'{dify_config.CONSOLE_WEB_URL}?console_token={token}')
token = AccountService.get_account_jwt_token(account)
return redirect(f'{current_app.config.get("CONSOLE_WEB_URL")}?console_token={token}')
def _get_account_by_openid_or_email(provider: str, user_info: OAuthUserInfo) -> Optional[Account]:
@@ -103,7 +102,11 @@ def _generate_account(provider: str, user_info: OAuthUserInfo):
# Create account
account_name = user_info.name if user_info.name else 'Dify'
account = RegisterService.register(
email=user_info.email, name=account_name, password=None, open_id=user_info.id, provider=provider
email=user_info.email,
name=account_name,
password=None,
open_id=user_info.id,
provider=provider
)
# Set interface language

View File

@@ -16,7 +16,7 @@ from extensions.ext_database import db
from fields.data_source_fields import integrate_list_fields, integrate_notion_info_list_fields
from libs.login import login_required
from models.dataset import Document
from models.source import DataSourceOauthBinding
from models.source import DataSourceBinding
from services.dataset_service import DatasetService, DocumentService
from tasks.document_indexing_sync_task import document_indexing_sync_task
@@ -29,9 +29,9 @@ class DataSourceApi(Resource):
@marshal_with(integrate_list_fields)
def get(self):
# get workspace data source integrates
data_source_integrates = db.session.query(DataSourceOauthBinding).filter(
DataSourceOauthBinding.tenant_id == current_user.current_tenant_id,
DataSourceOauthBinding.disabled == False
data_source_integrates = db.session.query(DataSourceBinding).filter(
DataSourceBinding.tenant_id == current_user.current_tenant_id,
DataSourceBinding.disabled == False
).all()
base_url = request.url_root.rstrip('/')
@@ -71,7 +71,7 @@ class DataSourceApi(Resource):
def patch(self, binding_id, action):
binding_id = str(binding_id)
action = str(action)
data_source_binding = DataSourceOauthBinding.query.filter_by(
data_source_binding = DataSourceBinding.query.filter_by(
id=binding_id
).first()
if data_source_binding is None:
@@ -124,7 +124,7 @@ class DataSourceNotionListApi(Resource):
data_source_info = json.loads(document.data_source_info)
exist_page_ids.append(data_source_info['notion_page_id'])
# get all authorized pages
data_source_bindings = DataSourceOauthBinding.query.filter_by(
data_source_bindings = DataSourceBinding.query.filter_by(
tenant_id=current_user.current_tenant_id,
provider='notion',
disabled=False
@@ -163,12 +163,12 @@ class DataSourceNotionApi(Resource):
def get(self, workspace_id, page_id, page_type):
workspace_id = str(workspace_id)
page_id = str(page_id)
data_source_binding = DataSourceOauthBinding.query.filter(
data_source_binding = DataSourceBinding.query.filter(
db.and_(
DataSourceOauthBinding.tenant_id == current_user.current_tenant_id,
DataSourceOauthBinding.provider == 'notion',
DataSourceOauthBinding.disabled == False,
DataSourceOauthBinding.source_info['workspace_id'] == f'"{workspace_id}"'
DataSourceBinding.tenant_id == current_user.current_tenant_id,
DataSourceBinding.provider == 'notion',
DataSourceBinding.disabled == False,
DataSourceBinding.source_info['workspace_id'] == f'"{workspace_id}"'
)
).first()
if not data_source_binding:

View File

@@ -1,15 +1,14 @@
import flask_restful
from flask import request
from flask import current_app, request
from flask_login import current_user
from flask_restful import Resource, marshal, marshal_with, reqparse
from werkzeug.exceptions import Forbidden, NotFound
import services
from configs import dify_config
from controllers.console import api
from controllers.console.apikey import api_key_fields, api_key_list
from controllers.console.app.error import ProviderNotInitializeError
from controllers.console.datasets.error import DatasetInUseError, DatasetNameDuplicateError, IndexingEstimateError
from controllers.console.datasets.error import DatasetNameDuplicateError
from controllers.console.setup import setup_required
from controllers.console.wraps import account_initialization_required
from core.errors.error import LLMBadRequestError, ProviderTokenNotInitError
@@ -18,7 +17,6 @@ from core.model_runtime.entities.model_entities import ModelType
from core.provider_manager import ProviderManager
from core.rag.datasource.vdb.vector_type import VectorType
from core.rag.extractor.entity.extract_setting import ExtractSetting
from core.rag.retrieval.retrival_methods import RetrievalMethod
from extensions.ext_database import db
from fields.app_fields import related_app_list
from fields.dataset_fields import dataset_detail_fields, dataset_query_detail_fields
@@ -26,7 +24,7 @@ from fields.document_fields import document_status_fields
from libs.login import login_required
from models.dataset import Dataset, Document, DocumentSegment
from models.model import ApiToken, UploadFile
from services.dataset_service import DatasetPermissionService, DatasetService, DocumentService
from services.dataset_service import DatasetService, DocumentService
def _validate_name(name):
@@ -86,12 +84,6 @@ class DatasetListApi(Resource):
else:
item['embedding_available'] = True
if item.get('permission') == 'partial_members':
part_users_list = DatasetPermissionService.get_dataset_partial_member_list(item['id'])
item.update({'partial_member_list': part_users_list})
else:
item.update({'partial_member_list': []})
response = {
'data': data,
'has_more': len(datasets) == limit,
@@ -115,8 +107,8 @@ class DatasetListApi(Resource):
help='Invalid indexing technique.')
args = parser.parse_args()
# The role of the current user in the ta table must be admin, owner, or editor, or dataset_operator
if not current_user.is_dataset_editor:
# The role of the current user in the ta table must be admin or owner
if not current_user.is_admin_or_owner:
raise Forbidden()
try:
@@ -147,10 +139,6 @@ class DatasetApi(Resource):
except services.errors.account.NoPermissionError as e:
raise Forbidden(str(e))
data = marshal(dataset, dataset_detail_fields)
if data.get('permission') == 'partial_members':
part_users_list = DatasetPermissionService.get_dataset_partial_member_list(dataset_id_str)
data.update({'partial_member_list': part_users_list})
# check embedding setting
provider_manager = ProviderManager()
configurations = provider_manager.get_configurations(
@@ -174,11 +162,6 @@ class DatasetApi(Resource):
data['embedding_available'] = False
else:
data['embedding_available'] = True
if data.get('permission') == 'partial_members':
part_users_list = DatasetPermissionService.get_dataset_partial_member_list(dataset_id_str)
data.update({'partial_member_list': part_users_list})
return data, 200
@setup_required
@@ -189,6 +172,8 @@ class DatasetApi(Resource):
dataset = DatasetService.get_dataset(dataset_id_str)
if dataset is None:
raise NotFound("Dataset not found.")
# check user's model setting
DatasetService.check_dataset_model_setting(dataset)
parser = reqparse.RequestParser()
parser.add_argument('name', nullable=False,
@@ -202,28 +187,17 @@ class DatasetApi(Resource):
nullable=True,
help='Invalid indexing technique.')
parser.add_argument('permission', type=str, location='json', choices=(
'only_me', 'all_team_members', 'partial_members'), help='Invalid permission.'
)
'only_me', 'all_team_members'), help='Invalid permission.')
parser.add_argument('embedding_model', type=str,
location='json', help='Invalid embedding model.')
parser.add_argument('embedding_model_provider', type=str,
location='json', help='Invalid embedding model provider.')
parser.add_argument('retrieval_model', type=dict, location='json', help='Invalid retrieval model.')
parser.add_argument('partial_member_list', type=list, location='json', help='Invalid parent user list.')
args = parser.parse_args()
data = request.get_json()
# check embedding model setting
if data.get('indexing_technique') == 'high_quality':
DatasetService.check_embedding_model_setting(dataset.tenant_id,
data.get('embedding_model_provider'),
data.get('embedding_model')
)
# The role of the current user in the ta table must be admin, owner, editor, or dataset_operator
DatasetPermissionService.check_permission(
current_user, dataset, data.get('permission'), data.get('partial_member_list')
)
# The role of the current user in the ta table must be admin or owner
if not current_user.is_admin_or_owner:
raise Forbidden()
dataset = DatasetService.update_dataset(
dataset_id_str, args, current_user)
@@ -231,21 +205,7 @@ class DatasetApi(Resource):
if dataset is None:
raise NotFound("Dataset not found.")
result_data = marshal(dataset, dataset_detail_fields)
tenant_id = current_user.current_tenant_id
if data.get('partial_member_list') and data.get('permission') == 'partial_members':
DatasetPermissionService.update_partial_member_list(
tenant_id, dataset_id_str, data.get('partial_member_list')
)
# clear partial member list when permission is only_me or all_team_members
elif data.get('permission') == 'only_me' or data.get('permission') == 'all_team_members':
DatasetPermissionService.clear_partial_member_list(dataset_id_str)
partial_member_list = DatasetPermissionService.get_dataset_partial_member_list(dataset_id_str)
result_data.update({'partial_member_list': partial_member_list})
return result_data, 200
return marshal(dataset, dataset_detail_fields), 200
@setup_required
@login_required
@@ -253,28 +213,15 @@ class DatasetApi(Resource):
def delete(self, dataset_id):
dataset_id_str = str(dataset_id)
# The role of the current user in the ta table must be admin, owner, or editor
if not current_user.is_editor or current_user.is_dataset_operator:
# The role of the current user in the ta table must be admin or owner
if not current_user.is_admin_or_owner:
raise Forbidden()
try:
if DatasetService.delete_dataset(dataset_id_str, current_user):
DatasetPermissionService.clear_partial_member_list(dataset_id_str)
return {'result': 'success'}, 204
else:
raise NotFound("Dataset not found.")
except services.errors.dataset.DatasetInUseError:
raise DatasetInUseError()
if DatasetService.delete_dataset(dataset_id_str, current_user):
return {'result': 'success'}, 204
else:
raise NotFound("Dataset not found.")
class DatasetUseCheckApi(Resource):
@setup_required
@login_required
@account_initialization_required
def get(self, dataset_id):
dataset_id_str = str(dataset_id)
dataset_is_using = DatasetService.dataset_use_check(dataset_id_str)
return {'is_using': dataset_is_using}, 200
class DatasetQueryApi(Resource):
@@ -365,22 +312,6 @@ class DatasetIndexingEstimateApi(Resource):
document_model=args['doc_form']
)
extract_settings.append(extract_setting)
elif args['info_list']['data_source_type'] == 'website_crawl':
website_info_list = args['info_list']['website_info_list']
for url in website_info_list['urls']:
extract_setting = ExtractSetting(
datasource_type="website_crawl",
website_info={
"provider": website_info_list['provider'],
"job_id": website_info_list['job_id'],
"url": url,
"tenant_id": current_user.current_tenant_id,
"mode": 'crawl',
"only_main_content": website_info_list['only_main_content']
},
document_model=args['doc_form']
)
extract_settings.append(extract_setting)
else:
raise ValueError('Data source type not support')
indexing_runner = IndexingRunner()
@@ -395,8 +326,6 @@ class DatasetIndexingEstimateApi(Resource):
"in the Settings -> Model Provider.")
except ProviderTokenNotInitError as ex:
raise ProviderNotInitializeError(ex.description)
except Exception as e:
raise IndexingEstimateError(str(e))
return response, 200
@@ -537,7 +466,7 @@ class DatasetApiBaseUrlApi(Resource):
@account_initialization_required
def get(self):
return {
'api_base_url': (dify_config.SERVICE_API_URL if dify_config.SERVICE_API_URL
'api_base_url': (current_app.config['SERVICE_API_URL'] if current_app.config['SERVICE_API_URL']
else request.host_url.rstrip('/')) + '/v1'
}
@@ -547,20 +476,19 @@ class DatasetRetrievalSettingApi(Resource):
@login_required
@account_initialization_required
def get(self):
vector_type = dify_config.VECTOR_STORE
vector_type = current_app.config['VECTOR_STORE']
match vector_type:
case VectorType.MILVUS | VectorType.RELYT | VectorType.PGVECTOR | VectorType.TIDB_VECTOR | VectorType.CHROMA | VectorType.TENCENT:
case VectorType.MILVUS | VectorType.RELYT | VectorType.PGVECTOR | VectorType.TIDB_VECTOR | VectorType.CHROMA:
return {
'retrieval_method': [
RetrievalMethod.SEMANTIC_SEARCH.value
'semantic_search'
]
}
case VectorType.QDRANT | VectorType.WEAVIATE | VectorType.OPENSEARCH | VectorType.ANALYTICDB | VectorType.MYSCALE | VectorType.ORACLE:
case VectorType.QDRANT | VectorType.WEAVIATE:
return {
'retrieval_method': [
RetrievalMethod.SEMANTIC_SEARCH.value,
RetrievalMethod.FULL_TEXT_SEARCH.value,
RetrievalMethod.HYBRID_SEARCH.value,
'semantic_search', 'full_text_search', 'hybrid_search'
]
}
case _:
@@ -573,25 +501,22 @@ class DatasetRetrievalSettingMockApi(Resource):
@account_initialization_required
def get(self, vector_type):
match vector_type:
case VectorType.MILVUS | VectorType.RELYT | VectorType.PGVECTOR | VectorType.TIDB_VECTOR | VectorType.CHROMA | VectorType.TENCENT:
case VectorType.MILVUS | VectorType.RELYT | VectorType.PGVECTOR | VectorType.TIDB_VECTOR | VectorType.CHROMA:
return {
'retrieval_method': [
RetrievalMethod.SEMANTIC_SEARCH.value
'semantic_search'
]
}
case VectorType.QDRANT | VectorType.WEAVIATE | VectorType.OPENSEARCH | VectorType.ANALYTICDB | VectorType.MYSCALE | VectorType.ORACLE:
case VectorType.QDRANT | VectorType.WEAVIATE:
return {
'retrieval_method': [
RetrievalMethod.SEMANTIC_SEARCH.value,
RetrievalMethod.FULL_TEXT_SEARCH.value,
RetrievalMethod.HYBRID_SEARCH.value,
'semantic_search', 'full_text_search', 'hybrid_search'
]
}
case _:
raise ValueError(f"Unsupported vector db type {vector_type}.")
class DatasetErrorDocs(Resource):
@setup_required
@login_required
@@ -609,30 +534,8 @@ class DatasetErrorDocs(Resource):
}, 200
class DatasetPermissionUserListApi(Resource):
@setup_required
@login_required
@account_initialization_required
def get(self, dataset_id):
dataset_id_str = str(dataset_id)
dataset = DatasetService.get_dataset(dataset_id_str)
if dataset is None:
raise NotFound("Dataset not found.")
try:
DatasetService.check_dataset_permission(dataset, current_user)
except services.errors.account.NoPermissionError as e:
raise Forbidden(str(e))
partial_members_list = DatasetPermissionService.get_dataset_partial_member_list(dataset_id_str)
return {
'data': partial_members_list,
}, 200
api.add_resource(DatasetListApi, '/datasets')
api.add_resource(DatasetApi, '/datasets/<uuid:dataset_id>')
api.add_resource(DatasetUseCheckApi, '/datasets/<uuid:dataset_id>/use-check')
api.add_resource(DatasetQueryApi, '/datasets/<uuid:dataset_id>/queries')
api.add_resource(DatasetErrorDocs, '/datasets/<uuid:dataset_id>/error-docs')
api.add_resource(DatasetIndexingEstimateApi, '/datasets/indexing-estimate')
@@ -643,4 +546,3 @@ api.add_resource(DatasetApiDeleteApi, '/datasets/api-keys/<uuid:api_key_id>')
api.add_resource(DatasetApiBaseUrlApi, '/datasets/api-base-info')
api.add_resource(DatasetRetrievalSettingApi, '/datasets/retrieval-setting')
api.add_resource(DatasetRetrievalSettingMockApi, '/datasets/retrieval-setting/<string:vector_type>')
api.add_resource(DatasetPermissionUserListApi, '/datasets/<uuid:dataset_id>/permission-part-users')
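
The dataset update path now accepts permission='partial_members' together with a partial_member_list, validated through DatasetPermissionService and echoed back on reads. A hedged update sketch — the URL, token, HTTP verb, and member-entry shape are assumptions; the field names follow the parser above:

import requests

resp = requests.patch(
    'http://localhost:5001/console/api/datasets/<dataset-uuid>',  # route from DatasetApi
    headers={'Authorization': 'Bearer <token>'},
    json={
        'name': 'support-kb',
        'indexing_technique': 'high_quality',
        'permission': 'partial_members',
        # illustrative entries; the service validates the real schema
        'partial_member_list': ['<account-uuid-1>', '<account-uuid-2>'],
    })
print(resp.json().get('partial_member_list'))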

View File

@@ -20,7 +20,6 @@ from controllers.console.datasets.error import (
ArchivedDocumentImmutableError,
DocumentAlreadyFinishedError,
DocumentIndexingError,
IndexingEstimateError,
InvalidActionError,
InvalidMetadataError,
)
@@ -227,8 +226,8 @@ class DatasetDocumentListApi(Resource):
if not dataset:
raise NotFound('Dataset not found.')
# The role of the current user in the ta table must be admin, owner, or editor
if not current_user.is_dataset_editor:
# The role of the current user in the ta table must be admin or owner
if not current_user.is_admin_or_owner:
raise Forbidden()
try:
@@ -279,8 +278,8 @@ class DatasetInitApi(Resource):
@marshal_with(dataset_and_document_fields)
@cloud_edition_billing_resource_check('vector_space')
def post(self):
# The role of the current user in the ta table must be admin, owner, or editor
if not current_user.is_editor:
# The role of the current user in the ta table must be admin or owner
if not current_user.is_admin_or_owner:
raise Forbidden()
parser = reqparse.RequestParser()
@@ -294,11 +293,6 @@ class DatasetInitApi(Resource):
parser.add_argument('retrieval_model', type=dict, required=False, nullable=False,
location='json')
args = parser.parse_args()
# The role of the current user in the ta table must be admin, owner, or editor, or dataset_operator
if not current_user.is_dataset_editor:
raise Forbidden()
if args['indexing_technique'] == 'high_quality':
try:
model_manager = ModelManager()
@@ -394,8 +388,6 @@ class DocumentIndexingEstimateApi(DocumentResource):
"in the Settings -> Model Provider.")
except ProviderTokenNotInitError as ex:
raise ProviderNotInitializeError(ex.description)
except Exception as e:
raise IndexingEstimateError(str(e))
return response
@@ -473,20 +465,6 @@ class DocumentBatchIndexingEstimateApi(DocumentResource):
document_model=document.doc_form
)
extract_settings.append(extract_setting)
elif document.data_source_type == 'website_crawl':
extract_setting = ExtractSetting(
datasource_type="website_crawl",
website_info={
"provider": data_source_info['provider'],
"job_id": data_source_info['job_id'],
"url": data_source_info['url'],
"tenant_id": current_user.current_tenant_id,
"mode": data_source_info['mode'],
"only_main_content": data_source_info['only_main_content']
},
document_model=document.doc_form
)
extract_settings.append(extract_setting)
else:
raise ValueError('Data source type not support')
@@ -501,8 +479,6 @@ class DocumentBatchIndexingEstimateApi(DocumentResource):
"in the Settings -> Model Provider.")
except ProviderTokenNotInitError as ex:
raise ProviderNotInitializeError(ex.description)
except Exception as e:
raise IndexingEstimateError(str(e))
return response
@@ -656,8 +632,8 @@ class DocumentProcessingApi(DocumentResource):
document_id = str(document_id)
document = self.get_document(dataset_id, document_id)
# The role of the current user in the ta table must be admin, owner, or editor
if not current_user.is_editor:
# The role of the current user in the ta table must be admin or owner
if not current_user.is_admin_or_owner:
raise Forbidden()
if action == "pause":
@@ -720,8 +696,8 @@ class DocumentMetadataApi(DocumentResource):
doc_type = req_data.get('doc_type')
doc_metadata = req_data.get('doc_metadata')
# The role of the current user in the ta table must be admin, owner, or editor
if not current_user.is_editor:
# The role of the current user in the ta table must be admin or owner
if not current_user.is_admin_or_owner:
raise Forbidden()
if doc_type is None or doc_metadata is None:
@@ -762,19 +738,15 @@ class DocumentStatusApi(DocumentResource):
dataset = DatasetService.get_dataset(dataset_id)
if dataset is None:
raise NotFound("Dataset not found.")
# The role of the current user in the ta table must be admin, owner, or editor
if not current_user.is_dataset_editor:
raise Forbidden()
# check user's model setting
DatasetService.check_dataset_model_setting(dataset)
# check user's permission
DatasetService.check_dataset_permission(dataset, current_user)
document = self.get_document(dataset_id, document_id)
# The role of the current user in the ta table must be admin or owner
if not current_user.is_admin_or_owner:
raise Forbidden()
indexing_cache_key = 'document_{}_indexing'.format(document.id)
cache_result = redis_client.get(indexing_cache_key)
if cache_result is not None:
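The cache lookup at the end of this hunk is an in-flight guard: a short-lived Redis key marks a document as currently indexing, and status changes are refused while the key exists. A minimal sketch of the guard, where the key format comes from the diff but the TTL, value, and error type are assumptions:

import redis

redis_client = redis.Redis()  # connection details are an assumption

def guard_and_mark_indexing(document_id: str, ttl_seconds: int = 600) -> None:
    indexing_cache_key = 'document_{}_indexing'.format(document_id)
    # Refuse concurrent status changes while an indexing run is in flight.
    if redis_client.get(indexing_cache_key) is not None:
        raise RuntimeError("Document is being indexed, please try again later")
    # Mark the document as indexing; the key expires on its own.
    redis_client.setex(indexing_cache_key, ttl_seconds, 1)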
@@ -964,11 +936,10 @@ class DocumentRenameApi(DocumentResource):
@account_initialization_required
@marshal_with(document_fields)
def post(self, dataset_id, document_id):
# The role of the current user in the ta table must be admin, owner, editor, or dataset_operator
if not current_user.is_dataset_editor:
# The role of the current user in the ta table must be admin or owner
if not current_user.is_admin_or_owner:
raise Forbidden()
dataset = DatasetService.get_dataset(dataset_id)
DatasetService.check_dataset_operator_permission(current_user, dataset)
parser = reqparse.RequestParser()
parser.add_argument('name', type=str, required=True, nullable=False, location='json')
args = parser.parse_args()
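reqparse does the body validation in this endpoint: required=True rejects a missing field outright, while nullable=False additionally rejects an explicit null. A small self-contained illustration of those two switches (the app and payload exist only for the demo):

from flask import Flask
from flask_restful import reqparse

app = Flask(__name__)

parser = reqparse.RequestParser()
parser.add_argument('name', type=str, required=True, nullable=False, location='json')

with app.test_request_context(json={'name': 'renamed-document'}):
    args = parser.parse_args()
    print(args['name'])  # -> renamed-document
# A body of {} or {'name': None} would instead be rejected with a 400.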
@@ -981,33 +952,6 @@ class DocumentRenameApi(DocumentResource):
return document
class WebsiteDocumentSyncApi(DocumentResource):
@setup_required
@login_required
@account_initialization_required
def get(self, dataset_id, document_id):
"""sync website document."""
dataset_id = str(dataset_id)
dataset = DatasetService.get_dataset(dataset_id)
if not dataset:
raise NotFound('Dataset not found.')
document_id = str(document_id)
document = DocumentService.get_document(dataset.id, document_id)
if not document:
raise NotFound('Document not found.')
if document.tenant_id != current_user.current_tenant_id:
raise Forbidden('No permission.')
if document.data_source_type != 'website_crawl':
raise ValueError('Document is not a website document.')
# 403 if document is archived
if DocumentService.check_archived(document):
raise ArchivedDocumentImmutableError()
# sync document
DocumentService.sync_website_document(dataset_id, document)
return {'result': 'success'}, 200
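For reference, a client call against the resource deleted above would have looked roughly like this; the path comes from the api.add_resource line removed at the end of the file, while the base URL and auth header are assumptions for the sketch:

import requests

BASE = "http://localhost:5001/console/api"  # assumed local console API address
dataset_id = "your-dataset-uuid"
document_id = "your-document-uuid"

resp = requests.get(
    f"{BASE}/datasets/{dataset_id}/documents/{document_id}/website-sync",
    headers={"Authorization": "Bearer your-token"},  # auth scheme is an assumption
)
print(resp.status_code)  # expect 200 with {'result': 'success'}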
api.add_resource(GetProcessRuleApi, '/datasets/process-rule')
api.add_resource(DatasetDocumentListApi,
'/datasets/<uuid:dataset_id>/documents')
@@ -1036,5 +980,3 @@ api.add_resource(DocumentRecoverApi, '/datasets/<uuid:dataset_id>/documents/<uui
api.add_resource(DocumentRetryApi, '/datasets/<uuid:dataset_id>/retry')
api.add_resource(DocumentRenameApi,
'/datasets/<uuid:dataset_id>/documents/<uuid:document_id>/rename')
api.add_resource(WebsiteDocumentSyncApi, '/datasets/<uuid:dataset_id>/documents/<uuid:document_id>/website-sync')


@@ -75,7 +75,7 @@ class DatasetDocumentSegmentListApi(Resource):
)
if last_id is not None:
last_segment = db.session.get(DocumentSegment, str(last_id))
last_segment = DocumentSegment.query.get(str(last_id))
if last_segment:
query = query.filter(
DocumentSegment.position > last_segment.position)
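The adjacent pair of lookup lines is the substantive change in this hunk: db.session.get(Model, pk) is the SQLAlchemy 2.0-style primary-key lookup, while Model.query.get(pk) rides on the legacy Query API that SQLAlchemy marked as legacy in 1.4. Both return the instance or None. A minimal comparison using plain SQLAlchemy 2.0, so the setup differs from Dify's Flask-SQLAlchemy wiring:

from sqlalchemy import Integer, String, create_engine
from sqlalchemy.orm import DeclarativeBase, Mapped, Session, mapped_column

class Base(DeclarativeBase):
    pass

class Segment(Base):
    __tablename__ = "segments"
    id: Mapped[int] = mapped_column(Integer, primary_key=True)
    text: Mapped[str] = mapped_column(String)

engine = create_engine("sqlite://")  # in-memory database for the demo
Base.metadata.create_all(engine)

with Session(engine) as session:
    session.add(Segment(id=1, text="hello"))
    session.commit()
    # 2.0 style: identity-map-aware primary-key lookup; returns None if absent.
    seg = session.get(Segment, 1)
    print(seg.text)  # -> hello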
@@ -126,8 +126,8 @@ class DatasetDocumentSegmentApi(Resource):
raise NotFound('Dataset not found.')
# check user's model setting
DatasetService.check_dataset_model_setting(dataset)
# The role of the current user in the ta table must be admin, owner, or editor
if not current_user.is_editor:
# The role of the current user in the ta table must be admin or owner
if not current_user.is_admin_or_owner:
raise Forbidden()
try:
@@ -223,7 +223,8 @@ class DatasetDocumentSegmentAddApi(Resource):
document = DocumentService.get_document(dataset_id, document_id)
if not document:
raise NotFound('Document not found.')
if not current_user.is_editor:
# The role of the current user in the ta table must be admin or owner
if not current_user.is_admin_or_owner:
raise Forbidden()
# check embedding model setting
if dataset.indexing_technique == 'high_quality':
@@ -301,8 +302,8 @@ class DatasetDocumentSegmentUpdateApi(Resource):
).first()
if not segment:
raise NotFound('Segment not found.')
# The role of the current user in the ta table must be admin, owner, or editor
if not current_user.is_editor:
# The role of the current user in the ta table must be admin or owner
if not current_user.is_admin_or_owner:
raise Forbidden()
try:
DatasetService.check_dataset_permission(dataset, current_user)
@@ -346,7 +347,7 @@ class DatasetDocumentSegmentUpdateApi(Resource):
if not segment:
raise NotFound('Segment not found.')
# The role of the current user in the ta table must be admin or owner
if not current_user.is_editor:
if not current_user.is_admin_or_owner:
raise Forbidden()
try:
DatasetService.check_dataset_permission(dataset, current_user)

Some files were not shown because too many files have changed in this diff.