Compare commits

..

42 Commits

Author SHA1 Message Date
jyong
6e6604d28c delete remove tsne position 2024-07-02 14:55:05 +08:00
jyong
e7b792f537 delete remove tsne position 2024-07-02 14:52:41 +08:00
Jimm Chen
b8999e367a Ensure *.sh are LF-style, so that they can be used directly by Docker for Windows (#5793) 2024-07-02 13:38:18 +08:00
Joe
59ad091e69 feat: add export permission (#5841) 2024-07-02 13:37:37 +08:00
Joe
598e030a7e feat: update LangfuseConfig host config (#5846) 2024-07-02 13:14:07 +08:00
fanghongtai
774a17cedf fix:unable to select workplace at the bottom (#5785)
Co-authored-by: wxfanghongtai <wxfanghongtai@gf.com.cn>
2024-07-02 13:10:50 +08:00
Joel
d889e1b233 fix: output variable name may be duplicate (#5845) 2024-07-02 13:02:59 +08:00
-LAN-
32d85fb896 chore: Update some type hints in config. (#5833) 2024-07-02 08:50:02 +08:00
Jyong
af308b99a3 sync delete app table record when delete app (#5819) 2024-07-02 08:48:29 +08:00
Nam Vu
49d9c60a53 chore: update i18n for #5811 (#5838) 2024-07-02 08:47:53 +08:00
Chenhe Gu
ed83df972f Chore/remove extra docker middleware variables (#5836)
Co-authored-by: dahuahua <38651850@qq.com>
2024-07-01 23:34:00 +08:00
Chenhe Gu
3124728e03 Fix/docker nginx https config (#5832)
Co-authored-by: dahuahua <38651850@qq.com>
2024-07-01 23:15:26 +08:00
掌柜的
af469ea5bd add provision scripts repo link for azure to readme (#5820) 2024-07-01 20:44:47 +08:00
quicksand
2a27568537 Enhance: tools wecom bot support markdown message (#5791) 2024-07-01 18:19:47 +08:00
hymvp
1d3e96ffa6 add support oracle oci object storage (#5616) 2024-07-01 17:21:44 +08:00
Joe
b7b1396c51 fix: ops trace slow db (#5812) 2024-07-01 17:09:53 +08:00
Jyong
71bcf75d9a Feat/add delete knowledge confirm (#5810) 2024-07-01 17:06:51 +08:00
Charles Zhou
850c2273ee feat: Nominatim OpenStreetMap search tool (#5789) 2024-07-01 16:34:32 +08:00
Joel
78d41a27cc feat: knowledge used by app can still be removed (#5811) 2024-07-01 16:14:49 +08:00
takatost
0f8625cac2 fix: ssrf proxy and nginx entrypoint command in docker-compose files (#5803) 2024-07-01 14:48:27 +08:00
Charles Zhou
cb09dbef66 feat: correctly delete applications using Celery workers (#5787) 2024-07-01 14:21:17 +08:00
zxhlyh
5692f9b33b fix: signin url (#5800) 2024-07-01 14:13:32 +08:00
longzhihun
fdfbbde10d [seanguo] modify bedrock Claude3 invoke method to converse API (#5768)
Co-authored-by: Chenhe Gu <guchenhe@gmail.com>
2024-07-01 04:36:13 +08:00
Chenhe Gu
a27462d58b Chore/improve docker compose (#5784) 2024-07-01 01:11:33 +08:00
takatost
91da622df5 chore: merge CODE_EXECUTION_API_KEY into SANDBOX_API_KEY in the docker-compose.yaml (#5779) 2024-06-30 21:39:48 +08:00
takatost
373b5047fd chore: fulfill default value in docker compose yaml (#5778) 2024-06-30 21:17:53 +08:00
takatost
36610b6acf fix: can’t change exec permissions after mounting docker-entrypoint.sh for nginx and ssrf-proxy services causing startup failures (#5776) 2024-06-30 20:18:53 +08:00
takatost
eab0ac3a13 chore: remove port expose in docker compose (#5754)
Co-authored-by: Chenhe Gu <guchenhe@gmail.com>
2024-06-30 10:31:31 +08:00
Joe
f637ae4794 fix: langsmith message_trace end_user_data session_id error (#5759) 2024-06-30 01:12:16 +08:00
Joe
ffb07eb24b fix: workflow trace none type error (#5758) 2024-06-29 23:32:52 +08:00
Yeuoly
f101fcd0e7 fix: missing process data in parameter extractor (#5755) 2024-06-29 23:29:43 +08:00
Chenhe Gu
fc0f75d13b Docs/add docker dotenv notes (#5750) 2024-06-29 22:09:59 +08:00
takatost
1e045a0187 fix: slow sql of ops tracing (#5749) 2024-06-29 20:28:30 +08:00
takatost
cdf64d4ee2 Update docker-compose.yaml (#5745) 2024-06-29 18:35:32 +08:00
takatost
8fd75e6965 bump to 0.6.12-fix1 (#5743) 2024-06-29 17:43:20 +08:00
takatost
0b8faade6f fix: env SMTP_PORT is empty caused err when launching (#5742) 2024-06-29 17:34:12 +08:00
takatost
d56cedfc67 fix: app config does not use empty string in the env (#5741) 2024-06-29 17:15:25 +08:00
takatost
906857b28a fix: couldn't log in or resetup after a failed setup (#5739) 2024-06-29 17:07:21 +08:00
Nam Vu
9513155fa4 chore: support both $$ and $ latex format (#5723) 2024-06-29 11:24:25 +08:00
Chenhe Gu
a6356be348 Rename README to README.md (#5727) 2024-06-29 00:53:14 +08:00
Chenhe Gu
f33ef92f0c Chore/set entrypoint scripts permissions (#5726) 2024-06-29 00:48:34 +08:00
Chenhe Gu
d435230059 add README for new docker/ directory (#5724) 2024-06-29 00:29:44 +08:00
88 changed files with 1701 additions and 724 deletions

View File

@@ -3,7 +3,7 @@
cd web && npm install
echo 'alias start-api="cd /workspaces/dify/api && flask run --host 0.0.0.0 --port=5001 --debug"' >> ~/.bashrc
echo 'alias start-worker="cd /workspaces/dify/api && celery -A app.celery worker -P gevent -c 1 --loglevel INFO -Q dataset,generation,mail,ops_trace"' >> ~/.bashrc
echo 'alias start-worker="cd /workspaces/dify/api && celery -A app.celery worker -P gevent -c 1 --loglevel INFO -Q dataset,generation,mail,ops_trace,app_deletion"' >> ~/.bashrc
echo 'alias start-web="cd /workspaces/dify/web && npm run dev"' >> ~/.bashrc
echo 'alias start-containers="cd /workspaces/dify/docker && docker-compose -f docker-compose.middleware.yaml -p dify up -d"' >> ~/.bashrc

7
.gitattributes vendored Normal file
View File

@@ -0,0 +1,7 @@
# Ensure that .sh scripts use LF as line separator, even if they are checked out
# to a Windows (NTFS) file-system, by a user of Docker for Windows.
# These .sh scripts will be run from the Container after `docker compose up -d`.
# If they appear to be CRLF style, Dash from the Container will fail to execute
# them.
*.sh text eol=lf

View File

@@ -60,6 +60,9 @@ jobs:
cp docker/.env.example docker/.env
cp docker/middleware.env.example docker/middleware.env
- name: Expose Service Ports
run: sh .github/workflows/expose_service_ports.sh
- name: Set up Sandbox
uses: hoverkraft-tech/compose-action@v2.0.0
with:

View File

@@ -38,6 +38,11 @@ jobs:
- name: Install dependencies
run: poetry install -C api
- name: Prepare middleware env
run: |
cd docker
cp middleware.env.example middleware.env
- name: Set up Middlewares
uses: hoverkraft-tech/compose-action@v2.0.0
with:

10
.github/workflows/expose_service_ports.sh vendored Executable file
View File

@@ -0,0 +1,10 @@
#!/bin/bash
# Expose vector-database service ports to the host in docker/docker-compose.yaml
# so CI integration tests can reach them directly.
# Each `yq eval ... -i` edits the compose file in place, appending a
# "host:container" mapping to the named service's `ports` list.
yq eval '.services.weaviate.ports += ["8080:8080"]' -i docker/docker-compose.yaml
yq eval '.services.qdrant.ports += ["6333:6333"]' -i docker/docker-compose.yaml
yq eval '.services.chroma.ports += ["8000:8000"]' -i docker/docker-compose.yaml
# Service names containing a hyphen must be addressed with bracket syntax.
yq eval '.services["milvus-standalone"].ports += ["19530:19530"]' -i docker/docker-compose.yaml
# pgvector and pgvecto-rs both listen on 5432 inside their containers, so they
# are mapped to distinct host ports (5433 / 5431) to avoid a conflict.
yq eval '.services.pgvector.ports += ["5433:5432"]' -i docker/docker-compose.yaml
yq eval '.services["pgvecto-rs"].ports += ["5431:5432"]' -i docker/docker-compose.yaml
echo "Ports exposed for sandbox, weaviate, qdrant, chroma, milvus, pgvector, pgvecto-rs."

2
.vscode/launch.json vendored
View File

@@ -48,7 +48,7 @@
"--loglevel",
"info",
"-Q",
"dataset,generation,mail,ops_trace"
"dataset,generation,mail,ops_trace,app_deletion"
]
},
]

View File

@@ -192,6 +192,11 @@ If you'd like to configure a highly-available setup, there are community-contrib
- [Helm Chart by @BorisPolonsky](https://github.com/BorisPolonsky/dify-helm)
- [YAML file by @Winson-030](https://github.com/Winson-030/dify-kubernetes)
#### Using Terraform for Deployment
##### Azure Global
Deploy Dify to Azure with a single click using [terraform](https://www.terraform.io/).
- [Azure Terraform by @nikawang](https://github.com/nikawang/dify-azure-terraform)
## Contributing

View File

@@ -175,6 +175,12 @@ docker compose up -d
- [رسم بياني Helm من قبل @BorisPolonsky](https://github.com/BorisPolonsky/dify-helm)
- [ملف YAML من قبل @Winson-030](https://github.com/Winson-030/dify-kubernetes)
#### استخدام Terraform للتوزيع
##### Azure Global
استخدم [terraform](https://www.terraform.io/) لنشر Dify على Azure بنقرة واحدة.
- [Azure Terraform بواسطة @nikawang](https://github.com/nikawang/dify-azure-terraform)
## المساهمة

View File

@@ -197,6 +197,12 @@ docker compose up -d
- [Helm Chart by @BorisPolonsky](https://github.com/BorisPolonsky/dify-helm)
- [YAML 文件 by @Winson-030](https://github.com/Winson-030/dify-kubernetes)
#### 使用 Terraform 部署
##### Azure Global
使用 [terraform](https://www.terraform.io/) 一键部署 Dify 到 Azure。
- [Azure Terraform by @nikawang](https://github.com/nikawang/dify-azure-terraform)
## Star History
[![Star History Chart](https://api.star-history.com/svg?repos=langgenius/dify&type=Date)](https://star-history.com/#langgenius/dify&Date)

View File

@@ -199,6 +199,12 @@ Si desea configurar una configuración de alta disponibilidad, la comunidad prop
- [Gráfico Helm por @BorisPolonsky](https://github.com/BorisPolonsky/dify-helm)
- [Ficheros YAML por @Winson-030](https://github.com/Winson-030/dify-kubernetes)
#### Uso de Terraform para el despliegue
##### Azure Global
Utiliza [terraform](https://www.terraform.io/) para desplegar Dify en Azure con un solo clic.
- [Azure Terraform por @nikawang](https://github.com/nikawang/dify-azure-terraform)
## Contribuir

View File

@@ -197,6 +197,12 @@ Si vous souhaitez configurer une configuration haute disponibilité, la communau
- [Helm Chart par @BorisPolonsky](https://github.com/BorisPolonsky/dify-helm)
- [Fichier YAML par @Winson-030](https://github.com/Winson-030/dify-kubernetes)
#### Utilisation de Terraform pour le déploiement
##### Azure Global
Utilisez [terraform](https://www.terraform.io/) pour déployer Dify sur Azure en un clic.
- [Azure Terraform par @nikawang](https://github.com/nikawang/dify-azure-terraform)
## Contribuer

View File

@@ -196,6 +196,12 @@ docker compose up -d
- [Helm Chart by @BorisPolonsky](https://github.com/BorisPolonsky/dify-helm)
- [YAML file by @Winson-030](https://github.com/Winson-030/dify-kubernetes)
#### Terraformを使用したデプロイ
##### Azure Global
[terraform](https://www.terraform.io/) を使用して、AzureにDifyをワンクリックでデプロイします。
- [nikawangのAzure Terraform](https://github.com/nikawang/dify-azure-terraform)
## 貢献

View File

@@ -197,6 +197,13 @@ If you'd like to configure a highly-available setup, there are community-contrib
- [Helm Chart by @BorisPolonsky](https://github.com/BorisPolonsky/dify-helm)
- [YAML file by @Winson-030](https://github.com/Winson-030/dify-kubernetes)
#### Terraform atorlugu pilersitsineq
##### Azure Global
Atoruk [terraform](https://www.terraform.io/) Dify-mik Azure-mut ataatsikkut ikkussuilluarlugu.
- [Azure Terraform atorlugu @nikawang](https://github.com/nikawang/dify-azure-terraform)
## Contributing
For those who'd like to contribute code, see our [Contribution Guide](https://github.com/langgenius/dify/blob/main/CONTRIBUTING.md).

View File

@@ -190,6 +190,12 @@ Dify를 Kubernetes에 배포하고 프리미엄 스케일링 설정을 구성했
- [Helm Chart by @BorisPolonsky](https://github.com/BorisPolonsky/dify-helm)
- [YAML file by @Winson-030](https://github.com/Winson-030/dify-kubernetes)
#### Terraform을 사용한 배포
##### Azure Global
[terraform](https://www.terraform.io/)을 사용하여 Azure에 Dify를 원클릭으로 배포하세요.
- [nikawang의 Azure Terraform](https://github.com/nikawang/dify-azure-terraform)
## 기여
코드에 기여하고 싶은 분들은 [기여 가이드](https://github.com/langgenius/dify/blob/main/CONTRIBUTING.md)를 참조하세요.

View File

@@ -72,6 +72,13 @@ TENCENT_COS_SECRET_ID=your-secret-id
TENCENT_COS_REGION=your-region
TENCENT_COS_SCHEME=your-scheme
# OCI Storage configuration
OCI_ENDPOINT=your-endpoint
OCI_BUCKET_NAME=your-bucket-name
OCI_ACCESS_KEY=your-access-key
OCI_SECRET_KEY=your-secret-key
OCI_REGION=your-region
# CORS configuration
WEB_API_CORS_ALLOW_ORIGINS=http://127.0.0.1:3000,*
CONSOLE_CORS_ALLOW_ORIGINS=http://127.0.0.1:3000,*

View File

@@ -66,7 +66,7 @@
10. If you need to debug local async processing, please start the worker service.
```bash
poetry run python -m celery -A app.celery worker -P gevent -c 1 --loglevel INFO -Q dataset,generation,mail,ops_trace
poetry run python -m celery -A app.celery worker -P gevent -c 1 --loglevel INFO -Q dataset,generation,mail,ops_trace,app_deletion
```
The started celery app handles the async tasks, e.g. dataset importing and documents indexing.

View File

@@ -8,6 +8,9 @@ from configs.middleware import MiddlewareConfig
from configs.packaging import PackagingInfo
# TODO: Both `BaseModel` and `BaseSettings` has `model_config` attribute but they are in different types.
# This inheritance is depends on the order of the classes.
# It is better to use `BaseSettings` as the base class.
class DifyConfig(
# based on pydantic-settings
BaseSettings,
@@ -36,7 +39,6 @@ class DifyConfig(
# read from dotenv format config file
env_file='.env',
env_file_encoding='utf-8',
env_ignore_empty=True,
# ignore extra attributes
extra='ignore',

View File

@@ -136,7 +136,7 @@ class HttpConfig(BaseModel):
def CONSOLE_CORS_ALLOW_ORIGINS(self) -> list[str]:
return self.inner_CONSOLE_CORS_ALLOW_ORIGINS.split(',')
inner_WEB_API_CORS_ALLOW_ORIGINS: Optional[str] = Field(
inner_WEB_API_CORS_ALLOW_ORIGINS: str = Field(
description='',
validation_alias=AliasChoices('WEB_API_CORS_ALLOW_ORIGINS'),
default='*',

View File

@@ -7,6 +7,7 @@ from configs.middleware.storage.aliyun_oss_storage_config import AliyunOSSStorag
from configs.middleware.storage.amazon_s3_storage_config import S3StorageConfig
from configs.middleware.storage.azure_blob_storage_config import AzureBlobStorageConfig
from configs.middleware.storage.google_cloud_storage_config import GoogleCloudStorageConfig
from configs.middleware.storage.oci_storage_config import OCIStorageConfig
from configs.middleware.storage.tencent_cos_storage_config import TencentCloudCOSStorageConfig
from configs.middleware.vdb.chroma_config import ChromaConfig
from configs.middleware.vdb.milvus_config import MilvusConfig
@@ -143,7 +144,7 @@ class CeleryConfig(DatabaseConfig):
@computed_field
@property
def CELERY_RESULT_BACKEND(self) -> str:
def CELERY_RESULT_BACKEND(self) -> str | None:
return 'db+{}'.format(self.SQLALCHEMY_DATABASE_URI) \
if self.CELERY_BACKEND == 'database' else self.CELERY_BROKER_URL
@@ -167,6 +168,7 @@ class MiddlewareConfig(
GoogleCloudStorageConfig,
TencentCloudCOSStorageConfig,
S3StorageConfig,
OCIStorageConfig,
# configs of vdb and vdb providers
VectorStoreConfig,

View File

@@ -0,0 +1,35 @@
from typing import Optional
from pydantic import BaseModel, Field
class OCIStorageConfig(BaseModel):
    """
    OCI storage configs

    Settings for Oracle Cloud Infrastructure (OCI) object storage.
    All fields default to None, so OCI storage is effectively unconfigured
    unless the corresponding environment values are supplied.
    """

    # Object-storage endpoint URL (e.g. the S3-compatible endpoint of the tenancy).
    OCI_ENDPOINT: Optional[str] = Field(
        description='OCI storage endpoint',
        default=None,
    )

    # OCI region identifier the bucket lives in.
    OCI_REGION: Optional[str] = Field(
        description='OCI storage region',
        default=None,
    )

    # Name of the target bucket.
    OCI_BUCKET_NAME: Optional[str] = Field(
        description='OCI storage bucket name',
        default=None,
    )

    # Access-key half of the credential pair.
    OCI_ACCESS_KEY: Optional[str] = Field(
        description='OCI storage access key',
        default=None,
    )

    # Secret-key half of the credential pair.
    OCI_SECRET_KEY: Optional[str] = Field(
        description='OCI storage secret key',
        default=None,
    )

View File

@@ -8,7 +8,7 @@ class PackagingInfo(BaseModel):
CURRENT_VERSION: str = Field(
description='Dify version',
default='0.6.12',
default='0.6.12-fix1',
)
COMMIT_SHA: str = Field(

View File

@@ -190,6 +190,10 @@ class AppExportApi(Resource):
@get_app_model
def get(self, app_model):
"""Export app"""
# The role of the current user in the ta table must be admin, owner, or editor
if not current_user.is_editor:
raise Forbidden()
app_service = AppService()
return {

View File

@@ -226,6 +226,15 @@ class DatasetApi(Resource):
except services.errors.dataset.DatasetInUseError:
raise DatasetInUseError()
class DatasetUseCheckApi(Resource):
@setup_required
@login_required
@account_initialization_required
def get(self, dataset_id):
dataset_id_str = str(dataset_id)
dataset_is_using = DatasetService.dataset_use_check(dataset_id_str)
return {'is_using': dataset_is_using}, 200
class DatasetQueryApi(Resource):
@@ -562,6 +571,7 @@ class DatasetErrorDocs(Resource):
api.add_resource(DatasetListApi, '/datasets')
api.add_resource(DatasetApi, '/datasets/<uuid:dataset_id>')
api.add_resource(DatasetUseCheckApi, '/datasets/<uuid:dataset_id>/use-check')
api.add_resource(DatasetQueryApi, '/datasets/<uuid:dataset_id>/queries')
api.add_resource(DatasetErrorDocs, '/datasets/<uuid:dataset_id>/error-docs')
api.add_resource(DatasetIndexingEstimateApi, '/datasets/indexing-estimate')

View File

@@ -3,11 +3,10 @@ from functools import wraps
from flask import current_app, request
from flask_restful import Resource, reqparse
from extensions.ext_database import db
from libs.helper import email, get_remote_ip, str_len
from libs.password import valid_password
from models.model import DifySetup
from services.account_service import AccountService, RegisterService, TenantService
from services.account_service import RegisterService, TenantService
from . import api
from .error import AlreadySetupError, NotInitValidateError, NotSetupError
@@ -51,28 +50,17 @@ class SetupApi(Resource):
required=True, location='json')
args = parser.parse_args()
# Register
account = RegisterService.register(
# setup
RegisterService.setup(
email=args['email'],
name=args['name'],
password=args['password']
password=args['password'],
ip_address=get_remote_ip(request)
)
TenantService.create_owner_tenant_if_not_exist(account)
setup()
AccountService.update_last_login(account, ip_address=get_remote_ip(request))
return {'result': 'success'}, 201
def setup():
dify_setup = DifySetup(
version=current_app.config['CURRENT_VERSION']
)
db.session.add(dify_setup)
def setup_required(view):
@wraps(view)
def decorated(*args, **kwargs):

View File

@@ -1,22 +1,14 @@
# standard import
import base64
import json
import logging
import mimetypes
import time
from collections.abc import Generator
from typing import Optional, Union, cast
# 3rd import
import boto3
import requests
from anthropic import AnthropicBedrock, Stream
from anthropic.types import (
ContentBlockDeltaEvent,
Message,
MessageDeltaEvent,
MessageStartEvent,
MessageStopEvent,
MessageStreamEvent,
)
from botocore.config import Config
from botocore.exceptions import (
ClientError,
@@ -27,7 +19,8 @@ from botocore.exceptions import (
)
from cohere import ChatMessage
from core.model_runtime.entities.llm_entities import LLMResult, LLMResultChunk, LLMResultChunkDelta, LLMUsage
# local import
from core.model_runtime.entities.llm_entities import LLMResult, LLMResultChunk, LLMResultChunkDelta
from core.model_runtime.entities.message_entities import (
AssistantPromptMessage,
ImagePromptMessageContent,
@@ -38,7 +31,6 @@ from core.model_runtime.entities.message_entities import (
TextPromptMessageContent,
UserPromptMessage,
)
from core.model_runtime.entities.model_entities import PriceType
from core.model_runtime.errors.invoke import (
InvokeAuthorizationError,
InvokeBadRequestError,
@@ -73,8 +65,8 @@ class BedrockLargeLanguageModel(LargeLanguageModel):
:param user: unique user id
:return: full response or stream response chunk generator result
"""
# invoke anthropic models via anthropic official SDK
# TODO: consolidate different invocation methods for models based on base model capabilities
# invoke anthropic models via boto3 client
if "anthropic" in model:
return self._generate_anthropic(model, credentials, prompt_messages, model_parameters, stop, stream, user)
# invoke Cohere models via boto3 client
@@ -171,48 +163,34 @@ class BedrockLargeLanguageModel(LargeLanguageModel):
:param stream: is stream response
:return: full response or stream response chunk generator result
"""
# use Anthropic official SDK references
# - https://docs.anthropic.com/claude/reference/claude-on-amazon-bedrock
# - https://github.com/anthropics/anthropic-sdk-python
client = AnthropicBedrock(
aws_access_key=credentials.get("aws_access_key_id"),
aws_secret_key=credentials.get("aws_secret_access_key"),
aws_region=credentials["aws_region"],
)
bedrock_client = boto3.client(service_name='bedrock-runtime',
aws_access_key_id=credentials.get("aws_access_key_id"),
aws_secret_access_key=credentials.get("aws_secret_access_key"),
region_name=credentials["aws_region"])
extra_model_kwargs = {}
if stop:
extra_model_kwargs['stop_sequences'] = stop
# Notice: If you request the current version of the SDK to the bedrock server,
# you will get the following error message and you need to wait for the service or SDK to be updated.
# Response: Error code: 400
# {'message': 'Malformed input request: #: subject must not be valid against schema
# {"required":["messages"]}#: extraneous key [metadata] is not permitted, please reformat your input and try again.'}
# TODO: Open in the future when the interface is properly supported
# if user:
# ref: https://github.com/anthropics/anthropic-sdk-python/blob/e84645b07ca5267066700a104b4d8d6a8da1383d/src/anthropic/resources/messages.py#L465
# extra_model_kwargs['metadata'] = message_create_params.Metadata(user_id=user)
system, prompt_message_dicts = self._convert_claude_prompt_messages(prompt_messages)
if system:
extra_model_kwargs['system'] = system
response = client.messages.create(
model=model,
messages=prompt_message_dicts,
stream=stream,
**model_parameters,
**extra_model_kwargs
)
system, prompt_message_dicts = self._convert_converse_prompt_messages(prompt_messages)
inference_config, additional_model_fields = self._convert_converse_api_model_parameters(model_parameters, stop)
if stream:
return self._handle_claude_stream_response(model, credentials, response, prompt_messages)
response = bedrock_client.converse_stream(
modelId=model,
messages=prompt_message_dicts,
system=system,
inferenceConfig=inference_config,
additionalModelRequestFields=additional_model_fields
)
return self._handle_converse_stream_response(model, credentials, response, prompt_messages)
else:
response = bedrock_client.converse(
modelId=model,
messages=prompt_message_dicts,
system=system,
inferenceConfig=inference_config,
additionalModelRequestFields=additional_model_fields
)
return self._handle_converse_response(model, credentials, response, prompt_messages)
return self._handle_claude_response(model, credentials, response, prompt_messages)
def _handle_claude_response(self, model: str, credentials: dict, response: Message,
def _handle_converse_response(self, model: str, credentials: dict, response: dict,
prompt_messages: list[PromptMessage]) -> LLMResult:
"""
Handle llm chat response
@@ -223,17 +201,16 @@ class BedrockLargeLanguageModel(LargeLanguageModel):
:param prompt_messages: prompt messages
:return: full response chunk generator result
"""
# transform assistant message to prompt message
assistant_prompt_message = AssistantPromptMessage(
content=response.content[0].text
content=response['output']['message']['content'][0]['text']
)
# calculate num tokens
if response.usage:
if response['usage']:
# transform usage
prompt_tokens = response.usage.input_tokens
completion_tokens = response.usage.output_tokens
prompt_tokens = response['usage']['inputTokens']
completion_tokens = response['usage']['outputTokens']
else:
# calculate num tokens
prompt_tokens = self.get_num_tokens(model, credentials, prompt_messages)
@@ -242,17 +219,15 @@ class BedrockLargeLanguageModel(LargeLanguageModel):
# transform usage
usage = self._calc_response_usage(model, credentials, prompt_tokens, completion_tokens)
# transform response
response = LLMResult(
model=response.model,
result = LLMResult(
model=model,
prompt_messages=prompt_messages,
message=assistant_prompt_message,
usage=usage
usage=usage,
)
return result
return response
def _handle_claude_stream_response(self, model: str, credentials: dict, response: Stream[MessageStreamEvent],
def _handle_converse_stream_response(self, model: str, credentials: dict, response: dict,
prompt_messages: list[PromptMessage], ) -> Generator:
"""
Handle llm chat stream response
@@ -272,14 +247,14 @@ class BedrockLargeLanguageModel(LargeLanguageModel):
finish_reason = None
index = 0
for chunk in response:
if isinstance(chunk, MessageStartEvent):
return_model = chunk.message.model
input_tokens = chunk.message.usage.input_tokens
elif isinstance(chunk, MessageDeltaEvent):
output_tokens = chunk.usage.output_tokens
finish_reason = chunk.delta.stop_reason
elif isinstance(chunk, MessageStopEvent):
for chunk in response['stream']:
if 'messageStart' in chunk:
return_model = model
elif 'messageStop' in chunk:
finish_reason = chunk['messageStop']['stopReason']
elif 'metadata' in chunk:
input_tokens = chunk['metadata']['usage']['inputTokens']
output_tokens = chunk['metadata']['usage']['outputTokens']
usage = self._calc_response_usage(model, credentials, input_tokens, output_tokens)
yield LLMResultChunk(
model=return_model,
@@ -293,13 +268,13 @@ class BedrockLargeLanguageModel(LargeLanguageModel):
usage=usage
)
)
elif isinstance(chunk, ContentBlockDeltaEvent):
chunk_text = chunk.delta.text if chunk.delta.text else ''
elif 'contentBlockDelta' in chunk:
chunk_text = chunk['contentBlockDelta']['delta']['text'] if chunk['contentBlockDelta']['delta']['text'] else ''
full_assistant_content += chunk_text
assistant_prompt_message = AssistantPromptMessage(
content=chunk_text if chunk_text else '',
)
index = chunk.index
index = chunk['contentBlockDelta']['contentBlockIndex']
yield LLMResultChunk(
model=model,
prompt_messages=prompt_messages,
@@ -310,57 +285,33 @@ class BedrockLargeLanguageModel(LargeLanguageModel):
)
except Exception as ex:
raise InvokeError(str(ex))
def _convert_converse_api_model_parameters(self, model_parameters: dict, stop: Optional[list[str]] = None) -> tuple[dict, dict]:
inference_config = {}
additional_model_fields = {}
if 'max_tokens' in model_parameters:
inference_config['maxTokens'] = model_parameters['max_tokens']
def _calc_claude_response_usage(self, model: str, credentials: dict, prompt_tokens: int, completion_tokens: int) -> LLMUsage:
"""
Calculate response usage
if 'temperature' in model_parameters:
inference_config['temperature'] = model_parameters['temperature']
if 'top_p' in model_parameters:
inference_config['topP'] = model_parameters['temperature']
:param model: model name
:param credentials: model credentials
:param prompt_tokens: prompt tokens
:param completion_tokens: completion tokens
:return: usage
"""
# get prompt price info
prompt_price_info = self.get_price(
model=model,
credentials=credentials,
price_type=PriceType.INPUT,
tokens=prompt_tokens,
)
if stop:
inference_config['stopSequences'] = stop
if 'top_k' in model_parameters:
additional_model_fields['top_k'] = model_parameters['top_k']
return inference_config, additional_model_fields
# get completion price info
completion_price_info = self.get_price(
model=model,
credentials=credentials,
price_type=PriceType.OUTPUT,
tokens=completion_tokens
)
# transform usage
usage = LLMUsage(
prompt_tokens=prompt_tokens,
prompt_unit_price=prompt_price_info.unit_price,
prompt_price_unit=prompt_price_info.unit,
prompt_price=prompt_price_info.total_amount,
completion_tokens=completion_tokens,
completion_unit_price=completion_price_info.unit_price,
completion_price_unit=completion_price_info.unit,
completion_price=completion_price_info.total_amount,
total_tokens=prompt_tokens + completion_tokens,
total_price=prompt_price_info.total_amount + completion_price_info.total_amount,
currency=prompt_price_info.currency,
latency=time.perf_counter() - self.started_at
)
return usage
def _convert_claude_prompt_messages(self, prompt_messages: list[PromptMessage]) -> tuple[str, list[dict]]:
def _convert_converse_prompt_messages(self, prompt_messages: list[PromptMessage]) -> tuple[str, list[dict]]:
"""
Convert prompt messages to dict list and system
"""
system = ""
system = []
first_loop = True
for message in prompt_messages:
if isinstance(message, SystemPromptMessage):
@@ -375,25 +326,24 @@ class BedrockLargeLanguageModel(LargeLanguageModel):
prompt_message_dicts = []
for message in prompt_messages:
if not isinstance(message, SystemPromptMessage):
prompt_message_dicts.append(self._convert_claude_prompt_message_to_dict(message))
prompt_message_dicts.append(self._convert_prompt_message_to_dict(message))
return system, prompt_message_dicts
def _convert_claude_prompt_message_to_dict(self, message: PromptMessage) -> dict:
def _convert_prompt_message_to_dict(self, message: PromptMessage) -> dict:
"""
Convert PromptMessage to dict
"""
if isinstance(message, UserPromptMessage):
message = cast(UserPromptMessage, message)
if isinstance(message.content, str):
message_dict = {"role": "user", "content": message.content}
message_dict = {"role": "user", "content": [{'text': message.content}]}
else:
sub_messages = []
for message_content in message.content:
if message_content.type == PromptMessageContentType.TEXT:
message_content = cast(TextPromptMessageContent, message_content)
sub_message_dict = {
"type": "text",
"text": message_content.data
}
sub_messages.append(sub_message_dict)
@@ -404,24 +354,24 @@ class BedrockLargeLanguageModel(LargeLanguageModel):
try:
image_content = requests.get(message_content.data).content
mime_type, _ = mimetypes.guess_type(message_content.data)
base64_data = base64.b64encode(image_content).decode('utf-8')
except Exception as ex:
raise ValueError(f"Failed to fetch image data from url {message_content.data}, {ex}")
else:
data_split = message_content.data.split(";base64,")
mime_type = data_split[0].replace("data:", "")
base64_data = data_split[1]
image_content = base64.b64decode(base64_data)
if mime_type not in ["image/jpeg", "image/png", "image/gif", "image/webp"]:
raise ValueError(f"Unsupported image type {mime_type}, "
f"only support image/jpeg, image/png, image/gif, and image/webp")
sub_message_dict = {
"type": "image",
"source": {
"type": "base64",
"media_type": mime_type,
"data": base64_data
"image": {
"format": mime_type.replace('image/', ''),
"source": {
"bytes": image_content
}
}
}
sub_messages.append(sub_message_dict)
@@ -429,10 +379,10 @@ class BedrockLargeLanguageModel(LargeLanguageModel):
message_dict = {"role": "user", "content": sub_messages}
elif isinstance(message, AssistantPromptMessage):
message = cast(AssistantPromptMessage, message)
message_dict = {"role": "assistant", "content": message.content}
message_dict = {"role": "assistant", "content": [{'text': message.content}]}
elif isinstance(message, SystemPromptMessage):
message = cast(SystemPromptMessage, message)
message_dict = {"role": "system", "content": message.content}
message_dict = [{'text': message.content}]
else:
raise ValueError(f"Got unknown type {message}")

View File

@@ -27,8 +27,8 @@ class LangfuseConfig(BaseTracingConfig):
def set_value(cls, v, info: ValidationInfo):
if v is None or v == "":
v = 'https://api.langfuse.com'
if not v.startswith('https://'):
raise ValueError('host must start with https://')
if not v.startswith('https://') or not v.startswith('http://'):
raise ValueError('host must start with https:// or http://')
return v

View File

@@ -109,7 +109,6 @@ class LangFuseDataTrace(BaseTraceInstance):
workflow_nodes_executions = (
db.session.query(WorkflowNodeExecution)
.filter(WorkflowNodeExecution.workflow_run_id == trace_info.workflow_run_id)
.order_by(WorkflowNodeExecution.index.desc())
.all()
)
@@ -121,7 +120,9 @@ class LangFuseDataTrace(BaseTraceInstance):
node_type = node_execution.node_type
status = node_execution.status
if node_type == "llm":
inputs = json.loads(node_execution.process_data).get("prompts", {})
inputs = json.loads(node_execution.process_data).get(
"prompts", {}
) if node_execution.process_data else {}
else:
inputs = json.loads(node_execution.inputs) if node_execution.inputs else {}
outputs = (
@@ -213,7 +214,9 @@ class LangFuseDataTrace(BaseTraceInstance):
end_user_data: EndUser = db.session.query(EndUser).filter(
EndUser.id == message_data.from_end_user_id
).first()
user_id = end_user_data.session_id
if end_user_data is not None:
user_id = end_user_data.session_id
metadata["user_id"] = user_id
trace_data = LangfuseTrace(
id=message_id,

View File

@@ -102,7 +102,6 @@ class LangSmithDataTrace(BaseTraceInstance):
workflow_nodes_executions = (
db.session.query(WorkflowNodeExecution)
.filter(WorkflowNodeExecution.workflow_run_id == trace_info.workflow_run_id)
.order_by(WorkflowNodeExecution.index.desc())
.all()
)
@@ -114,7 +113,9 @@ class LangSmithDataTrace(BaseTraceInstance):
node_type = node_execution.node_type
status = node_execution.status
if node_type == "llm":
inputs = json.loads(node_execution.process_data).get("prompts", {})
inputs = json.loads(node_execution.process_data).get(
"prompts", {}
) if node_execution.process_data else {}
else:
inputs = json.loads(node_execution.inputs) if node_execution.inputs else {}
outputs = (
@@ -181,13 +182,15 @@ class LangSmithDataTrace(BaseTraceInstance):
message_id = message_data.id
user_id = message_data.from_account_id
metadata["user_id"] = user_id
if message_data.from_end_user_id:
end_user_data: EndUser = db.session.query(EndUser).filter(
EndUser.id == message_data.from_end_user_id
).first().session_id
end_user_id = end_user_data.session_id
metadata["end_user_id"] = end_user_id
metadata["user_id"] = user_id
).first()
if end_user_data is not None:
end_user_id = end_user_data.session_id
metadata["end_user_id"] = end_user_id
message_run = LangSmithRunModel(
input_tokens=trace_info.message_tokens,

View File

@@ -352,10 +352,17 @@ class TraceTask:
query = workflow_run_inputs.get("query") or workflow_run_inputs.get("sys.query") or ""
# get workflow_app_log_id
workflow_app_log_data = db.session.query(WorkflowAppLog).filter_by(workflow_run_id=workflow_run.id).first()
workflow_app_log_data = db.session.query(WorkflowAppLog).filter_by(
tenant_id=tenant_id,
app_id=workflow_run.app_id,
workflow_run_id=workflow_run.id
).first()
workflow_app_log_id = str(workflow_app_log_data.id) if workflow_app_log_data else None
# get message_id
message_data = db.session.query(Message.id).filter_by(workflow_run_id=workflow_run_id).first()
message_data = db.session.query(Message.id).filter_by(
conversation_id=conversation_id,
workflow_run_id=workflow_run_id
).first()
message_id = str(message_data.id) if message_data else None
metadata = {

View File

@@ -7,6 +7,7 @@
- azuredalle
- stability
- wikipedia
- nominatim
- yahoo
- arxiv
- pubmed

View File

@@ -0,0 +1,277 @@
<svg xmlns="http://www.w3.org/2000/svg" height="256" width="256" xmlns:xlink="http://www.w3.org/1999/xlink">
<defs>
<linearGradient id="c">
<stop stop-color="#2d3335" offset="0"/>
<stop stop-color="#4c464a" offset=".5"/>
<stop stop-color="#384042" offset="1"/>
</linearGradient>
<linearGradient id="a">
<stop offset="0"/>
<stop stop-opacity="0" offset="1"/>
</linearGradient>
<linearGradient id="u" y2="-179.97" xlink:href="#a" gradientUnits="userSpaceOnUse" x2="9" y1="-94.672" x1="94.25"/>
<linearGradient id="v" y2="-30.438" xlink:href="#a" gradientUnits="userSpaceOnUse" x2="35.75" y1="-114.44" x1="50.75"/>
<linearGradient id="w" y2="-74.625" xlink:href="#a" gradientUnits="userSpaceOnUse" x2="115.25" y1="-163.12" x1="77.625"/>
<linearGradient id="x" y2="7.063" xlink:href="#a" gradientUnits="userSpaceOnUse" x2="125.72" y1="-50.438" x1="56.5"/>
<linearGradient id="y" y2="-146.8" xlink:href="#a" gradientUnits="userSpaceOnUse" x2="180.75" y1="-163.3" x1="35"/>
<linearGradient id="z" y2="-41.609" xlink:href="#a" gradientUnits="userSpaceOnUse" x2="179" y1="-141.11" x1="57"/>
<linearGradient id="A" y2="39.75" xlink:href="#a" gradientUnits="userSpaceOnUse" x2="208.25" y1="-26.5" x1="156"/>
<linearGradient id="B" y2="-77.297" xlink:href="#a" gradientUnits="userSpaceOnUse" x2="235" y1="-169.3" x1="142.75"/>
<linearGradient id="m" y2="-113.72" xlink:href="#b" gradientUnits="userSpaceOnUse" x2="166.5" y1="-161.36" x1="214.25"/>
<linearGradient id="b">
<stop stop-color="#fff" offset="0"/>
<stop stop-color="#fff" stop-opacity="0" offset="1"/>
</linearGradient>
<linearGradient id="n" y2="-62.469" xlink:href="#b" gradientUnits="userSpaceOnUse" x2="57.656" y1="-126.5" x1="53.25"/>
<linearGradient id="o" y2="-17.125" xlink:href="#b" gradientUnits="userSpaceOnUse" x2="75.719" y1="6.625" x1="39.5"/>
<linearGradient id="p" y2="-121.44" xlink:href="#b" gradientUnits="userSpaceOnUse" x2="85.312" y1="-162.34" x1="102"/>
<linearGradient id="q" y2="-14.75" xlink:href="#b" gradientUnits="userSpaceOnUse" x2="63.5" y1="-78.094" x1="117.75"/>
<linearGradient id="r" y2="-60.75" xlink:href="#b" gradientUnits="userSpaceOnUse" x2="133.94" y1="-125.12" x1="180.69"/>
<linearGradient id="s" y2="33.375" xlink:href="#b" gradientUnits="userSpaceOnUse" x2="130.22" y1="-23.5" x1="170.75"/>
<linearGradient id="t" y2="-5.656" xlink:href="#b" gradientUnits="userSpaceOnUse" y1="-76" x2="181.75" x1="213.5"/>
<linearGradient id="l" y2="-213.25" xlink:href="#a" gradientUnits="userSpaceOnUse" x2="9" gradientTransform="translate(0 -10)" y1="72.064" x1="210.17"/>
<linearGradient id="M" y2="150.38" xlink:href="#c" gradientUnits="userSpaceOnUse" x2="133" gradientTransform="matrix(-1 0 0 1 46 0)" y1="150.38" x1="123"/>
<linearGradient id="L" y2="143.36" xlink:href="#c" gradientUnits="userSpaceOnUse" x2="130.88" gradientTransform="translate(-210)" y1="134.36" x1="128"/>
<linearGradient id="O" y2="150.38" gradientUnits="userSpaceOnUse" y1="150.38" gradientTransform="matrix(-2 0 0 1.7699 174 -86.65)" x2="133" x1="123">
<stop stop-color="#2d3335" offset="0"/>
<stop stop-color="#4c464a" offset=".5"/>
<stop stop-color="#384042" offset="1"/>
</linearGradient>
<linearGradient id="N" y2="186.5" gradientUnits="userSpaceOnUse" x2="136" gradientTransform="translate(-210)" y1="186.5" x1="120">
<stop stop-color="#f9e295" offset="0"/>
<stop stop-color="#f7dd84" offset=".125"/>
<stop stop-color="#fff" offset=".206"/>
<stop stop-color="#f4ce51" offset=".301"/>
<stop stop-color="#f9e7aa" offset=".341"/>
<stop stop-color="#efbb0e" offset="1"/>
</linearGradient>
<linearGradient id="P" y2="186.5" gradientUnits="userSpaceOnUse" y1="186.5" gradientTransform="translate(-210 67)" x2="136" x1="120">
<stop stop-color="#f9e295" offset="0"/>
<stop stop-color="#f7dd84" offset=".125"/>
<stop stop-color="#fff" offset=".206"/>
<stop stop-color="#f4ce51" offset=".301"/>
<stop stop-color="#f9e7aa" offset=".341"/>
<stop stop-color="#efbb0e" offset="1"/>
</linearGradient>
<linearGradient id="J" y2="186.5" gradientUnits="userSpaceOnUse" x2="136" gradientTransform="rotate(45 104.976 51.258) scale(1.39197)" y1="186.5" x1="120">
<stop stop-color="#f9e295" offset="0"/>
<stop stop-color="#f7dd84" offset=".125"/>
<stop stop-color="#fff" offset=".206"/>
<stop stop-color="#f4ce51" offset=".301"/>
<stop stop-color="#f9e7aa" offset=".341"/>
<stop stop-color="#efbb0e" offset="1"/>
</linearGradient>
<linearGradient id="U" y2="150.38" gradientUnits="userSpaceOnUse" x2="133" gradientTransform="rotate(-135 332.789 -39.72) scale(2.7839 -2.4637)" y1="150.38" x1="123">
<stop stop-color="#2d3335" offset="0"/>
<stop stop-color="#4c464a" offset=".5"/>
<stop stop-color="#384042" offset="1"/>
</linearGradient>
<linearGradient id="T" y2="186.5" gradientUnits="userSpaceOnUse" x2="136" gradientTransform="rotate(45 217.552 97.892) scale(1.39197)" y1="186.5" x1="120">
<stop stop-color="#f9e295" offset="0"/>
<stop stop-color="#f7dd84" offset=".125"/>
<stop stop-color="#fff" offset=".206"/>
<stop stop-color="#f4ce51" offset=".301"/>
<stop stop-color="#f9e7aa" offset=".341"/>
<stop stop-color="#efbb0e" offset="1"/>
</linearGradient>
<linearGradient id="S" y2="150.38" xlink:href="#c" gradientUnits="userSpaceOnUse" x2="133" gradientTransform="scale(-1.39197 1.39197) rotate(-45 -28.292 379.342)" y1="150.38" x1="123"/>
<linearGradient id="R" y2="143.36" xlink:href="#c" gradientUnits="userSpaceOnUse" x2="130.88" gradientTransform="rotate(45 217.552 97.892) scale(1.39197)" y1="134.36" x1="128"/>
<linearGradient id="V" y2="150.38" xlink:href="#a" gradientUnits="userSpaceOnUse" y1="167.73" gradientTransform="rotate(-135 332.789 -39.72) scale(2.7839 -2.4637)" x2="133" x1="108"/>
<linearGradient id="W" y2="137.2" gradientUnits="userSpaceOnUse" x2="179.96" gradientTransform="translate(0 -10)" y1="29.815" x1="126.64">
<stop stop-color="#fff" offset="0"/>
<stop stop-color="#fff" stop-opacity="0" offset="1"/>
</linearGradient>
<linearGradient id="Z" y2="137.2" gradientUnits="userSpaceOnUse" y1="29.815" gradientTransform="matrix(-.50295 .1984 .1972 -.43254 276.16 123.42)" x2="179.96" x1="126.64">
<stop stop-color="#fff" offset="0"/>
<stop stop-color="#fff" stop-opacity="0" offset="1"/>
</linearGradient>
<linearGradient id="G" y2="-213.25" gradientUnits="userSpaceOnUse" x2="9" gradientTransform="translate(0 192)" y1="72.064" x1="210.17">
<stop offset="0"/>
<stop stop-opacity="0" offset="1"/>
</linearGradient>
<linearGradient id="h" y2="4.302" gradientUnits="userSpaceOnUse" x2="122" y1="245.45" x1="122">
<stop offset="0"/>
<stop stop-opacity="0" offset="1"/>
</linearGradient>
<linearGradient id="e" y2="-213.25" gradientUnits="userSpaceOnUse" x2="9" gradientTransform="matrix(1 -.08088 0 1 0 203.06)" y1="72.064" x1="210.17">
<stop offset="0"/>
<stop stop-opacity="0" offset="1"/>
</linearGradient>
<clipPath id="j" clipPathUnits="userSpaceOnUse">
<path fill="#ceeeab" d="M9 12.25s7.5 13 11.75 27.75S27 65.5 27 65.5s-5.5 12.75-8.25 24.75-5.75 23-5.75 23 5.75 16 9.25 30S26 167.5 26 167.5s-4 10.25-7.5 24.25-5 30.75-5 30.75 9.25-2 28.5 1.25 32.25 6 32.25 6 12.75-2.75 24-6.25 16.25-6.5 16.25-6.5 5.5.5 22.5 6.25 29.25 8.5 29.25 8.5 13-2.75 26-5.75 26.5-8 26.5-8-.75-5 4.25-24.5 8.75-28 8.75-28-.5-4.5-3.75-19.75S218 116 218 116s1.75-10.5 6.75-23.75S235 65.5 235 65.5s-4.75-15.25-7.5-29.75S219.25 10 219.25 10 195 19 187.5 20.5s-21 5.25-21 5.25-9.75-4.25-22-8.5-29.75-5.5-29.75-5.5-3.25 3.5-22 8-27.5 5.75-27.5 5.75-18.5-9-31.5-11.5-24-2-24.75-1.75z"/>
</clipPath>
<clipPath id="k" clipPathUnits="userSpaceOnUse">
<path fill="#ceeeab" d="M9 12.25s7.5 13 11.75 27.75S27 65.5 27 65.5s-5.5 12.75-8.25 24.75-5.75 23-5.75 23 5.75 16 9.25 30S26 167.5 26 167.5s-4 10.25-7.5 24.25-5 30.75-5 30.75 9.25-2 28.5 1.25 32.25 6 32.25 6 12.75-2.75 24-6.25 16.25-6.5 16.25-6.5 5.5.5 22.5 6.25 29.25 8.5 29.25 8.5 13-2.75 26-5.75 26.5-8 26.5-8-.75-5 4.25-24.5 8.75-28 8.75-28-.5-4.5-3.75-19.75S218 116 218 116s1.75-10.5 6.75-23.75S235 65.5 235 65.5s-4.75-15.25-7.5-29.75S219.25 10 219.25 10 195 19 187.5 20.5s-21 5.25-21 5.25-9.75-4.25-22-8.5-29.75-5.5-29.75-5.5-3.25 3.5-22 8-27.5 5.75-27.5 5.75-18.5-9-31.5-11.5-24-2-24.75-1.75z"/>
</clipPath>
<clipPath id="i" clipPathUnits="userSpaceOnUse">
<path fill="#ceeeab" d="M9 12.25s7.5 13 11.75 27.75S27 65.5 27 65.5s-5.5 12.75-8.25 24.75-5.75 23-5.75 23 5.75 16 9.25 30S26 167.5 26 167.5s-4 10.25-7.5 24.25-5 30.75-5 30.75 9.25-2 28.5 1.25 32.25 6 32.25 6 12.75-2.75 24-6.25 16.25-6.5 16.25-6.5 5.5.5 22.5 6.25 29.25 8.5 29.25 8.5 13-2.75 26-5.75 26.5-8 26.5-8-.75-5 4.25-24.5 8.75-28 8.75-28-.5-4.5-3.75-19.75S218 116 218 116s1.75-10.5 6.75-23.75S235 65.5 235 65.5s-4.75-15.25-7.5-29.75S219.25 10 219.25 10 195 19 187.5 20.5s-21 5.25-21 5.25-9.75-4.25-22-8.5-29.75-5.5-29.75-5.5-3.25 3.5-22 8-27.5 5.75-27.5 5.75-18.5-9-31.5-11.5-24-2-24.75-1.75z"/>
</clipPath>
<clipPath id="D" clipPathUnits="userSpaceOnUse">
<path fill="#ceeeab" d="M9 12.25s7.5 13 11.75 27.75S27 65.5 27 65.5s-5.5 12.75-8.25 24.75-5.75 23-5.75 23 5.75 16 9.25 30S26 167.5 26 167.5s-4 10.25-7.5 24.25-5 30.75-5 30.75 9.25-2 28.5 1.25 32.25 6 32.25 6 12.75-2.75 24-6.25 16.25-6.5 16.25-6.5 5.5.5 22.5 6.25 29.25 8.5 29.25 8.5 13-2.75 26-5.75 26.5-8 26.5-8-.75-5 4.25-24.5 8.75-28 8.75-28-.5-4.5-3.75-19.75S218 116 218 116s1.75-10.5 6.75-23.75S235 65.5 235 65.5s-4.75-15.25-7.5-29.75S219.25 10 219.25 10 195 19 187.5 20.5s-21 5.25-21 5.25-9.75-4.25-22-8.5-29.75-5.5-29.75-5.5-3.25 3.5-22 8-27.5 5.75-27.5 5.75-18.5-9-31.5-11.5-24-2-24.75-1.75z"/>
</clipPath>
<clipPath id="E" clipPathUnits="userSpaceOnUse">
<path fill="#ceeeab" d="M9 12.25s7.5 13 11.75 27.75S27 65.5 27 65.5s-5.5 12.75-8.25 24.75-5.75 23-5.75 23 5.75 16 9.25 30S26 167.5 26 167.5s-4 10.25-7.5 24.25-5 30.75-5 30.75 9.25-2 28.5 1.25 32.25 6 32.25 6 12.75-2.75 24-6.25 16.25-6.5 16.25-6.5 5.5.5 22.5 6.25 29.25 8.5 29.25 8.5 13-2.75 26-5.75 26.5-8 26.5-8-.75-5 4.25-24.5 8.75-28 8.75-28-.5-4.5-3.75-19.75S218 116 218 116s1.75-10.5 6.75-23.75S235 65.5 235 65.5s-4.75-15.25-7.5-29.75S219.25 10 219.25 10 195 19 187.5 20.5s-21 5.25-21 5.25-9.75-4.25-22-8.5-29.75-5.5-29.75-5.5-3.25 3.5-22 8-27.5 5.75-27.5 5.75-18.5-9-31.5-11.5-24-2-24.75-1.75z"/>
</clipPath>
<clipPath id="F" clipPathUnits="userSpaceOnUse">
<path fill="#ceeeab" d="M9 12.25s7.5 13 11.75 27.75S27 65.5 27 65.5s-5.5 12.75-8.25 24.75-5.75 23-5.75 23 5.75 16 9.25 30S26 167.5 26 167.5s-4 10.25-7.5 24.25-5 30.75-5 30.75 9.25-2 28.5 1.25 32.25 6 32.25 6 12.75-2.75 24-6.25 16.25-6.5 16.25-6.5 5.5.5 22.5 6.25 29.25 8.5 29.25 8.5 13-2.75 26-5.75 26.5-8 26.5-8-.75-5 4.25-24.5 8.75-28 8.75-28-.5-4.5-3.75-19.75S218 116 218 116s1.75-10.5 6.75-23.75S235 65.5 235 65.5s-4.75-15.25-7.5-29.75S219.25 10 219.25 10 195 19 187.5 20.5s-21 5.25-21 5.25-9.75-4.25-22-8.5-29.75-5.5-29.75-5.5-3.25 3.5-22 8-27.5 5.75-27.5 5.75-18.5-9-31.5-11.5-24-2-24.75-1.75z"/>
</clipPath>
<clipPath id="C" clipPathUnits="userSpaceOnUse">
<path d="M123.62 141.06c20.832 20.243 54.607 19.289 75.439-2.132s20.832-55.196 0-75.439-54.607-19.289-75.439 2.132-20.832 55.196 0 75.439z" fill="url(#d)"/>
</clipPath>
<clipPath id="H" clipPathUnits="userSpaceOnUse">
<path opacity=".039" d="M9 22.578s7.5 12.393 11.75 26.8C25 63.784 27 74.373 27 74.373s-5.5 13.195-8.25 25.417C16 112.01 13 123.25 13 123.25s5.75 15.535 9.25 29.252S26 176.449 26 176.449s-4 10.574-7.5 24.857-5 31.154-5 31.154 9.25-2.748 28.5-1.055 32.25 3.392 32.25 3.392 12.75-3.782 24-8.191 16.25-7.815 16.25-7.815 5.5.055 22.5 4.43c17 4.376 29.25 6.135 29.25 6.135s13-3.802 26-7.853 26.5-10.143 26.5-10.143-.75-4.94 4.25-24.844 8.75-28.708 8.75-28.708-.514-4.455-3.75-19.447c-1.75-8.108-2.25-4.068-2.25-4.068s16.264-26.155 16.5-40.334c.25-15.02-7.25-36.413-7.25-36.413s-4.75-14.866-7.5-29.143c-2.75-14.278-8.25-25.083-8.25-25.083S195 14.281 187.5 16.388c-7.5 2.107-21 6.948-21 6.948s-9.75-3.461-22-6.72c-12.25-3.26-29.75-3.094-29.75-3.094s-3.25 3.763-22 9.78c-18.75 6.016-27.5 7.973-27.5 7.973s-18.5-7.504-31.5-8.952-24-.06-24.75.252z" fill="url(#e)"/>
</clipPath>
<radialGradient id="K" gradientUnits="userSpaceOnUse" cy="86" cx="128" gradientTransform="matrix(1.0213 0 0 -1.0213 -212.72 173.83)" r="47">
<stop stop-color="#d0e9f2" stop-opacity="0" offset="0"/>
<stop stop-color="#d0e9f2" stop-opacity="0" offset=".839"/>
<stop stop-color="#d0e9f2" stop-opacity=".282" offset=".943"/>
<stop stop-color="#d0e9f2" offset="1"/>
</radialGradient>
<radialGradient id="Q" gradientUnits="userSpaceOnUse" cy="86" cx="128" gradientTransform="scale(1.4216 -1.4216) rotate(-45 -51.091 19.464)" r="47">
<stop stop-color="#d0e9f2" stop-opacity="0" offset="0"/>
<stop stop-color="#d0e9f2" stop-opacity="0" offset=".839"/>
<stop stop-color="#d0e9f2" stop-opacity=".282" offset=".943"/>
<stop stop-color="#d0e9f2" offset="1"/>
</radialGradient>
<radialGradient id="Y" gradientUnits="userSpaceOnUse" cy="72.588" cx="159.61" gradientTransform="matrix(1.2125 0 0 .83915 -46.348 24.528)" r="38.417">
<stop stop-color="#fff" offset="0"/>
<stop stop-color="#fff" stop-opacity=".93" offset=".5"/>
<stop stop-color="#fff" stop-opacity="0" offset="1"/>
</radialGradient>
<radialGradient id="X" gradientUnits="userSpaceOnUse" cx="159.61" cy="72.588" r="38.417" gradientTransform="matrix(1.2125 0 0 .83915 -45.92 25.814)">
<stop stop-color="#fff" offset="0"/>
<stop stop-color="#fff" stop-opacity="0" offset="1"/>
</radialGradient>
<radialGradient id="d" gradientUnits="userSpaceOnUse" cy="86" cx="128" gradientTransform="matrix(.80254 .77986 .80254 -.82522 -10.402 73.423)" r="47">
<stop stop-color="#d0e9f2" stop-opacity="0" offset="0"/>
<stop stop-color="#d0e9f2" stop-opacity="0" offset=".839"/>
<stop stop-color="#d0e9f2" stop-opacity=".282" offset=".943"/>
<stop stop-color="#d0e9f2" offset="1"/>
</radialGradient>
<filter id="I">
<feGaussianBlur stdDeviation="4.287"/>
</filter>
<filter id="f">
<feGaussianBlur stdDeviation="6.311"/>
</filter>
<filter id="g">
<feGaussianBlur stdDeviation="4.457"/>
</filter>
</defs>
<path opacity=".7" d="M174.28 35.875c-17.6 0-35.19 6.639-48.72 19.906-.16.164-.33.303-.5.469-22.05 22.056-26.206 55.25-12.47 81.5l-2.28 1.63c2.25 4.23 4.94 8.3 8.1 12.12l-13.32 13.34c-1.34-.58-2.78-1.03-4.56-1.37l-2.842 2.84c-1.11-.48-2.139-.81-3.063-.87L30.5 229.56c-.037 1.1.226 2.19.656 3.28l-.531.54-1.094 1.09c.716 3.74 2.011 6.82 5.469 10.28s6.42 4.63 10.281 5.47l1.094-1.1.719-.71c1.088.46 2.13.78 3.094.84l64.122-64.13c-.24-.97-.62-1.93-1.03-2.9l3-3c-.25-1.73-.68-3.2-1.31-4.6l13.28-13.28c3.82 3.16 7.89 5.85 12.13 8.1l1.62-2.28c26.25 13.74 59.44 9.58 81.5-12.47.17-.17.31-.34.47-.5 26.69-27.22 26.55-70.923-.47-97.94-13.59-13.59-31.41-20.375-49.22-20.375zm-.5 3.281c.11.002.21-.002.31 0a67.89 67.89 0 0 1 5.72.375c.1.01.19.021.28.031l.29.032c.14.017.29.044.43.062.9.11 1.8.228 2.69.375.62.102 1.23.224 1.84.344.35.066.69.116 1.04.187.72.153 1.43.324 2.15.5.31.077.63.138.94.219.07.019.14.043.22.063.95.253 1.89.516 2.84.812.07.023.15.04.22.063.95.3 1.91.625 2.84.969.06.019.11.042.16.062.95.352 1.9.728 2.84 1.125.06.022.11.041.16.063.94.4 1.88.836 2.81 1.281l.13.062c.93.451 1.86.91 2.78 1.407.03.017.06.044.09.062.93.503 1.84 1.013 2.75 1.562.03.02.06.044.1.063.9.551 1.8 1.122 2.68 1.719.55.369 1.09.769 1.63 1.156.36.261.73.512 1.09.781.02.014.05.018.06.031.88.656 1.75 1.36 2.6 2.063a70.875 70.875 0 0 1 5.06 4.594c1.62 1.62 3.14 3.3 4.56 5.031.01.011.03.02.03.031.71.855 1.41 1.715 2.07 2.594.28.379.54.773.81 1.156.37.517.74 1.038 1.09 1.563.02.024.05.038.07.062.59.89 1.16 1.778 1.71 2.688.02.032.05.061.07.094a75.45 75.45 0 0 1 1.56 2.75c.02.032.04.06.06.093.5.921.96 1.844 1.41 2.781l.06.126c.45.929.88 1.868 1.28 2.812l.06.156c.4.936.78 1.895 1.13 2.844l.06.156c.35.949.67 1.885.97 2.844l.06.219c.3.941.56 1.894.82 2.843l.06.219c.08.31.14.627.22.938.17.72.35 1.432.5 2.156.07.342.12.688.18 1.031.12.617.25 1.225.35 1.844.15.893.26 1.791.37 2.688.02.145.05.291.07.437l.03.281.03.282c.1.952.19 1.912.25 2.872.06.94.11 1.9.12 2.85.01.1 0 .2 0 .31.02 1.05.01 2.1-.03 3.15-.03.94-.08 
1.86-.15 2.79-.01.12-.02.25-.04.37-.07.92-.16 1.84-.28 2.75-.12.96-.27 1.93-.43 2.88-.02.08-.02.16-.04.25-.02.12-.03.25-.06.37-.16.91-.36 1.82-.56 2.72-.03.13-.06.27-.09.41a86.1 86.1 0 0 1-.69 2.65c-.17.59-.35 1.17-.53 1.75-.18.57-.34 1.13-.53 1.69-.3.86-.61 1.71-.94 2.56-.06.16-.13.32-.19.47-.04.11-.08.23-.12.34-.36.88-.73 1.73-1.13 2.6-.38.82-.77 1.63-1.19 2.44-.08.15-.16.31-.25.46-.41.8-.83 1.6-1.28 2.38-.09.17-.18.34-.28.5-.2.34-.42.66-.62 1-.37.59-.75 1.17-1.13 1.75 0 .01.01.02 0 .03-.49.75-1 1.49-1.53 2.22-.46.64-.95 1.25-1.44 1.87-.19.26-.36.53-.56.79l-.41.5c-.16.2-.33.39-.5.59-.55.67-1.13 1.32-1.72 1.97-.6.67-1.23 1.35-1.87 2-.16.16-.31.34-.47.5-21.13 21.13-52.26 24.49-77.19 11.15l.13-.18c-3.71-1.9-7.26-4.14-10.63-6.78-.54-.43-1.06-.87-1.59-1.32-.51-.43-1.03-.86-1.53-1.31-.09-.08-.19-.16-.28-.25a86.97 86.97 0 0 1-2.41-2.31c-.79-.8-1.56-1.59-2.31-2.41-.09-.09-.17-.19-.25-.28-.45-.5-.88-1.02-1.31-1.53-.45-.53-.89-1.05-1.32-1.59a65.831 65.831 0 0 1-6.78-10.63l-.19.13c-13.33-24.93-9.97-56.061 11.16-77.191.16-.163.34-.308.5-.469.65-.64 1.33-1.269 2-1.875.65-.588 1.3-1.162 1.97-1.719.27-.227.54-.465.81-.687.35-.278.72-.512 1.06-.782.63-.483 1.24-.98 1.88-1.437.74-.53 1.5-1.036 2.25-1.531.58-.383 1.16-.764 1.75-1.125.45-.274.89-.551 1.34-.813.69-.395 1.37-.757 2.07-1.125a61.343 61.343 0 0 1 3.37-1.687c.87-.397 1.72-.769 2.59-1.125.16-.062.32-.096.47-.157.97-.384 1.93-.758 2.91-1.093.56-.192 1.12-.356 1.69-.532.58-.181 1.16-.367 1.75-.531.88-.246 1.77-.48 2.65-.687.07-.017.15-.015.22-.032.42-.095.84-.193 1.25-.281.67-.141 1.36-.287 2.03-.406.09-.015.17-.017.25-.031.95-.164 1.92-.317 2.88-.438a65.183 65.183 0 0 1 9.06-.5z" transform="matrix(1 .06864 0 .8487 0 26.913)" filter="url(#f)" fill="#2d3335"/>
<path opacity=".5" d="M9 17.25s7.5 11 11.75 25.75S27 68.5 27 68.5s-5.5 12.75-8.25 24.75-5.75 25-5.75 25 5.75 16 9.25 30S26 170.5 26 170.5s-4 10.25-7.5 24.25-5 30.75-5 30.75 9.25-2 28.5 1.25 32.25 6 32.25 6 12.75-2.75 24-6.25 16.25-6.5 16.25-6.5 5.5.5 22.5 6.25 29.25 8.5 29.25 8.5 13-2.75 26-5.75 26.5-8 26.5-8-.75-5 4.25-24.5 8.75-28 8.75-28-.5-2.5-3.75-17.75S218 121 218 121s1.75-12.5 6.75-25.75S235 68.5 235 68.5s-4.75-15.25-7.5-29.75S219.25 15 219.25 15 195 24 187.5 25.5s-21 5.25-21 5.25-9.75-4.25-22-8.5-29.75-5.5-29.75-5.5-3.25 3.5-22 8-27.5 5.75-27.5 5.75-18.5-9-31.5-11.5-24-2-24.75-1.75z" transform="matrix(1 0 0 .84657 0 27.66)" filter="url(#g)" fill="url(#h)"/>
<path fill="#ceeeab" d="M9 2.25s7.5 13 11.75 27.75S27 55.5 27 55.5s-5.5 12.75-8.25 24.75-5.75 23-5.75 23 5.75 16 9.25 30S26 157.5 26 157.5s-4 10.25-7.5 24.25-5 30.75-5 30.75 9.25-2 28.5 1.25 32.25 6 32.25 6 12.75-2.75 24-6.25 16.25-6.5 16.25-6.5 5.5.5 22.5 6.25 29.25 8.5 29.25 8.5 13-2.75 26-5.75 26.5-8 26.5-8-.75-5 4.25-24.5 8.75-28 8.75-28-.5-4.5-3.75-19.75S218 106 218 106s1.75-10.5 6.75-23.75S235 55.5 235 55.5s-4.75-15.25-7.5-29.75S219.25 0 219.25 0 195 9 187.5 10.5s-21 5.25-21 5.25-9.75-4.25-22-8.5-29.75-5.5-29.75-5.5-3.25 3.5-22 8-27.5 5.75-27.5 5.75-18.5-9-31.5-11.5S9.75 2 9 2.25z"/>
<g transform="translate(0 -10)" clip-path="url(#i)">
<path fill="#a6dd8b" d="m110.75 5.5-4.25 7c-.75 9-2.25 13.75-8.5 16.5s-12.25 3.5-11 7.5 13 9.25 14.25 13 8.25 1.75 11 7 2 15.25-3.75 17.25-17.5.5-20.25 9.5-4.75 10.5-9 12.75-7.25 10.5-3.5 16.5 12.25-1.25 15-6.5S98 97.25 98 97.25h23.75l59-1.75 3.25-3.75s3.25 4 2.75 8.75-3.75 14.75.25 17.25 19.5-2 24-7-4.75-28.25-10.5-29.5-18.5-1.75-17-7 11.75 4.5 17.25 3.25 16.75-21 12-25.25-24.25-5.25-25.75-8 21-8 22.25-11-2.5-7.25-6.25-8.5S189 41 182 40.5s-20 5.75-17.25 11.5-6 11-14.5 6.5-24.75-13-21.75-24S150 15.75 150 15.75L110.75 5.5zM97.188 112.72c-1.595.15-4.469 4.69-4.938 7.03-.5 2.5-3.25 6.75-3.5 12.25s4.75 6.75 8.75 6.5 2.75-6.75 2-15c-.25 0-.5-10.25-2-10.75-.094-.03-.206-.04-.312-.03zm92.722 51.66c-6.34.29-6.63 7.46-7.41 8.87-1.25 2.25 2.25 13.75 2 18s-4.75 5.25-9.5 9.75.5 16 11.25 31l44.75 1.25-1-35s4.75-4.25-20.75-24.25c-9.56-7.5-15.54-9.8-19.34-9.62zM39.94 180.91c-4.03-.11-8.282 5.71-9.688 7.59-1.5 2-6.25 5-17 9.5l-14.5 34.25 53 4.25s1.75-11-4.25-15.75-17.25-5.75-18.25-13.25 9.25-10 13.75-14.25 2-11.25-2.25-12.25a3.728 3.728 0 0 0-.812-.09z"/>
</g>
<path fill="#aac3e7" d="M158.53 75.344c-4.76-.015-9.03.968-11.53 3.156-8 7-35 .75-48.5 7s-13.25 38-14.75 44.5-17.5 20.75-20 23.5-13.25 7.25-19.5 8.5-12.75 7.25-15.5 11c-2.021 2.76-7.406 6.45-10.125 8.22-.046.18-.079.35-.125.53-.134.53-.247 1.08-.375 1.63 4.98-1.61 11.18-8.18 16.625-13.63 6.25-6.25 20-7.75 27.75-11.5S76.75 138.5 89 134.5s21.25 11.75 24.25 18.5 1.75 12.75 3.75 17 11 11.75 11.5 13.5-5 6.5-6.25 8.5-10.5 7-11.75 8.75c-.97 1.35-1.92 6.77-2.31 9.22.41-.17.75-.31 1.12-.47.49-2.53 1.52-7.08 2.94-8.5 2-2 11.25-4.5 12.5-8.5s7-6.5 7-6.5 2.75 4 16 14c8.83 6.67 12.76 15.53 14.41 20.72.67.17 1.53.41 2.03.53-1.22-4.32-4.84-16.24-8.94-20.75-5-5.5-18.5-10.75-22.75-22S108 144.25 115 138.25s16.5-4 28.5 7.5 46.25 5.75 57.75 3.75c9.95-1.73 20.83 14.88 23.91 26.03.59-2.09 1.12-4 1.65-5.75-1.74-1.92-3.69-4.62-5.31-8.28-3.75-8.5-12-13.25-12-13.25s8.75-5 14.75-7.75c1.62-.74 3.01-1.68 4.19-2.66-.16-.77-.26-1.25-.44-2.09l-.09-.44c-4.77 5.56-19.24 9.62-21.66 10.94-2.75 1.5-18.25 3-35.75 4.5s-26.75-7.5-34.25-14.75-13-36-3-38 20 13.75 30 17 21.5-15.75 19.75-27c-1.2-7.734-14-12.625-24.47-12.656zm-53.34 13.281c1.27.001 5.86 1.594 6.56 2.375 2.25 2.5 2 7.25 0 8s-12.75 8-10.5 14.25 1.75 18-3.5 18-8 0-10-2.5-2-12 0-19.75 3.5-15 8-18.25c3.094-2.234 6.63-2.128 9.44-2.125zm13.59 73.095c.2.01 1.91 3.51 4.72 7.03 3 3.75 3.25 8.25 3.25 8.25s-4.25-4.75-6-8-2-7.25-2-7.25c0-.03.02-.03.03-.03z"/>
<path opacity=".387" stroke-linejoin="round" d="M122.75 62.25c6.25.5 12.25-2.25 10.75-6.5s-12.5-7.25-13.25-3 2 10.5 2.5 9.5zM108.75 46.75c-8.5-5.5-8.5-7.25-7-8s10.25 5 12.75 8-2.25 2-5.75 0z" stroke="#6d7f42" stroke-linecap="round" fill="none"/>
<g clip-path="url(#j)" transform="translate(0 -10)" stroke="#6d7f42" fill="none">
<path opacity=".387" stroke-linejoin="round" d="M15.75 151.25c33-2.5 38.25-3.5 36.25-10.25s-8.75-23.5-23-22.5-22.75-6.75-22.75-6.75" stroke-linecap="round"/>
<path opacity=".387" d="M17.5 177.25c18.25-10 28.75-8.5 35-11.5s13.25-5.75 15.75-9.5 5.75-11.5 5.75-18-3.5-36 3.25-43.5 17.25-26 26-23.25 12.25 9.75 22 9.5S147 77 149.5 68.75s-15.75-16-16-23 5.5-14.25 20.25-17 23.5-11.5 23.5-11.5"/>
<path opacity=".387" stroke-linejoin="round" d="M14.5 161c15.25-1.5 22.25 3.5 31.5 1.75s13.25-3 17.75-5.75 6-3.75 6.25-6.5 1-12.5-3-22S54.5 97 58.75 89.75 64 68.25 74.5 65.25 95.5 59 102.25 64s12.75 14.25 20 14.5S134 77 136.75 74s4.5-10.25 1.75-13.75-15.5-5.5-17.25-9.5-10.75-17-.25-26.75 36.25-8.25 36.25-8.25" stroke-linecap="round"/>
<path opacity=".387" stroke-linejoin="round" d="M21.25 159c13.75 1 21.5 4.25 33.5-.5s13.5-5.25 13.75-8.75-.25-15.75-3-20-13-13.75-15.75-24.25.25-41 8.5-48.25 32-7.25 37.5-10.5 5.25-16.75 13-24.5 27-12.25 27-12.25M190 18.5c11.75 5 39 9.75 47.25 40.25M9.75 214c19-12.5 36-1.25 49-4s38.25-37.25 46-37 5.5-11.75 8.5-13.25 5.5 6 11.75 8 24.5-2.75 23.5-5.75-7-7.75-5.5-8.5 8.5 5 12.75 3.5 43.75-1 48.5-13.5 21.5-13.5 21.5-13.5M77.5 233.5c13.5-11.5 23.25-25 28.5-27s12.75-3.75 15.75-8 2.5-10.75 5.25-12 3.75 5.75 8.75 6 36.75-5 42.75-11.75 32.25-12.5 32.5-14.5-1-4.75.25-5.5 24.75-6 24.75-6M168.75 236c9-13.75 20.5-41 29.5-42s10.25-2.5 12.5-4.25 17 2.25 17 2.25" stroke-linecap="round"/>
</g>
<g clip-path="url(#k)" transform="translate(0 -10)" stroke="#d38484" stroke-linecap="round" stroke-width="2" fill="none">
<path d="m57.75 20-8.5 28.25 18 6.25L75 90.75 54 113l9 10.5L51 135l.5 4.5L71.75 164l14-6.75 20.5 18.5L95.75 204l10.25 8.5-2.75 13"/>
<path d="m105.75 212.25 12.5-27.75 11-7 27.5 15.75 20.5-3.75-.25-15.75-10.25-6 12.75-26.25 5.75-3.75 38.75-10"/>
</g>
<g opacity=".504">
<path d="M219.25 330s-24.25 9-31.75 10.5-21 5.25-21 5.25-9.75-4.25-22-8.5-29.75-5.5-29.75-5.5-3.25 3.5-22 8-27.5 5.75-27.5 5.75-18.5-9-31.5-11.5-24-2-24.75-1.75c0 0 7.5 13 11.75 27.75S27 385.5 27 385.5l51.469 5.125 48.971-4 52.25 12.031c10.38-4.418 52.11-4.9 55.31-13.156 0 0-4.75-15.25-7.5-29.75S219.25 330 219.25 330z" fill="#b1e479"/>
<path d="M27 385.5s-5.5 12.75-8.25 24.75-5.75 23-5.75 23l50.969 16.37 53.721-15.74 52.25 17.53c16.36-4.26 32.39-9.12 48.06-15.41 0 0 1.75-10.5 6.75-23.75S235 385.5 235 385.5c-3.2 8.256-44.93 8.738-55.31 13.156l-52.25-12.031-47.971 4L27 385.5z" fill="#87d531"/>
<path d="M231.75 485.5c-17.9 7.67-35.94 14.9-54.31 21.41l-52.25-18.53-50.221 9L26 487.5s-4 10.25-7.5 24.25-5 30.75-5 30.75 9.25-2 28.5 1.25 32.25 6 32.25 6 12.75-2.75 24-6.25 16.25-6.5 16.25-6.5 5.5.5 22.5 6.25 29.25 8.5 29.25 8.5 13-2.75 26-5.75 26.5-8 26.5-8-.75-5 4.25-24.5 8.75-28 8.75-28z" fill="#ceeeab"/>
<path d="M13 433.25s5.75 16 9.25 30S26 487.5 26 487.5l48.969 9.88 50.221-9 52.25 18.53c18.37-6.51 36.41-13.74 54.31-21.41 0 0-.5-4.5-3.75-19.75S218 436 218 436c-15.67 6.29-31.7 11.15-48.06 15.41l-52.25-17.53-53.721 15.74L13 433.25z" fill="#b9e787"/>
</g>
<g opacity=".522">
<path d="M13.719 332.031c-2.729-.007-4.438.125-4.719.219 0 0 7.5 13 11.75 27.75S27 385.5 27 385.5s-5.5 12.75-8.25 24.75-5.75 23-5.75 23 5.75 16 9.25 30S26 487.5 26 487.5s-4 10.25-7.5 24.25-5 30.75-5 30.75 9.25-2 28.5 1.25 32.25 6 32.25 6l1.5-53.25L63.5 450l16-59.75c-2.22-15.48-9.068-30.046-14.25-44.75 0 0-18.5-9-31.5-11.5-8.125-1.563-15.483-1.956-20.031-1.969z" fill="#83d32b"/>
<path d="M114.75 331.75s-3.25 3.5-22 8-27.5 5.75-27.5 5.75c5.182 14.704 12.03 29.27 14.25 44.75L63.5 450l12.25 46.5-1.5 53.25s12.75-2.75 24-6.25 16.25-6.5 16.25-6.5l11.25-48.5-8.25-54.25 11.75-45c-3.44-9.727-8.06-56.93-14.5-57.5z" fill="#b1e479"/>
<path d="M219.25 330s-24.25 9-31.75 10.5-21 5.25-21 5.25c4.92-1.358 11.44 45.392 14.25 52.25l-11.25 53.25 9.5 55-12.75 45.5s13-2.75 26-5.75 26.5-8 26.5-8-.75-5 4.25-24.5 8.75-28 8.75-28-.5-4.5-3.75-19.75S218 436 218 436s1.75-10.5 6.75-23.75S235 385.5 235 385.5s-4.75-15.25-7.5-29.75-8.25-25.75-8.25-25.75z" fill="#a4df62"/>
<path d="M114.75 331.75c6.44.57 11.06 47.773 14.5 57.5l-11.75 45 8.25 53.25L114.5 537s5.5.5 22.5 6.25 29.25 8.5 29.25 8.5l12.75-45.5-9.5-55L180.75 398c-2.81-6.858-9.33-53.608-14.25-52.25 0 0-9.75-4.25-22-8.5s-29.75-5.5-29.75-5.5z" fill="#ceeeab"/>
</g>
<path opacity=".039" d="M9-189.75s7.5 13 11.75 27.75S27-136.5 27-136.5s-5.5 12.75-8.25 24.75-5.75 23-5.75 23 5.75 16 9.25 30S26-34.5 26-34.5s-4 10.25-7.5 24.25-5 30.75-5 30.75 9.25-2 28.5 1.25 32.25 6 32.25 6S87 25 98.25 21.5 114.5 15 114.5 15s5.5.5 22.5 6.25 29.25 8.5 29.25 8.5 13-2.75 26-5.75 26.5-8 26.5-8S218 11 223-8.5s8.75-28 8.75-28-.5-4.5-3.75-19.75S218-86 218-86s1.75-10.5 6.75-23.75S235-136.5 235-136.5s-4.75-15.25-7.5-29.75-8.25-25.75-8.25-25.75-24.25 9-31.75 10.5-21 5.25-21 5.25-9.75-4.25-22-8.5-29.75-5.5-29.75-5.5-3.25 3.5-22 8-27.5 5.75-27.5 5.75-18.5-9-31.5-11.5-24-2-24.75-1.75z" fill="url(#l)" transform="translate(0 192)"/>
<g opacity=".27">
<path fill="url(#m)" d="M219.25-182s-24.25 9-31.75 10.5-21 5.25-21 5.25c4.92-1.36 11.44 45.39 14.25 52.25l-.06.28c11.81-4.07 51.21-4.77 54.31-12.78 0 0-4.75-15.25-7.5-29.75S219.25-182 219.25-182z" transform="translate(0 182)"/>
<path fill="url(#n)" d="M27-126.5s-5.5 12.75-8.25 24.75-5.75 23-5.75 23l50.625 16.281 15.781-58.911L27-126.5z" transform="translate(0 182)"/>
<path fill="url(#o)" d="M26-24.5S22-14.25 18.5-.25s-5 30.75-5 30.75.147-.045.438-.094c2.033-.338 11.218-1.5 28.062 1.344 19.25 3.25 32.25 6 32.25 6l1.469-52.5-.75.125L26-24.5z" transform="translate(0 182)"/>
<path fill="url(#p)" d="M114.75-180.25s-3.25 3.5-22 8-27.5 5.75-27.5 5.75c5.182 14.7 12.03 29.27 14.25 44.75l-.094.31 48.034-3.94 1.12.26c-3.36-13.33-7.79-54.6-13.81-55.13z" transform="translate(0 182)"/>
<path fill="url(#q)" d="M117.59-78.094 63.969-62.375l-.344-.094L63.5-62l12.25 46.5-.031.75 49.471-8.875.53.187.03-.062-8.25-54.25.09-.344z" transform="translate(0 182)"/>
<path fill="url(#r)" d="M128.56-125.12c.24.92.47 1.74.69 2.37l-11.66 44.656.1-.031L169.5-60.75l11.19-52.97c-.36.12-.7.25-1 .38l-51.13-11.78z" transform="translate(0 182)"/>
<path fill="url(#s)" d="M125.53-23.5 114.5 25s5.5.5 22.5 6.25c2.12.719 4.16 1.406 6.12 2.031 1.97.626 3.86 1.186 5.63 1.719 10.62 3.199 17.5 4.75 17.5 4.75l12.72-45.406c-.51.182-1.02.38-1.53.562L125.53-23.5z" transform="translate(0 182)"/>
<path fill="url(#t)" d="M218-76c-15.67 6.288-31.7 11.15-48.06 15.406l-.44-.156 9.5 55-.03.094c17.84-6.36 35.38-13.387 52.78-20.844 0 0-.5-4.5-3.75-19.75S218-76 218-76z" transform="translate(0 182)"/>
</g>
<g opacity=".043">
<path fill="url(#u)" d="M13.719-179.97c-1.106 0-1.972.03-2.719.06-.445.02-.855.04-1.156.07-.03 0-.066-.01-.094 0-.398.03-.656.06-.75.09 0 0 7.5 13 11.75 27.75S27-126.5 27-126.5l51.469 5.12.937-.06.094-.31c-2.22-15.48-9.068-30.05-14.25-44.75 0 0-18.5-9-31.5-11.5-1.018-.2-2.01-.37-3-.53-.596-.1-1.167-.2-1.75-.28-.237-.04-.453-.06-.688-.1a98.772 98.772 0 0 0-3.812-.46c-.735-.08-1.395-.13-2.094-.19-.685-.06-1.353-.11-2-.16-.992-.07-1.89-.12-2.781-.15-1.456-.07-2.769-.1-3.906-.1z" transform="translate(0 182)"/>
<path fill="url(#v)" d="M13-78.75s5.75 16 9.25 30S26-24.5 26-24.5l48.969 9.875.75-.125.031-.75L63.5-62l.125-.469L13-78.75z" transform="translate(0 182)"/>
<path fill="url(#w)" d="m127.44-125.38-47.971 4h-.063L63.625-62.469l.344.094 53.621-15.719 11.66-44.656c-.22-.63-.45-1.45-.69-2.37l-1.12-.26z" transform="translate(0 182)"/>
<path fill="url(#x)" d="M125.19-23.625 75.719-14.75l-1.469 52.5S87 35 98.25 31.5 114.5 25 114.5 25l11.22-48.438-.53-.187z" transform="translate(0 182)"/>
<path fill="url(#y)" d="M114.75-180.25c6.02.53 10.45 41.8 13.81 55.13l51.13 11.78c.3-.13.64-.26 1-.38l.06-.28c-2.81-6.86-9.33-53.61-14.25-52.25 0 0-.61-.25-1.72-.72-3.32-1.39-11.09-4.59-20.28-7.78-12.25-4.25-29.75-5.5-29.75-5.5z" transform="translate(0 182)"/>
<path fill="url(#z)" d="m117.69-78.125-.1.031-.09.344 8.25 53.25-.22 1 51.91 18.406c.51-.181 1.02-.38 1.53-.562l.03-.094-9.5-55-51.81-17.375z" transform="translate(0 182)"/>
<path fill="url(#A)" d="M231.75-26.5c-17.4 7.457-34.94 14.484-52.78 20.844L166.25 39.75s13-2.75 26-5.75 26.5-8 26.5-8S218 21 223 1.5s8.75-28 8.75-28z" transform="translate(0 182)"/>
<path fill="url(#B)" d="M235-126.5c-3.1 8.01-42.5 8.71-54.31 12.78L169.5-60.75l.44.156C186.3-64.85 202.33-69.712 218-76c0 0 1.75-10.5 6.75-23.75S235-126.5 235-126.5z" transform="translate(0 182)"/>
</g>
<g clip-path="url(#C)" transform="matrix(1.2525 .0354 0 1.2525 -27.809 -48.344)">
<path fill="#ceeeab" d="M9 12.25s7.5 13 11.75 27.75S27 65.5 27 65.5s-5.5 12.75-8.25 24.75-5.75 23-5.75 23 5.75 16 9.25 30S26 167.5 26 167.5s-4 10.25-7.5 24.25-5 30.75-5 30.75 9.25-2 28.5 1.25 32.25 6 32.25 6 12.75-2.75 24-6.25 16.25-6.5 16.25-6.5 5.5.5 22.5 6.25 29.25 8.5 29.25 8.5 13-2.75 26-5.75 26.5-8 26.5-8-.75-5 4.25-24.5 8.75-28 8.75-28-.5-4.5-3.75-19.75S218 116 218 116s1.75-10.5 6.75-23.75S235 65.5 235 65.5s-4.75-15.25-7.5-29.75S219.25 10 219.25 10 195 19 187.5 20.5s-21 5.25-21 5.25-9.75-4.25-22-8.5-29.75-5.5-29.75-5.5-3.25 3.5-22 8-27.5 5.75-27.5 5.75-18.5-9-31.5-11.5-24-2-24.75-1.75z"/>
<g clip-path="url(#D)">
<path fill="#a6dd8b" d="m110.75 5.5-4.25 7c-.75 9-2.25 13.75-8.5 16.5s-12.25 3.5-11 7.5 13 9.25 14.25 13 8.25 1.75 11 7 2 15.25-3.75 17.25-17.5.5-20.25 9.5-4.75 10.5-9 12.75-7.25 10.5-3.5 16.5 12.25-1.25 15-6.5S98 97.25 98 97.25h23.75l59-1.75 3.25-3.75s3.25 4 2.75 8.75-3.75 14.75.25 17.25 19.5-2 24-7-4.75-28.25-10.5-29.5-18.5-1.75-17-7 11.75 4.5 17.25 3.25 16.75-21 12-25.25-24.25-5.25-25.75-8 21-8 22.25-11-2.5-7.25-6.25-8.5S189 41 182 40.5s-20 5.75-17.25 11.5-6 11-14.5 6.5-24.75-13-21.75-24S150 15.75 150 15.75L110.75 5.5zM97.188 112.72c-1.595.15-4.469 4.69-4.938 7.03-.5 2.5-3.25 6.75-3.5 12.25s4.75 6.75 8.75 6.5 2.75-6.75 2-15c-.25 0-.5-10.25-2-10.75-.094-.03-.206-.04-.312-.03zm92.722 51.66c-6.34.29-6.63 7.46-7.41 8.87-1.25 2.25 2.25 13.75 2 18s-4.75 5.25-9.5 9.75.5 16 11.25 31l44.75 1.25-1-35s4.75-4.25-20.75-24.25c-9.56-7.5-15.54-9.8-19.34-9.62zM39.94 180.91c-4.03-.11-8.282 5.71-9.688 7.59-1.5 2-6.25 5-17 9.5l-14.5 34.25 53 4.25s1.75-11-4.25-15.75-17.25-5.75-18.25-13.25 9.25-10 13.75-14.25 2-11.25-2.25-12.25a3.728 3.728 0 0 0-.812-.09z"/>
</g>
<path fill="#aac3e7" d="M158.53 85.344c-4.76-.015-9.03.968-11.53 3.156-8 7-35 .75-48.5 7s-13.25 38-14.75 44.5-17.5 20.75-20 23.5-13.25 7.25-19.5 8.5-12.75 7.25-15.5 11c-2.021 2.76-7.406 6.45-10.125 8.22-.046.18-.079.35-.125.53-.134.53-.247 1.08-.375 1.63 4.98-1.61 11.18-8.18 16.625-13.63 6.25-6.25 20-7.75 27.75-11.5S76.75 148.5 89 144.5s21.25 11.75 24.25 18.5 1.75 12.75 3.75 17 11 11.75 11.5 13.5-5 6.5-6.25 8.5-10.5 7-11.75 8.75c-.97 1.35-1.92 6.77-2.31 9.22.41-.17.75-.31 1.12-.47.49-2.53 1.52-7.08 2.94-8.5 2-2 11.25-4.5 12.5-8.5s7-6.5 7-6.5 2.75 4 16 14c8.83 6.67 12.76 15.53 14.41 20.72.67.17 1.53.41 2.03.53-1.22-4.32-4.84-16.24-8.94-20.75-5-5.5-18.5-10.75-22.75-22S108 154.25 115 148.25s16.5-4 28.5 7.5 46.25 5.75 57.75 3.75c9.95-1.73 20.83 14.88 23.91 26.03.59-2.09 1.12-4 1.65-5.75-1.74-1.92-3.69-4.62-5.31-8.28-3.75-8.5-12-13.25-12-13.25s8.75-5 14.75-7.75c1.62-.74 3.01-1.68 4.19-2.66-.16-.77-.26-1.25-.44-2.09l-.09-.44c-4.77 5.56-19.24 9.62-21.66 10.94-2.75 1.5-18.25 3-35.75 4.5s-26.75-7.5-34.25-14.75-13-36-3-38 20 13.75 30 17 21.5-15.75 19.75-27c-1.2-7.734-14-12.625-24.47-12.656zm-53.34 13.281c1.27.001 5.86 1.595 6.56 2.375 2.25 2.5 2 7.25 0 8s-12.75 8-10.5 14.25 1.75 18-3.5 18-8 0-10-2.5-2-12 0-19.75 3.5-15 8-18.25c3.094-2.234 6.63-2.128 9.44-2.125zm13.59 73.095c.2.01 1.91 3.51 4.72 7.03 3 3.75 3.25 8.25 3.25 8.25s-4.25-4.75-6-8-2-7.25-2-7.25c0-.03.02-.03.03-.03z"/>
<path opacity=".387" stroke-linejoin="round" d="M122.75 72.25c6.25.5 12.25-2.25 10.75-6.5s-12.5-7.25-13.25-3 2 10.5 2.5 9.5zM108.75 56.75c-8.5-5.5-8.5-7.25-7-8s10.25 5 12.75 8-2.25 2-5.75 0z" stroke="#6d7f42" stroke-linecap="round" fill="none"/>
<g stroke="#6d7f42" fill="none" clip-path="url(#E)">
<path opacity=".387" stroke-linejoin="round" d="M15.75 151.25c33-2.5 38.25-3.5 36.25-10.25s-8.75-23.5-23-22.5-22.75-6.75-22.75-6.75" stroke-linecap="round"/>
<path opacity=".387" d="M17.5 177.25c18.25-10 28.75-8.5 35-11.5s13.25-5.75 15.75-9.5 5.75-11.5 5.75-18-3.5-36 3.25-43.5 17.25-26 26-23.25 12.25 9.75 22 9.5S147 77 149.5 68.75s-15.75-16-16-23 5.5-14.25 20.25-17 23.5-11.5 23.5-11.5"/>
<path opacity=".387" stroke-linejoin="round" d="M14.5 161c15.25-1.5 22.25 3.5 31.5 1.75s13.25-3 17.75-5.75 6-3.75 6.25-6.5 1-12.5-3-22S54.5 97 58.75 89.75 64 68.25 74.5 65.25 95.5 59 102.25 64s12.75 14.25 20 14.5S134 77 136.75 74s4.5-10.25 1.75-13.75-15.5-5.5-17.25-9.5-10.75-17-.25-26.75 36.25-8.25 36.25-8.25" stroke-linecap="round"/>
<path opacity=".387" stroke-linejoin="round" d="M21.25 159c13.75 1 21.5 4.25 33.5-.5s13.5-5.25 13.75-8.75-.25-15.75-3-20-13-13.75-15.75-24.25.25-41 8.5-48.25 32-7.25 37.5-10.5 5.25-16.75 13-24.5 27-12.25 27-12.25M190 18.5c11.75 5 39 9.75 47.25 40.25M9.75 214c19-12.5 36-1.25 49-4s38.25-37.25 46-37 5.5-11.75 8.5-13.25 5.5 6 11.75 8 24.5-2.75 23.5-5.75-7-7.75-5.5-8.5 8.5 5 12.75 3.5 43.75-1 48.5-13.5 21.5-13.5 21.5-13.5M77.5 233.5c13.5-11.5 23.25-25 28.5-27s12.75-3.75 15.75-8 2.5-10.75 5.25-12 3.75 5.75 8.75 6 36.75-5 42.75-11.75 32.25-12.5 32.5-14.5-1-4.75.25-5.5 24.75-6 24.75-6M168.75 236c9-13.75 20.5-41 29.5-42s10.25-2.5 12.5-4.25 17 2.25 17 2.25" stroke-linecap="round"/>
</g>
<g clip-path="url(#F)" stroke="#d38484" stroke-linecap="round" stroke-width="2" fill="none">
<path d="m57.75 20-8.5 28.25 18 6.25L75 90.75 54 113l9 10.5L51 135l.5 4.5L71.75 164l14-6.75 20.5 18.5L95.75 204l10.25 8.5-2.75 13"/>
<path d="m105.75 212.25 12.5-27.75 11-7 27.5 15.75 20.5-3.75-.25-15.75-10.25-6 12.75-26.25 5.75-3.75 38.75-10"/>
</g>
<path opacity=".039" d="M9 12.25s7.5 13 11.75 27.75S27 65.5 27 65.5s-5.5 12.75-8.25 24.75-5.75 23-5.75 23 5.75 16 9.25 30S26 167.5 26 167.5s-4 10.25-7.5 24.25-5 30.75-5 30.75 9.25-2 28.5 1.25 32.25 6 32.25 6 12.75-2.75 24-6.25 16.25-6.5 16.25-6.5 5.5.5 22.5 6.25 29.25 8.5 29.25 8.5 13-2.75 26-5.75 26.5-8 26.5-8-.75-5 4.25-24.5 8.75-28 8.75-28-.5-4.5-3.75-19.75S218 116 218 116s1.75-10.5 6.75-23.75S235 65.5 235 65.5s-4.75-15.25-7.5-29.75S219.25 10 219.25 10 195 19 187.5 20.5s-21 5.25-21 5.25-9.75-4.25-22-8.5-29.75-5.5-29.75-5.5-3.25 3.5-22 8-27.5 5.75-27.5 5.75-18.5-9-31.5-11.5-24-2-24.75-1.75z" fill="url(#G)"/>
<g opacity=".27">
<path fill="url(#m)" d="M219.25-182s-24.25 9-31.75 10.5-21 5.25-21 5.25c4.92-1.36 11.44 45.39 14.25 52.25l-.06.28c11.81-4.07 51.21-4.77 54.31-12.78 0 0-4.75-15.25-7.5-29.75S219.25-182 219.25-182z" transform="translate(0 192)"/>
<path fill="url(#n)" d="M27-126.5s-5.5 12.75-8.25 24.75-5.75 23-5.75 23l50.625 16.281 15.781-58.911L27-126.5z" transform="translate(0 192)"/>
<path fill="url(#o)" d="M26-24.5S22-14.25 18.5-.25s-5 30.75-5 30.75.147-.045.438-.094c2.033-.338 11.218-1.5 28.062 1.344 19.25 3.25 32.25 6 32.25 6l1.469-52.5-.75.125L26-24.5z" transform="translate(0 192)"/>
<path fill="url(#p)" d="M114.75-180.25s-3.25 3.5-22 8-27.5 5.75-27.5 5.75c5.182 14.7 12.03 29.27 14.25 44.75l-.094.31 48.034-3.94 1.12.26c-3.36-13.33-7.79-54.6-13.81-55.13z" transform="translate(0 192)"/>
<path fill="url(#q)" d="M117.59-78.094 63.969-62.375l-.344-.094L63.5-62l12.25 46.5-.031.75 49.471-8.875.53.187.03-.062-8.25-54.25.09-.344z" transform="translate(0 192)"/>
<path fill="url(#r)" d="M128.56-125.12c.24.92.47 1.74.69 2.37l-11.66 44.656.1-.031L169.5-60.75l11.19-52.97c-.36.12-.7.25-1 .38l-51.13-11.78z" transform="translate(0 192)"/>
<path fill="url(#s)" d="M125.53-23.5 114.5 25s5.5.5 22.5 6.25c2.12.719 4.16 1.406 6.12 2.031 1.97.626 3.86 1.186 5.63 1.719 10.62 3.199 17.5 4.75 17.5 4.75l12.72-45.406c-.51.182-1.02.38-1.53.562L125.53-23.5z" transform="translate(0 192)"/>
<path fill="url(#t)" d="M218-76c-15.67 6.288-31.7 11.15-48.06 15.406l-.44-.156 9.5 55-.03.094c17.84-6.36 35.38-13.387 52.78-20.844 0 0-.5-4.5-3.75-19.75S218-76 218-76z" transform="translate(0 192)"/>
</g>
<g opacity=".043">
<path fill="url(#u)" d="M13.719-179.97c-1.106 0-1.972.03-2.719.06-.445.02-.855.04-1.156.07-.03 0-.066-.01-.094 0-.398.03-.656.06-.75.09 0 0 7.5 13 11.75 27.75S27-126.5 27-126.5l51.469 5.12.937-.06.094-.31c-2.22-15.48-9.068-30.05-14.25-44.75 0 0-18.5-9-31.5-11.5-1.018-.2-2.01-.37-3-.53-.596-.1-1.167-.2-1.75-.28-.237-.04-.453-.06-.688-.1a98.772 98.772 0 0 0-3.812-.46c-.735-.08-1.395-.13-2.094-.19-.685-.06-1.353-.11-2-.16-.992-.07-1.89-.12-2.781-.15-1.456-.07-2.769-.1-3.906-.1z" transform="translate(0 192)"/>
<path fill="url(#v)" d="M13-78.75s5.75 16 9.25 30S26-24.5 26-24.5l48.969 9.875.75-.125.031-.75L63.5-62l.125-.469L13-78.75z" transform="translate(0 192)"/>
<path fill="url(#w)" d="m127.44-125.38-47.971 4h-.063L63.625-62.469l.344.094 53.621-15.719 11.66-44.656c-.22-.63-.45-1.45-.69-2.37l-1.12-.26z" transform="translate(0 192)"/>
<path fill="url(#x)" d="M125.19-23.625 75.719-14.75l-1.469 52.5S87 35 98.25 31.5 114.5 25 114.5 25l11.22-48.438-.53-.187z" transform="translate(0 192)"/>
<path fill="url(#y)" d="M114.75-180.25c6.02.53 10.45 41.8 13.81 55.13l51.13 11.78c.3-.13.64-.26 1-.38l.06-.28c-2.81-6.86-9.33-53.61-14.25-52.25 0 0-.61-.25-1.72-.72-3.32-1.39-11.09-4.59-20.28-7.78-12.25-4.25-29.75-5.5-29.75-5.5z" transform="translate(0 192)"/>
<path fill="url(#z)" d="m117.69-78.125-.1.031-.09.344 8.25 53.25-.22 1 51.91 18.406c.51-.181 1.02-.38 1.53-.562l.03-.094-9.5-55-51.81-17.375z" transform="translate(0 192)"/>
<path fill="url(#A)" d="M231.75-26.5c-17.4 7.457-34.94 14.484-52.78 20.844L166.25 39.75s13-2.75 26-5.75 26.5-8 26.5-8S218 21 223 1.5s8.75-28 8.75-28z" transform="translate(0 192)"/>
<path fill="url(#B)" d="M235-126.5c-3.1 8.01-42.5 8.71-54.31 12.78L169.5-60.75l.44.156C186.3-64.85 202.33-69.712 218-76c0 0 1.75-10.5 6.75-23.75S235-126.5 235-126.5z" transform="translate(0 192)"/>
</g>
</g>
<path d="M174.28 35.875c-17.6 0-35.19 6.639-48.72 19.906-.16.164-.33.303-.5.469-22.05 22.056-26.206 55.25-12.47 81.5l-2.28 1.63c2.25 4.23 4.94 8.3 8.1 12.12l-13.32 13.34c-1.34-.58-2.78-1.03-4.56-1.37l-2.842 2.84c-1.11-.48-2.139-.81-3.063-.87L30.5 229.56c-.037 1.1.226 2.19.656 3.28l-.531.54-1.094 1.09c.716 3.74 2.011 6.82 5.469 10.28s6.42 4.63 10.281 5.47l1.094-1.1.719-.71c1.088.46 2.13.78 3.094.84l64.122-64.13c-.24-.97-.62-1.93-1.03-2.9l3-3c-.25-1.73-.68-3.2-1.31-4.6l13.28-13.28c3.82 3.16 7.89 5.85 12.13 8.1l1.62-2.28c26.25 13.74 59.44 9.58 81.5-12.47.17-.17.31-.34.47-.5 26.69-27.22 26.55-70.923-.47-97.94-13.59-13.59-31.41-20.375-49.22-20.375zm-.5 3.281c.11.002.21-.002.31 0a67.89 67.89 0 0 1 5.72.375c.1.01.19.021.28.031l.29.032c.14.017.29.044.43.062.9.11 1.8.228 2.69.375.62.102 1.23.224 1.84.344.35.066.69.116 1.04.187.72.153 1.43.324 2.15.5.31.077.63.138.94.219.07.019.14.043.22.063.95.253 1.89.516 2.84.812.07.023.15.04.22.063.95.3 1.91.625 2.84.969.06.019.11.042.16.062.95.352 1.9.728 2.84 1.125.06.022.11.041.16.063.94.4 1.88.836 2.81 1.281l.13.062c.93.451 1.86.91 2.78 1.407.03.017.06.044.09.062.93.503 1.84 1.013 2.75 1.562.03.02.06.044.1.063.9.551 1.8 1.122 2.68 1.719.55.369 1.09.769 1.63 1.156.36.261.73.512 1.09.781.02.014.05.018.06.031.88.656 1.75 1.36 2.6 2.063a70.875 70.875 0 0 1 5.06 4.594c1.62 1.62 3.14 3.3 4.56 5.031.01.011.03.02.03.031.71.855 1.41 1.715 2.07 2.594.28.379.54.773.81 1.156.37.517.74 1.038 1.09 1.563.02.024.05.038.07.062.59.89 1.16 1.778 1.71 2.688.02.032.05.061.07.094a75.45 75.45 0 0 1 1.56 2.75c.02.032.04.06.06.093.5.921.96 1.844 1.41 2.781l.06.126c.45.929.88 1.868 1.28 2.812l.06.156c.4.936.78 1.895 1.13 2.844l.06.156c.35.949.67 1.885.97 2.844l.06.219c.3.941.56 1.894.82 2.843l.06.219c.08.31.14.627.22.938.17.72.35 1.432.5 2.156.07.342.12.688.18 1.031.12.617.25 1.225.35 1.844.15.893.26 1.791.37 2.688.02.145.05.291.07.437l.03.281.03.282c.1.952.19 1.912.25 2.872.06.94.11 1.9.12 2.85.01.1 0 .2 0 .31.02 1.05.01 2.1-.03 3.15-.03.94-.08 1.86-.15 
2.79-.01.12-.02.25-.04.37-.07.92-.16 1.84-.28 2.75-.12.96-.27 1.93-.43 2.88-.02.08-.02.16-.04.25-.02.12-.03.25-.06.37-.16.91-.36 1.82-.56 2.72-.03.13-.06.27-.09.41a86.1 86.1 0 0 1-.69 2.65c-.17.59-.35 1.17-.53 1.75-.18.57-.34 1.13-.53 1.69-.3.86-.61 1.71-.94 2.56-.06.16-.13.32-.19.47-.04.11-.08.23-.12.34-.36.88-.73 1.73-1.13 2.6-.38.82-.77 1.63-1.19 2.44-.08.15-.16.31-.25.46-.41.8-.83 1.6-1.28 2.38-.09.17-.18.34-.28.5-.2.34-.42.66-.62 1-.37.59-.75 1.17-1.13 1.75 0 .01.01.02 0 .03-.49.75-1 1.49-1.53 2.22-.46.64-.95 1.25-1.44 1.87-.19.26-.36.53-.56.79l-.41.5c-.16.2-.33.39-.5.59-.55.67-1.13 1.32-1.72 1.97-.6.67-1.23 1.35-1.87 2-.16.16-.31.34-.47.5-21.13 21.13-52.26 24.49-77.19 11.15l.13-.18c-3.71-1.9-7.26-4.14-10.63-6.78-.54-.43-1.06-.87-1.59-1.32-.51-.43-1.03-.86-1.53-1.31-.09-.08-.19-.16-.28-.25a86.97 86.97 0 0 1-2.41-2.31c-.79-.8-1.56-1.59-2.31-2.41-.09-.09-.17-.19-.25-.28-.45-.5-.88-1.02-1.31-1.53-.45-.53-.89-1.05-1.32-1.59a65.831 65.831 0 0 1-6.78-10.63l-.19.13c-13.33-24.93-9.97-56.061 11.16-77.191.16-.163.34-.308.5-.469.65-.64 1.33-1.269 2-1.875.65-.588 1.3-1.162 1.97-1.719.27-.227.54-.465.81-.687.35-.278.72-.512 1.06-.782.63-.483 1.24-.98 1.88-1.437.74-.53 1.5-1.036 2.25-1.531.58-.383 1.16-.764 1.75-1.125.45-.274.89-.551 1.34-.813.69-.395 1.37-.757 2.07-1.125a61.343 61.343 0 0 1 3.37-1.687c.87-.397 1.72-.769 2.59-1.125.16-.062.32-.096.47-.157.97-.384 1.93-.758 2.91-1.093.56-.192 1.12-.356 1.69-.532.58-.181 1.16-.367 1.75-.531.88-.246 1.77-.48 2.65-.687.07-.017.15-.015.22-.032.42-.095.84-.193 1.25-.281.67-.141 1.36-.287 2.03-.406.09-.015.17-.017.25-.031.95-.164 1.92-.317 2.88-.438a65.183 65.183 0 0 1 9.06-.5z" clip-path="url(#H)" transform="matrix(1 .08088 0 1 0 -21.056)" filter="url(#I)" fill="#2d3335"/>
<path d="m48.01 227.46-4.807 4.55c-5.007-5.31-10.24-10.7-15.08-16.13l4.491-4.07c6.829 6.53 8.872 8.53 15.396 15.65z" fill="url(#J)"/>
<path d="M-95.5 225c28.719 0 52 23.28 52 52s-23.281 52-52 52c-28.72 0-52-23.28-52-52s23.28-52 52-52z" fill-opacity=".476" fill="#d0e9f2"/>
<path d="M-82 136c-27.61 0-50-22.39-50-50 0-27.614 22.39-50 50-50 27.614 0 50 22.386 50 50 0 27.61-22.386 50-50 50zm0-3c26.51 0 48-20.49 48-47s-21.49-47-48-47-48 20.49-48 47 21.49 47 48 47z" fill="#2d3335"/>
<path fill="#9eaaac" d="M-82 136c-27.61 0-50-22.39-50-50v-.5c.27 27.38 22.55 49.5 50 49.5 27.446 0 49.729-22.12 50-49.5.002.168 0 .332 0 .5 0 27.61-22.386 50-50 50zm-48-50.5V85c0-26.51 21.49-47 48-47s48 20.49 48 47c0 .168.002.333 0 .5C-34.27 59.234-55.658 39-82 39c-26.34 0-47.73 20.234-48 46.5z"/>
<path d="M-82 134c26.51 0 48-21.49 48-48s-21.49-48-48-48-48 21.49-48 48 21.49 48 48 48z" fill-opacity=".476" fill="#d0e9f2"/>
<path d="M-82 134c26.51 0 48-21.49 48-48s-21.49-48-48-48-48 21.49-48 48 21.49 48 48 48z" fill="url(#K)"/>
<path fill="url(#L)" d="m-96.438 130.72-.843 5C-92.451 137.2-87.316 138-82 138s10.451-.8 15.281-2.28l-.843-5C-72.115 132.19-76.959 133-82 133s-9.885-.81-14.438-2.28z"/>
<path fill="url(#M)" d="M-82 136.25c1.757 0 3.431.3 5 .81v26.63c-1.569.51-3.243.81-5 .81s-3.431-.3-5-.81v-26.63c1.569-.51 3.243-.81 5-.81z"/>
<path d="M-82 150.5c3.514 0 5.613.65 8 2.44v67.12c-3.137.91-4.486 1.44-8 1.44s-4.863-.53-8-1.44v-67.12c2.637-1.79 4.486-2.44 8-2.44z" fill="url(#N)"/>
<path d="M-82 154.5c3.514 0 7.238.78 10 2.44v65.12c-2.012 1.79-6.486 2.44-10 2.44s-7.863-.15-10-2.44v-65.12c1.887-1.66 6.486-2.44 10-2.44z" fill="url(#O)"/>
<path d="M-82 217.5c3.514 0 5.863.78 8 2.44v5.12c-2.387 1.54-4.486 2.44-8 2.44s-5.738-.9-8-2.44v-5.12c2.387-1.91 4.486-2.44 8-2.44z" fill="url(#P)"/>
<path fill="#e0bb41" d="M-82 221.5c-3.514 0-5.613.28-8 2.44v1.12c2.262 1.54 4.486 2.44 8 2.44s5.613-.9 8-2.44v-1.12c-2.512-2.04-4.486-2.44-8-2.44z"/>
<path d="M125.06 134.69c-27.176-27.18-27.176-71.251 0-98.431 27.18-27.18 71.25-27.18 98.43 0s27.18 71.251 0 98.431-71.25 27.18-98.43 0zm2.96-2.96c26.09 26.1 67.41 27.08 93.5.99 26.1-26.1 25.11-67.416-.98-93.509-26.09-26.092-67.41-27.077-93.51-.984-26.09 26.093-25.11 67.413.99 93.503z" fill="#2d3335"/>
<path fill="#9eaaac" d="M125.06 134.69c-27.176-27.18-27.176-71.251 0-98.431.17-.166.33-.329.5-.493-26.689 27.218-26.526 70.924.49 97.934 27.01 27.02 70.72 27.18 97.93.49-.16.17-.32.33-.49.5-27.18 27.18-71.25 27.18-98.43 0zm2.46-96.955c.17-.167.33-.327.5-.492 26.09-26.093 67.41-25.109 93.5.984 26.1 26.093 27.08 67.413.99 93.503l-.5.5c25.59-26.12 24.46-67.091-1.47-93.019-25.93-25.927-66.9-27.063-93.02-1.476z"/>
<path d="M127.03 132.72c26.1 26.09 68.4 26.09 94.49 0 26.1-26.1 26.1-68.4 0-94.493-26.09-26.093-68.39-26.093-94.49 0-26.09 26.093-26.09 68.393 0 94.493z" fill-opacity=".476" fill="#d0e9f2"/>
<path d="M127.03 132.72c26.1 26.09 68.4 26.09 94.49 0 26.1-26.1 26.1-68.4 0-94.493-26.09-26.093-68.39-26.093-94.49 0-26.09 26.093-26.09 68.393 0 94.493z" fill="url(#Q)"/>
<path fill="url(#R)" d="m116.05 115.28-5.75 4.09c3.3 6.21 7.56 12.05 12.8 17.28 5.23 5.24 11.07 9.5 17.28 12.8l4.09-5.75c-5.92-3.04-11.49-7.01-16.45-11.97s-8.93-10.53-11.97-16.45z"/>
<path fill="url(#S)" d="M124.82 134.93c1.73 1.73 3.08 3.67 4.12 5.72l-26.21 26.21a22.302 22.302 0 0 1-5.718-4.12 22.268 22.268 0 0 1-4.121-5.72l26.209-26.21c2.05 1.04 3.99 2.39 5.72 4.12z"/>
<path d="M116.27 159.23 50.199 225.3c-3.984-2.19-5.832-3-9.29-6.46s-4.267-5.3-6.459-9.29l66.07-66.07c5.06 3.75 12.12 10.4 15.75 15.75z" fill="url(#T)"/>
<path d="M105.78 154.12c3.46 3.46 7.43 6.66 8.52 11.02l-64.101 64.1c-6.517-5.16-14.555-12.79-19.686-19.69l64.1-64.1c3.491.23 7.657 5.16 11.117 8.62l.05.05z" fill="url(#U)"/>
<path opacity=".596" d="M105.53 154.36c3.46 3.46 7.68 6.42 8.77 10.78l-64.101 64.1c-7.877-6.91-11.798-10.63-19.686-19.69l64.1-64.1c3.491.23 7.417 5.45 10.877 8.91h.04z" fill="url(#V)"/>
<path opacity=".604" d="M131.17 37.05c40.66-32.173 78.13-12.728 77.78 5.303s-56.57 70.357-76.37 66.117-25.45-53.389-1.41-71.42z" fill="url(#W)"/>
<text opacity=".461" font-weight="bold" xml:space="preserve" transform="rotate(-8.9)" font-size="19.8" y="69.184" x="99.839" font-family="Monospace" fill="url(#X)"><tspan y="69.184" x="99.839">01011001</tspan><tspan y="93.934" x="99.839">00110101</tspan><tspan y="118.684" x="99.839">10010011</tspan></text>
<text font-weight="bold" xml:space="preserve" transform="rotate(-8.9)" font-size="19.8" y="67.898" x="99.41" font-family="Monospace" fill="url(#Y)"><tspan y="67.898" x="99.41">01011001</tspan><tspan y="92.648" x="99.41">00110101</tspan><tspan y="117.398" x="99.41">10010011</tspan></text>
<path opacity=".765" d="M219.46 129.09c-26.79 21.98-41.81 21.01-38.07 13.14 3.73-7.87 42.32-41.66 51.45-43.75 9.12-2.093 2.27 18.04-13.38 30.61z" fill="url(#Z)"/>
</svg>

After

Width:  |  Height:  |  Size: 49 KiB

View File

@@ -0,0 +1,23 @@
from typing import Any
from core.tools.errors import ToolProviderCredentialValidationError
from core.tools.provider.builtin.nominatim.tools.nominatim_search import NominatimSearchTool
from core.tools.provider.builtin_tool_provider import BuiltinToolProviderController
class NominatimProvider(BuiltinToolProviderController):
    """Credential validator for the Nominatim tool provider."""

    def _validate_credentials(self, credentials: dict[str, Any]) -> None:
        """Validate the provider credentials with a minimal live request.

        Runs a 1-result search for a well-known place, which exercises the
        full request path (base_url, headers, response parsing) with the
        least possible load on the Nominatim service.

        Raises:
            ToolProviderCredentialValidationError: if the test request fails
                for any reason; the original exception is chained as the cause.
        """
        try:
            # The return value is not inspected: a non-raising invocation is
            # the success criterion (the original bound it to an unused var).
            NominatimSearchTool().fork_tool_runtime(
                runtime={
                    "credentials": credentials,
                }
            ).invoke(
                user_id='',
                tool_parameters={
                    'query': 'London',
                    'limit': 1,
                },
            )
        except Exception as e:
            # Chain the original exception so the root cause stays visible.
            raise ToolProviderCredentialValidationError(str(e)) from e

View File

@@ -0,0 +1,43 @@
identity:
author: Charles Zhou
name: nominatim
label:
en_US: Nominatim
zh_Hans: Nominatim
de_DE: Nominatim
ja_JP: Nominatim
description:
en_US: Nominatim is a search engine for OpenStreetMap data
zh_Hans: Nominatim是OpenStreetMap数据的搜索引擎
de_DE: Nominatim ist eine Suchmaschine für OpenStreetMap-Daten
ja_JP: NominatimはOpenStreetMapデータの検索エンジンです
icon: icon.svg
tags:
- search
- utilities
credentials_for_provider:
base_url:
type: text-input
required: false
default: https://nominatim.openstreetmap.org
label:
en_US: Nominatim Base URL
zh_Hans: Nominatim 基础 URL
de_DE: Nominatim Basis-URL
ja_JP: Nominatim ベースURL
placeholder:
en_US: "Enter your Nominatim instance URL (default:
https://nominatim.openstreetmap.org)"
zh_Hans: 输入您的Nominatim实例URL默认https://nominatim.openstreetmap.org
de_DE: "Geben Sie Ihre Nominatim-Instanz-URL ein (Standard:
https://nominatim.openstreetmap.org)"
ja_JP: NominatimインスタンスのURLを入力してくださいデフォルトhttps://nominatim.openstreetmap.org
help:
en_US: The base URL for the Nominatim instance. Use the default for the public
service or enter your self-hosted instance URL.
zh_Hans: Nominatim实例的基础URL。使用默认值可访问公共服务或输入您的自托管实例URL。
de_DE: Die Basis-URL für die Nominatim-Instanz. Verwenden Sie den Standardwert
für den öffentlichen Dienst oder geben Sie die URL Ihrer selbst
gehosteten Instanz ein.
ja_JP: NominatimインスタンスのベースURL。公共サービスにはデフォルトを使用するか、自己ホスティングインスタンスのURLを入力してください。
url: https://nominatim.org/

View File

@@ -0,0 +1,47 @@
import json
from typing import Any, Union
import requests
from core.tools.entities.tool_entities import ToolInvokeMessage
from core.tools.tool.builtin_tool import BuiltinTool
class NominatimLookupTool(BuiltinTool):
    """Look up OpenStreetMap objects by their OSM IDs via the Nominatim API."""

    def _invoke(self, user_id: str, tool_parameters: dict[str, Any]) -> Union[ToolInvokeMessage, list[ToolInvokeMessage]]:
        """Resolve a comma-separated list of OSM IDs (e.g. "N123,W456").

        Returns a text message with the JSON result, or an error message
        when the input is missing or the request fails.
        """
        osm_ids = tool_parameters.get('osm_ids', '')
        if not osm_ids:
            return self.create_text_message('Please provide OSM IDs')

        params = {
            'osm_ids': osm_ids,
            'format': 'json',
            'addressdetails': 1
        }
        return self._make_request(user_id, 'lookup', params)

    def _make_request(self, user_id: str, endpoint: str, params: dict) -> ToolInvokeMessage:
        """Issue a GET to ``{base_url}/{endpoint}`` and summarize the JSON reply."""
        base_url = self.runtime.credentials.get('base_url', 'https://nominatim.openstreetmap.org')
        try:
            headers = {
                # Nominatim's usage policy requires an identifying User-Agent.
                "User-Agent": "DifyNominatimTool/1.0"
            }
            # Context manager guarantees the session is closed on every path;
            # the original only closed it on a 200 response, leaking the
            # session on errors and exceptions.
            with requests.Session() as session:
                response = session.request(
                    method='GET',
                    headers=headers,
                    url=f"{base_url}/{endpoint}",
                    params=params
                )
            if response.status_code != 200:
                return self.create_text_message(f"Error: {response.status_code} - {response.text}")
            # Parse JSON only after confirming success: error bodies may not
            # be valid JSON, and parsing them first masked the real HTTP error.
            response_data = response.json()
            return self.create_text_message(
                self.summary(user_id=user_id, content=json.dumps(response_data, ensure_ascii=False)))
        except Exception as e:
            return self.create_text_message(f"An error occurred: {str(e)}")

View File

@@ -0,0 +1,31 @@
identity:
name: nominatim_lookup
author: Charles Zhou
label:
en_US: Nominatim OSM Lookup
zh_Hans: Nominatim OSM 对象查找
de_DE: Nominatim OSM-Objektsuche
ja_JP: Nominatim OSM ルックアップ
description:
human:
en_US: Look up OSM objects using their IDs with Nominatim
zh_Hans: 使用Nominatim通过ID查找OSM对象
de_DE: Suchen Sie OSM-Objekte anhand ihrer IDs mit Nominatim
ja_JP: Nominatimを使用してIDでOSMオブジェクトを検索
llm: A tool for looking up OpenStreetMap objects using their IDs with Nominatim.
parameters:
- name: osm_ids
type: string
required: true
label:
en_US: OSM IDs
zh_Hans: OSM ID
de_DE: OSM-IDs
ja_JP: OSM ID
human_description:
en_US: Comma-separated list of OSM IDs to lookup (e.g., N123,W456,R789)
zh_Hans: 要查找的OSM ID的逗号分隔列表例如N123,W456,R789
de_DE: Kommagetrennte Liste von OSM-IDs für die Suche (z.B. N123,W456,R789)
ja_JP: 検索するOSM IDのカンマ区切りリストN123,W456,R789
llm_description: A comma-separated list of OSM IDs (prefixed with N, W, or R) for lookup.
form: llm

View File

@@ -0,0 +1,49 @@
import json
from typing import Any, Union
import requests
from core.tools.entities.tool_entities import ToolInvokeMessage
from core.tools.tool.builtin_tool import BuiltinTool
class NominatimReverseTool(BuiltinTool):
    """Reverse-geocode latitude/longitude into an address via Nominatim."""

    def _invoke(self, user_id: str, tool_parameters: dict[str, Any]) -> Union[ToolInvokeMessage, list[ToolInvokeMessage]]:
        """Convert a (lat, lon) pair into address details.

        Returns a text message with the JSON result, or an error message
        when a coordinate is missing or the request fails.
        """
        lat = tool_parameters.get('lat')
        lon = tool_parameters.get('lon')
        # Explicit None checks: 0.0 is a valid coordinate and must not be
        # rejected by truthiness.
        if lat is None or lon is None:
            return self.create_text_message('Please provide both latitude and longitude')

        params = {
            'lat': lat,
            'lon': lon,
            'format': 'json',
            'addressdetails': 1
        }
        return self._make_request(user_id, 'reverse', params)

    def _make_request(self, user_id: str, endpoint: str, params: dict) -> ToolInvokeMessage:
        """Issue a GET to ``{base_url}/{endpoint}`` and summarize the JSON reply."""
        base_url = self.runtime.credentials.get('base_url', 'https://nominatim.openstreetmap.org')
        try:
            headers = {
                # Nominatim's usage policy requires an identifying User-Agent.
                "User-Agent": "DifyNominatimTool/1.0"
            }
            # Context manager guarantees the session is closed on every path;
            # the original only closed it on a 200 response, leaking the
            # session on errors and exceptions.
            with requests.Session() as session:
                response = session.request(
                    method='GET',
                    headers=headers,
                    url=f"{base_url}/{endpoint}",
                    params=params
                )
            if response.status_code != 200:
                return self.create_text_message(f"Error: {response.status_code} - {response.text}")
            # Parse JSON only after confirming success: error bodies may not
            # be valid JSON, and parsing them first masked the real HTTP error.
            response_data = response.json()
            return self.create_text_message(
                self.summary(user_id=user_id, content=json.dumps(response_data, ensure_ascii=False)))
        except Exception as e:
            return self.create_text_message(f"An error occurred: {str(e)}")

View File

@@ -0,0 +1,47 @@
identity:
name: nominatim_reverse
author: Charles Zhou
label:
en_US: Nominatim Reverse Geocoding
zh_Hans: Nominatim 反向地理编码
de_DE: Nominatim Rückwärts-Geocodierung
ja_JP: Nominatim リバースジオコーディング
description:
human:
en_US: Convert coordinates to addresses using Nominatim
zh_Hans: 使用Nominatim将坐标转换为地址
de_DE: Konvertieren Sie Koordinaten in Adressen mit Nominatim
ja_JP: Nominatimを使用して座標を住所に変換
llm: A tool for reverse geocoding using Nominatim, which can convert latitude
and longitude coordinates to an address.
parameters:
- name: lat
type: number
required: true
label:
en_US: Latitude
zh_Hans: 纬度
de_DE: Breitengrad
ja_JP: 緯度
human_description:
en_US: Latitude coordinate for reverse geocoding
zh_Hans: 用于反向地理编码的纬度坐标
de_DE: Breitengrad-Koordinate für die Rückwärts-Geocodierung
ja_JP: リバースジオコーディングの緯度座標
llm_description: The latitude coordinate for reverse geocoding.
form: llm
- name: lon
type: number
required: true
label:
en_US: Longitude
zh_Hans: 经度
de_DE: Längengrad
ja_JP: 経度
human_description:
en_US: Longitude coordinate for reverse geocoding
zh_Hans: 用于反向地理编码的经度坐标
de_DE: Längengrad-Koordinate für die Rückwärts-Geocodierung
ja_JP: リバースジオコーディングの経度座標
llm_description: The longitude coordinate for reverse geocoding.
form: llm

View File

@@ -0,0 +1,49 @@
import json
from typing import Any, Union
import requests
from core.tools.entities.tool_entities import ToolInvokeMessage
from core.tools.tool.builtin_tool import BuiltinTool
class NominatimSearchTool(BuiltinTool):
    """Geocode free-text queries (addresses, place names) via Nominatim."""

    def _invoke(self, user_id: str, tool_parameters: dict[str, Any]) -> Union[ToolInvokeMessage, list[ToolInvokeMessage]]:
        """Search for locations matching ``query``, up to ``limit`` results.

        Returns a text message with the JSON result, or an error message
        when the query is missing or the request fails.
        """
        query = tool_parameters.get('query', '')
        limit = tool_parameters.get('limit', 10)
        if not query:
            return self.create_text_message('Please input a search query')

        params = {
            'q': query,
            'format': 'json',
            'limit': limit,
            'addressdetails': 1
        }
        return self._make_request(user_id, 'search', params)

    def _make_request(self, user_id: str, endpoint: str, params: dict) -> ToolInvokeMessage:
        """Issue a GET to ``{base_url}/{endpoint}`` and summarize the JSON reply."""
        base_url = self.runtime.credentials.get('base_url', 'https://nominatim.openstreetmap.org')
        try:
            headers = {
                # Nominatim's usage policy requires an identifying User-Agent.
                "User-Agent": "DifyNominatimTool/1.0"
            }
            # Context manager guarantees the session is closed on every path;
            # the original only closed it on a 200 response, leaking the
            # session on errors and exceptions.
            with requests.Session() as session:
                response = session.request(
                    method='GET',
                    headers=headers,
                    url=f"{base_url}/{endpoint}",
                    params=params
                )
            if response.status_code != 200:
                return self.create_text_message(f"Error: {response.status_code} - {response.text}")
            # Parse JSON only after confirming success: error bodies may not
            # be valid JSON, and parsing them first masked the real HTTP error.
            response_data = response.json()
            return self.create_text_message(
                self.summary(user_id=user_id, content=json.dumps(response_data, ensure_ascii=False)))
        except Exception as e:
            return self.create_text_message(f"An error occurred: {str(e)}")

View File

@@ -0,0 +1,51 @@
identity:
name: nominatim_search
author: Charles Zhou
label:
en_US: Nominatim Search
zh_Hans: Nominatim 搜索
de_DE: Nominatim Suche
ja_JP: Nominatim 検索
description:
human:
en_US: Search for locations using Nominatim
zh_Hans: 使用Nominatim搜索位置
de_DE: Suche nach Orten mit Nominatim
ja_JP: Nominatimを使用して場所を検索
llm: A tool for geocoding using Nominatim, which can search for locations based
on addresses or place names.
parameters:
- name: query
type: string
required: true
label:
en_US: Search Query
zh_Hans: 搜索查询
de_DE: Suchanfrage
ja_JP: 検索クエリ
human_description:
en_US: Enter an address or place name to search for
zh_Hans: 输入要搜索的地址或地名
de_DE: Geben Sie eine Adresse oder einen Ortsnamen für die Suche ein
ja_JP: 検索する住所または場所の名前を入力してください
llm_description: The search query for Nominatim, which can be an address or place name.
form: llm
- name: limit
type: number
default: 10
min: 1
max: 40
required: false
label:
en_US: Result Limit
zh_Hans: 结果限制
de_DE: Ergebnislimit
ja_JP: 結果の制限
human_description:
en_US: "Maximum number of results to return (default: 10, max: 40)"
zh_Hans: 要返回的最大结果数默认10最大40
de_DE: "Maximale Anzahl der zurückzugebenden Ergebnisse (Standard: 10, max: 40)"
ja_JP: 返す結果の最大数デフォルト10、最大40
llm_description: Limit the number of returned results. The default is 10, and
the maximum is 40.
form: form

View File

@@ -22,7 +22,21 @@ class WecomGroupBotTool(BuiltinTool):
return self.create_text_message(
f'Invalid parameter hook_key ${hook_key}, not a valid UUID')
msgtype = 'text'
message_type = tool_parameters.get('message_type', 'text')
if message_type == 'markdown':
payload = {
"msgtype": 'markdown',
"markdown": {
"content": content,
}
}
else:
payload = {
"msgtype": 'text',
"text": {
"content": content,
}
}
api_url = 'https://qyapi.weixin.qq.com/cgi-bin/webhook/send'
headers = {
'Content-Type': 'application/json',
@@ -30,12 +44,6 @@ class WecomGroupBotTool(BuiltinTool):
params = {
'key': hook_key,
}
payload = {
"msgtype": msgtype,
"text": {
"content": content,
}
}
try:
res = httpx.post(api_url, headers=headers, params=params, json=payload)

View File

@@ -38,3 +38,27 @@ parameters:
pt_BR: Content to be sent to the group.
llm_description: Content of the message
form: llm
- name: message_type
type: select
default: text
required: true
label:
en_US: Wecom Group bot message type
zh_Hans: 群机器人webhook的消息类型
pt_BR: Wecom Group bot message type
human_description:
en_US: Wecom Group bot message type
zh_Hans: 群机器人webhook的消息类型
pt_BR: Wecom Group bot message type
options:
- value: text
label:
en_US: Text
zh_Hans: 文本
pt_BR: Text
- value: markdown
label:
en_US: Markdown
zh_Hans: Markdown
pt_BR: Markdown
form: form

View File

@@ -131,7 +131,7 @@ class ParameterExtractorNode(LLMNode):
return NodeRunResult(
status=WorkflowNodeExecutionStatus.FAILED,
inputs=inputs,
process_data={},
process_data=process_data,
outputs={
'__is_success': 0,
'__reason': str(e)

View File

@@ -9,7 +9,7 @@ fi
if [[ "${MODE}" == "worker" ]]; then
celery -A app.celery worker -P ${CELERY_WORKER_CLASS:-gevent} -c ${CELERY_WORKER_AMOUNT:-1} --loglevel INFO \
-Q ${CELERY_QUEUES:-dataset,generation,mail,ops_trace}
-Q ${CELERY_QUEUES:-dataset,generation,mail,ops_trace,app_deletion}
elif [[ "${MODE}" == "beat" ]]; then
celery -A app.celery beat --loglevel INFO
else

View File

@@ -3,9 +3,6 @@ from blinker import signal
# sender: app
app_was_created = signal('app-was-created')
# sender: app
app_was_deleted = signal('app-was-deleted')
# sender: app, kwargs: app_model_config
app_model_config_was_updated = signal('app-model-config-was-updated')

View File

@@ -4,10 +4,7 @@ from .create_document_index import handle
from .create_installed_app_when_app_created import handle
from .create_site_record_when_app_created import handle
from .deduct_quota_when_messaeg_created import handle
from .delete_installed_app_when_app_deleted import handle
from .delete_site_record_when_app_deleted import handle
from .delete_tool_parameters_cache_when_sync_draft_workflow import handle
from .delete_workflow_as_tool_when_app_deleted import handle
from .update_app_dataset_join_when_app_model_config_updated import handle
from .update_app_dataset_join_when_app_published_workflow_updated import handle
from .update_provider_last_used_at_when_messaeg_created import handle

View File

@@ -1,12 +0,0 @@
from events.app_event import app_was_deleted
from extensions.ext_database import db
from models.model import InstalledApp
@app_was_deleted.connect
def handle(sender, **kwargs):
    """Remove every InstalledApp row referencing the deleted app.

    Connected to the ``app_was_deleted`` blinker signal; ``sender`` is the
    App model instance being deleted.
    """
    app = sender
    installed_apps = db.session.query(InstalledApp).filter(InstalledApp.app_id == app.id).all()
    for installed_app in installed_apps:
        db.session.delete(installed_app)
    # Single commit after all deletes keeps the cleanup atomic.
    db.session.commit()

View File

@@ -1,11 +0,0 @@
from events.app_event import app_was_deleted
from extensions.ext_database import db
from models.model import Site
@app_was_deleted.connect
def handle(sender, **kwargs):
    """Delete the Site record bound to the deleted app.

    Connected to the ``app_was_deleted`` blinker signal; ``sender`` is the
    App model instance being deleted.
    """
    app = sender
    site = db.session.query(Site).filter(Site.app_id == app.id).first()
    # NOTE(review): .first() may return None; session.delete(None) would
    # raise — presumably every app always has a Site row. TODO confirm.
    db.session.delete(site)
    db.session.commit()

View File

@@ -1,14 +0,0 @@
from events.app_event import app_was_deleted
from extensions.ext_database import db
from models.tools import WorkflowToolProvider
@app_was_deleted.connect
def handle(sender, **kwargs):
    """Remove every WorkflowToolProvider row referencing the deleted app.

    Connected to the ``app_was_deleted`` blinker signal; ``sender`` is the
    App model instance being deleted.
    """
    app = sender
    workflow_tools = db.session.query(WorkflowToolProvider).filter(
        WorkflowToolProvider.app_id == app.id
    ).all()
    for workflow_tool in workflow_tools:
        db.session.delete(workflow_tool)
    # Single commit after all deletes keeps the cleanup atomic.
    db.session.commit()

View File

@@ -7,6 +7,7 @@ from extensions.storage.aliyun_storage import AliyunStorage
from extensions.storage.azure_storage import AzureStorage
from extensions.storage.google_storage import GoogleStorage
from extensions.storage.local_storage import LocalStorage
from extensions.storage.oci_storage import OCIStorage
from extensions.storage.s3_storage import S3Storage
from extensions.storage.tencent_storage import TencentStorage
@@ -37,6 +38,10 @@ class Storage:
self.storage_runner = TencentStorage(
app=app
)
elif storage_type == 'oci-storage':
self.storage_runner = OCIStorage(
app=app
)
else:
self.storage_runner = LocalStorage(app=app)

View File

@@ -0,0 +1,64 @@
from collections.abc import Generator
from contextlib import closing
import boto3
from botocore.exceptions import ClientError
from flask import Flask
from extensions.storage.base_storage import BaseStorage
class OCIStorage(BaseStorage):
    """File storage backed by OCI Object Storage through its S3-compatible API.

    OCI exposes an Amazon-S3-compatible endpoint, so a regular boto3 S3
    client is used with a custom ``endpoint_url``.
    """

    def __init__(self, app: Flask):
        super().__init__(app)
        app_config = self.app.config
        self.bucket_name = app_config.get('OCI_BUCKET_NAME')
        self.client = boto3.client(
            's3',
            aws_secret_access_key=app_config.get('OCI_SECRET_KEY'),
            aws_access_key_id=app_config.get('OCI_ACCESS_KEY'),
            endpoint_url=app_config.get('OCI_ENDPOINT'),
            region_name=app_config.get('OCI_REGION')
        )

    def save(self, filename, data):
        """Store ``data`` under key ``filename`` in the configured bucket."""
        self.client.put_object(Bucket=self.bucket_name, Key=filename, Body=data)

    def load_once(self, filename: str) -> bytes:
        """Read the whole object into memory.

        :raises FileNotFoundError: if the key does not exist in the bucket
        """
        # NOTE(review): closing() closes the shared client after this call;
        # reuse relies on boto3 re-establishing connections — confirm whether
        # the shared client should really be closed here.
        try:
            with closing(self.client) as client:
                data = client.get_object(Bucket=self.bucket_name, Key=filename)['Body'].read()
        except ClientError as ex:
            if ex.response['Error']['Code'] == 'NoSuchKey':
                raise FileNotFoundError("File not found")
            else:
                raise
        return data

    def load_stream(self, filename: str) -> Generator:
        """Return a generator yielding the object's content in chunks."""
        def generate(filename: str = filename) -> Generator:
            try:
                with closing(self.client) as client:
                    response = client.get_object(Bucket=self.bucket_name, Key=filename)
                    yield from response['Body'].iter_chunks()
            except ClientError as ex:
                if ex.response['Error']['Code'] == 'NoSuchKey':
                    raise FileNotFoundError("File not found")
                else:
                    raise
        return generate()

    def download(self, filename, target_filepath):
        """Download the object to a local file path."""
        with closing(self.client) as client:
            client.download_file(self.bucket_name, filename, target_filepath)

    def exists(self, filename):
        """Return True if the key exists in the bucket.

        Catch only ClientError (head_object raises it for a missing key);
        the previous bare ``except`` also swallowed unrelated failures such
        as KeyboardInterrupt and programming errors.
        """
        with closing(self.client) as client:
            try:
                client.head_object(Bucket=self.bucket_name, Key=filename)
                return True
            except ClientError:
                return False

    def delete(self, filename):
        """Remove the object with key ``filename`` from the bucket."""
        self.client.delete_object(Bucket=self.bucket_name, Key=filename)

View File

@@ -0,0 +1,32 @@
"""add workflow_run_id index for message
Revision ID: b2602e131636
Revises: 63f9175e515b
Create Date: 2024-06-29 12:16:51.646346
"""
from alembic import op
import models as models
# revision identifiers, used by Alembic.
revision = 'b2602e131636'
down_revision = '63f9175e515b'
branch_labels = None
depends_on = None
def upgrade():
    # ### commands auto generated by Alembic - please adjust! ###
    # Composite index on (conversation_id, workflow_run_id) to speed up
    # message lookups by workflow run within a conversation; batch mode
    # keeps the DDL portable across database backends.
    with op.batch_alter_table('messages', schema=None) as batch_op:
        batch_op.create_index('message_workflow_run_id_idx', ['conversation_id', 'workflow_run_id'], unique=False)
    # ### end Alembic commands ###
def downgrade():
    # ### commands auto generated by Alembic - please adjust! ###
    # Reverse of upgrade(): drop the composite message index.
    with op.batch_alter_table('messages', schema=None) as batch_op:
        batch_op.drop_index('message_workflow_run_id_idx')
    # ### end Alembic commands ###

View File

@@ -626,6 +626,7 @@ class Message(db.Model):
db.Index('message_conversation_id_idx', 'conversation_id'),
db.Index('message_end_user_idx', 'app_id', 'from_source', 'from_end_user_id'),
db.Index('message_account_idx', 'app_id', 'from_source', 'from_account_id'),
db.Index('message_workflow_run_id_idx', 'conversation_id', 'workflow_run_id')
)
id = db.Column(StringUUID, server_default=db.text('uuid_generate_v4()'))

38
api/poetry.lock generated
View File

@@ -534,41 +534,41 @@ files = [
[[package]]
name = "boto3"
version = "1.28.17"
version = "1.34.136"
description = "The AWS SDK for Python"
optional = false
python-versions = ">= 3.7"
python-versions = ">=3.8"
files = [
{file = "boto3-1.28.17-py3-none-any.whl", hash = "sha256:bca0526f819e0f19c0f1e6eba3e2d1d6b6a92a45129f98c0d716e5aab6d9444b"},
{file = "boto3-1.28.17.tar.gz", hash = "sha256:90f7cfb5e1821af95b1fc084bc50e6c47fa3edc99f32de1a2591faa0c546bea7"},
{file = "boto3-1.34.136-py3-none-any.whl", hash = "sha256:d41037e2c680ab8d6c61a0a4ee6bf1fdd9e857f43996672830a95d62d6f6fa79"},
{file = "boto3-1.34.136.tar.gz", hash = "sha256:0314e6598f59ee0f34eb4e6d1a0f69fa65c146d2b88a6e837a527a9956ec2731"},
]
[package.dependencies]
botocore = ">=1.31.17,<1.32.0"
botocore = ">=1.34.136,<1.35.0"
jmespath = ">=0.7.1,<2.0.0"
s3transfer = ">=0.6.0,<0.7.0"
s3transfer = ">=0.10.0,<0.11.0"
[package.extras]
crt = ["botocore[crt] (>=1.21.0,<2.0a0)"]
[[package]]
name = "botocore"
version = "1.31.85"
version = "1.34.136"
description = "Low-level, data-driven core of boto 3."
optional = false
python-versions = ">= 3.7"
python-versions = ">=3.8"
files = [
{file = "botocore-1.31.85-py3-none-any.whl", hash = "sha256:b8f35d65f2b45af50c36fc25cc1844d6bd61d38d2148b2ef133b8f10e198555d"},
{file = "botocore-1.31.85.tar.gz", hash = "sha256:ce58e688222df73ec5691f934be1a2122a52c9d11d3037b586b3fff16ed6d25f"},
{file = "botocore-1.34.136-py3-none-any.whl", hash = "sha256:c63fe9032091fb9e9477706a3ebfa4d0c109b807907051d892ed574f9b573e61"},
{file = "botocore-1.34.136.tar.gz", hash = "sha256:7f7135178692b39143c8f152a618d2a3b71065a317569a7102d2306d4946f42f"},
]
[package.dependencies]
jmespath = ">=0.7.1,<2.0.0"
python-dateutil = ">=2.1,<3.0.0"
urllib3 = {version = ">=1.25.4,<2.1", markers = "python_version >= \"3.10\""}
urllib3 = {version = ">=1.25.4,<2.2.0 || >2.2.0,<3", markers = "python_version >= \"3.10\""}
[package.extras]
crt = ["awscrt (==0.19.12)"]
crt = ["awscrt (==0.20.11)"]
[[package]]
name = "bottleneck"
@@ -7032,20 +7032,20 @@ files = [
[[package]]
name = "s3transfer"
version = "0.6.2"
version = "0.10.2"
description = "An Amazon S3 Transfer Manager"
optional = false
python-versions = ">= 3.7"
python-versions = ">=3.8"
files = [
{file = "s3transfer-0.6.2-py3-none-any.whl", hash = "sha256:b014be3a8a2aab98cfe1abc7229cc5a9a0cf05eb9c1f2b86b230fd8df3f78084"},
{file = "s3transfer-0.6.2.tar.gz", hash = "sha256:cab66d3380cca3e70939ef2255d01cd8aece6a4907a9528740f668c4b0611861"},
{file = "s3transfer-0.10.2-py3-none-any.whl", hash = "sha256:eca1c20de70a39daee580aef4986996620f365c4e0fda6a86100231d62f1bf69"},
{file = "s3transfer-0.10.2.tar.gz", hash = "sha256:0711534e9356d3cc692fdde846b4a1e4b0cb6519971860796e6bc4c7aea00ef6"},
]
[package.dependencies]
botocore = ">=1.12.36,<2.0a.0"
botocore = ">=1.33.2,<2.0a.0"
[package.extras]
crt = ["botocore[crt] (>=1.20.29,<2.0a.0)"]
crt = ["botocore[crt] (>=1.33.2,<2.0a.0)"]
[[package]]
name = "safetensors"
@@ -9095,4 +9095,4 @@ testing = ["coverage (>=5.0.3)", "zope.event", "zope.testing"]
[metadata]
lock-version = "2.0"
python-versions = "^3.10"
content-hash = "d40bed69caecf3a2bcd5ec054288d7cb36a9a231fff210d4f1a42745dd3bf604"
content-hash = "90f0e77567fbe5100d15bf2bc9472007aafc53c2fd594b6a90dd8455dea58582"

View File

@@ -107,7 +107,7 @@ authlib = "1.3.1"
azure-identity = "1.16.1"
azure-storage-blob = "12.13.0"
beautifulsoup4 = "4.12.2"
boto3 = "1.28.17"
boto3 = "1.34.136"
bs4 = "~0.0.1"
cachetools = "~5.3.0"
celery = "~5.3.6"

View File

@@ -17,6 +17,7 @@ from libs.passport import PassportService
from libs.password import compare_password, hash_password, valid_password
from libs.rsa import generate_key_pair
from models.account import *
from models.model import DifySetup
from services.errors.account import (
AccountAlreadyInTenantError,
AccountLoginError,
@@ -119,10 +120,11 @@ class AccountService:
return account
@staticmethod
def create_account(email: str, name: str, interface_language: str,
password: str = None,
interface_theme: str = 'light',
timezone: str = 'America/New_York', ) -> Account:
def create_account(email: str,
name: str,
interface_language: str,
password: Optional[str] = None,
interface_theme: str = 'light') -> Account:
"""create account"""
account = Account()
account.email = email
@@ -200,7 +202,6 @@ class AccountService:
account.last_login_ip = ip_address
db.session.add(account)
db.session.commit()
logging.info(f'Account {account.id} logged in successfully.')
@staticmethod
def login(account: Account, *, ip_address: Optional[str] = None):
@@ -444,8 +445,51 @@ class RegisterService:
return f'member_invite:token:{token}'
@classmethod
def register(cls, email, name, password: str = None, open_id: str = None, provider: str = None,
language: str = None, status: AccountStatus = None) -> Account:
def setup(cls, email: str, name: str, password: str, ip_address: str) -> None:
    """
    Setup dify: create the first (owner) account, its tenant, and the
    DifySetup marker row. On any failure, all partially created setup
    rows are wiped so setup can be retried from scratch.

    :param email: email of the initial account
    :param name: username of the initial account
    :param password: password of the initial account
    :param ip_address: client IP recorded as the account's last login IP
    :raises ValueError: if any step of the setup fails
    """
    try:
        # Register the initial account with the default interface language.
        account = AccountService.create_account(
            email=email,
            name=name,
            interface_language=languages[0],
            password=password,
        )
        account.last_login_ip = ip_address
        # Timestamps are stored as naive UTC datetimes.
        account.initialized_at = datetime.now(timezone.utc).replace(tzinfo=None)
        TenantService.create_owner_tenant_if_not_exist(account)
        # The DifySetup row marks the instance as initialized.
        dify_setup = DifySetup(
            version=current_app.config['CURRENT_VERSION']
        )
        db.session.add(dify_setup)
        db.session.commit()
    except Exception as e:
        # Roll setup back completely: remove the marker row and ALL
        # account/tenant rows (safe only because setup runs on a fresh
        # instance where no other accounts exist yet).
        db.session.query(DifySetup).delete()
        db.session.query(TenantAccountJoin).delete()
        db.session.query(Account).delete()
        db.session.query(Tenant).delete()
        db.session.commit()
        logging.exception(f'Setup failed: {e}')
        raise ValueError(f'Setup failed: {e}')
@classmethod
def register(cls, email, name,
password: Optional[str] = None,
open_id: Optional[str] = None,
provider: Optional[str] = None,
language: Optional[str] = None,
status: Optional[AccountStatus] = None) -> Account:
db.session.begin_nested()
"""Register account"""
try:

View File

@@ -16,13 +16,14 @@ from core.model_runtime.entities.model_entities import ModelPropertyKey, ModelTy
from core.model_runtime.model_providers.__base.large_language_model import LargeLanguageModel
from core.tools.tool_manager import ToolManager
from core.tools.utils.configuration import ToolParameterConfigurationManager
from events.app_event import app_model_config_was_updated, app_was_created, app_was_deleted
from events.app_event import app_model_config_was_updated, app_was_created
from extensions.ext_database import db
from models.account import Account
from models.model import App, AppMode, AppModelConfig
from models.tools import ApiToolProvider
from services.tag_service import TagService
from services.workflow_service import WorkflowService
from tasks.remove_app_and_related_data_task import remove_app_and_related_data_task
class AppService:
@@ -395,16 +396,8 @@ class AppService:
"""
db.session.delete(app)
db.session.commit()
app_was_deleted.send(app)
# todo async delete related data by event
# app_model_configs, site, api_tokens, installed_apps, recommended_apps BY app
# app_annotation_hit_histories, app_annotation_settings, app_dataset_joins BY app
# workflows, workflow_runs, workflow_node_executions, workflow_app_logs BY app
# conversations, pinned_conversations, messages BY app
# message_feedbacks, message_annotations, message_chains BY message
# message_agent_thoughts, message_files, saved_messages BY message
# Trigger asynchronous deletion of app and related data
remove_app_and_related_data_task.delay(app.id)
def get_app_meta(self, app_model: App) -> dict:
"""

View File

@@ -34,7 +34,7 @@ from models.dataset import (
from models.model import UploadFile
from models.source import DataSourceOauthBinding
from services.errors.account import NoPermissionError
from services.errors.dataset import DatasetInUseError, DatasetNameDuplicateError
from services.errors.dataset import DatasetNameDuplicateError
from services.errors.document import DocumentIndexingError
from services.errors.file import FileNotExistsError
from services.feature_service import FeatureModel, FeatureService
@@ -234,9 +234,6 @@ class DatasetService:
@staticmethod
def delete_dataset(dataset_id, user):
count = AppDatasetJoin.query.filter_by(dataset_id=dataset_id).count()
if count > 0:
raise DatasetInUseError()
dataset = DatasetService.get_dataset(dataset_id)
@@ -251,6 +248,13 @@ class DatasetService:
db.session.commit()
return True
@staticmethod
def dataset_use_check(dataset_id) -> bool:
    """Return True if any app currently references the dataset.

    :param dataset_id: primary key of the dataset to check
    """
    # `count() > 0` replaces the old `if count > 0: return True / return
    # False` ladder with a single boolean expression.
    return AppDatasetJoin.query.filter_by(dataset_id=dataset_id).count() > 0
@staticmethod
def check_dataset_permission(dataset, user):
if dataset.tenant_id != user.current_tenant_id:

View File

@@ -45,17 +45,6 @@ class HitTestingService:
if not retrieval_model:
retrieval_model = dataset.retrieval_model if dataset.retrieval_model else default_retrieval_model
# get embedding model
model_manager = ModelManager()
embedding_model = model_manager.get_model_instance(
tenant_id=dataset.tenant_id,
model_type=ModelType.TEXT_EMBEDDING,
provider=dataset.embedding_model_provider,
model=dataset.embedding_model
)
embeddings = CacheEmbedding(embedding_model)
all_documents = RetrievalService.retrieve(retrival_method=retrieval_model['search_method'],
dataset_id=dataset.id,
query=query,
@@ -67,6 +56,7 @@ class HitTestingService:
)
end = time.perf_counter()
logging.debug(f"Hit testing retrieve in {end - start:0.4f} seconds")
dataset_query = DatasetQuery(
@@ -80,20 +70,10 @@ class HitTestingService:
db.session.add(dataset_query)
db.session.commit()
return cls.compact_retrieve_response(dataset, embeddings, query, all_documents)
return cls.compact_retrieve_response(dataset, query, all_documents)
@classmethod
def compact_retrieve_response(cls, dataset: Dataset, embeddings: Embeddings, query: str, documents: list[Document]):
text_embeddings = [
embeddings.embed_query(query)
]
text_embeddings.extend(embeddings.embed_documents([document.page_content for document in documents]))
tsne_position_data = cls.get_tsne_positions_from_embeddings(text_embeddings)
query_position = tsne_position_data.pop(0)
def compact_retrieve_response(cls, dataset: Dataset, query: str, documents: list[Document]):
i = 0
records = []
for document in documents:
@@ -113,7 +93,6 @@ class HitTestingService:
record = {
"segment": segment,
"score": document.metadata.get('score', None),
"tsne_position": tsne_position_data[i]
}
records.append(record)
@@ -123,7 +102,6 @@ class HitTestingService:
return {
"query": {
"content": query,
"tsne_position": query_position,
},
"records": records
}

View File

@@ -0,0 +1,153 @@
import logging
import time
import click
from celery import shared_task
from sqlalchemy import select
from sqlalchemy.exc import SQLAlchemyError
from extensions.ext_database import db
from models.dataset import AppDatasetJoin
from models.model import (
ApiToken,
AppAnnotationHitHistory,
AppAnnotationSetting,
AppModelConfig,
Conversation,
EndUser,
InstalledApp,
Message,
MessageAgentThought,
MessageAnnotation,
MessageChain,
MessageFeedback,
MessageFile,
RecommendedApp,
Site,
TagBinding,
)
from models.tools import WorkflowToolProvider
from models.web import PinnedConversation, SavedMessage
from models.workflow import Workflow, WorkflowAppLog, WorkflowNodeExecution, WorkflowRun
@shared_task(queue='app_deletion', bind=True, max_retries=3)
def remove_app_and_related_data_task(self, app_id: str):
    """Asynchronously delete an app's related data after the app row is gone.

    Runs on the dedicated 'app_deletion' queue; bound task so failures can
    self-retry (up to 3 times, 60s apart). All deletions run inside one
    nested transaction so they succeed or fail as a unit.

    :param app_id: id of the already-deleted App whose dependents are purged
    """
    logging.info(click.style(f'Start deleting app and related data: {app_id}', fg='green'))
    start_at = time.perf_counter()
    try:
        # Use a transaction to ensure all deletions succeed or none do
        with db.session.begin_nested():
            # Delete related data; helpers issue bulk DELETEs per table.
            _delete_app_model_configs(app_id)
            _delete_app_site(app_id)
            _delete_app_api_tokens(app_id)
            _delete_installed_apps(app_id)
            _delete_recommended_apps(app_id)
            _delete_app_annotation_data(app_id)
            _delete_app_dataset_joins(app_id)
            _delete_app_workflows(app_id)
            _delete_app_conversations(app_id)
            _delete_app_messages(app_id)
            _delete_workflow_tool_providers(app_id)
            _delete_app_tag_bindings(app_id)
            _delete_end_users(app_id)
        # If we reach here, the transaction was successful
        db.session.commit()
        end_at = time.perf_counter()
        logging.info(click.style(f'App and related data deleted: {app_id} latency: {end_at - start_at}', fg='green'))
    except SQLAlchemyError as e:
        # Database-level failure: roll back and retry the whole task.
        db.session.rollback()
        logging.exception(
            click.style(f"Database error occurred while deleting app {app_id} and related data", fg='red'))
        raise self.retry(exc=e, countdown=60)  # Retry after 60 seconds
    except Exception as e:
        # Any other failure also triggers a retry (no rollback needed here;
        # begin_nested() already rolled the savepoint back on exception).
        logging.exception(click.style(f"Error occurred while deleting app {app_id} and related data", fg='red'))
        raise self.retry(exc=e, countdown=60)  # Retry after 60 seconds
def _delete_app_model_configs(app_id: str):
    """Bulk-delete all AppModelConfig rows belonging to the app."""
    db.session.query(AppModelConfig).filter_by(app_id=app_id).delete()
def _delete_app_site(app_id: str):
    """Bulk-delete the Site row(s) bound to the app."""
    db.session.query(Site).filter_by(app_id=app_id).delete()
def _delete_app_api_tokens(app_id: str):
    """Bulk-delete API tokens issued for the app."""
    db.session.query(ApiToken).filter_by(app_id=app_id).delete()
def _delete_installed_apps(app_id: str):
    """Bulk-delete workspace installation records for the app."""
    db.session.query(InstalledApp).filter_by(app_id=app_id).delete()
def _delete_recommended_apps(app_id: str):
    """Bulk-delete recommended-app entries that point at the app."""
    db.session.query(RecommendedApp).filter_by(app_id=app_id).delete()
def _delete_app_annotation_data(app_id: str):
    """Bulk-delete annotation hit histories and annotation settings for the app."""
    db.session.query(AppAnnotationHitHistory).filter_by(app_id=app_id).delete()
    db.session.query(AppAnnotationSetting).filter_by(app_id=app_id).delete()
def _delete_app_dataset_joins(app_id: str):
    """Bulk-delete app-to-dataset join rows for the app."""
    db.session.query(AppDatasetJoin).filter_by(app_id=app_id).delete()
def _delete_app_workflows(app_id: str):
    """Delete workflow runs, node executions, app logs, then the workflows.

    Child rows are removed first via an IN-subquery over the app's workflow
    ids, so the final Workflow delete leaves no orphans.
    synchronize_session=False skips in-session state reconciliation, which
    is safe because the enclosing task commits right after.
    """
    db.session.query(WorkflowRun).filter(
        WorkflowRun.workflow_id.in_(
            db.session.query(Workflow.id).filter(Workflow.app_id == app_id)
        )
    ).delete(synchronize_session=False)
    db.session.query(WorkflowNodeExecution).filter(
        WorkflowNodeExecution.workflow_id.in_(
            db.session.query(Workflow.id).filter(Workflow.app_id == app_id)
        )
    ).delete(synchronize_session=False)
    db.session.query(WorkflowAppLog).filter(WorkflowAppLog.app_id == app_id).delete(synchronize_session=False)
    db.session.query(Workflow).filter(Workflow.app_id == app_id).delete(synchronize_session=False)
def _delete_app_conversations(app_id: str):
    """Delete pinned-conversation rows first, then the conversations.

    PinnedConversation references Conversation by id, so it must go before
    the Conversation rows are removed.
    """
    db.session.query(PinnedConversation).filter(
        PinnedConversation.conversation_id.in_(
            db.session.query(Conversation.id).filter(Conversation.app_id == app_id)
        )
    ).delete(synchronize_session=False)
    db.session.query(Conversation).filter(Conversation.app_id == app_id).delete()
def _delete_app_messages(app_id: str):
    """Delete all per-message child rows, then the app's messages.

    A single scalar subquery of the app's message ids is reused across each
    child-table delete; Message rows themselves are removed last so the
    subquery stays valid throughout.
    """
    message_ids = select(Message.id).filter(Message.app_id == app_id).scalar_subquery()
    db.session.query(MessageFeedback).filter(MessageFeedback.message_id.in_(message_ids)).delete(
        synchronize_session=False)
    db.session.query(MessageAnnotation).filter(MessageAnnotation.message_id.in_(message_ids)).delete(
        synchronize_session=False)
    db.session.query(MessageChain).filter(MessageChain.message_id.in_(message_ids)).delete(synchronize_session=False)
    db.session.query(MessageAgentThought).filter(MessageAgentThought.message_id.in_(message_ids)).delete(
        synchronize_session=False)
    db.session.query(MessageFile).filter(MessageFile.message_id.in_(message_ids)).delete(synchronize_session=False)
    db.session.query(SavedMessage).filter(SavedMessage.message_id.in_(message_ids)).delete(synchronize_session=False)
    db.session.query(Message).filter(Message.app_id == app_id).delete(synchronize_session=False)
def _delete_workflow_tool_providers(app_id: str):
    """Bulk-delete workflow-as-tool providers registered by the app."""
    db.session.query(WorkflowToolProvider).filter_by(app_id=app_id).delete(synchronize_session=False)
def _delete_app_tag_bindings(app_id: str):
    """Bulk-delete tag bindings whose target is the app."""
    db.session.query(TagBinding).filter_by(target_id=app_id).delete(synchronize_session=False)
def _delete_end_users(app_id: str):
    """Bulk-delete end users scoped to the app."""
    db.session.query(EndUser).filter_by(app_id=app_id).delete()

View File

@@ -2,7 +2,7 @@ version: '3'
services:
# API service
api:
image: langgenius/dify-api:0.6.12
image: langgenius/dify-api:0.6.12-fix1
restart: always
environment:
# Startup mode, 'api' starts the API server.
@@ -222,7 +222,7 @@ services:
# worker service
# The Celery worker for processing the queue.
worker:
image: langgenius/dify-api:0.6.12
image: langgenius/dify-api:0.6.12-fix1
restart: always
environment:
CONSOLE_WEB_URL: ''
@@ -388,7 +388,7 @@ services:
# Frontend web application.
web:
image: langgenius/dify-web:0.6.12
image: langgenius/dify-web:0.6.12-fix1
restart: always
environment:
# The base URL of console application api server, refers to the Console base URL of WEB service if console domain is

View File

@@ -76,7 +76,7 @@ DEPLOY_ENV=PRODUCTION
# Whether to enable the version check policy.
# If set to false, https://updates.dify.ai will not be called for version check.
CHECK_UPDATE_URL=false
CHECK_UPDATE_URL=true
# Used to change the OpenAI base address, default is https://api.openai.com/v1.
# When OpenAI cannot be accessed in China, replace it with a domestic mirror address,
@@ -97,10 +97,10 @@ FILES_ACCESS_TIMEOUT=300
# ------------------------------
# API service binding address, default: 0.0.0.0, i.e., all addresses can be accessed.
DIFY_BIND_ADDRESS=
DIFY_BIND_ADDRESS=0.0.0.0
# API service binding port number, default 5001.
DIFY_PORT=
DIFY_PORT=5001
# The number of API server workers, i.e., the number of gevent workers.
# Formula: number of cpu cores x 2 + 1
@@ -210,7 +210,7 @@ AZURE_BLOB_ACCOUNT_URL=https://<your_account_name>.blob.core.windows.net
# Google Storage Configuration
# The name of the Google Storage bucket to use for storing files.
GOOGLE_STORAGE_BUCKET_NAME=yout-bucket-name
GOOGLE_STORAGE_BUCKET_NAME=your-bucket-name
# The service account JSON key to use for authenticating with the Google Storage service.
GOOGLE_STORAGE_SERVICE_ACCOUNT_JSON_BASE64=your-google-service-account-json-base64-string
@@ -308,7 +308,7 @@ RELYT_PASSWORD=difyai123456
RELYT_DATABASE=postgres
# open search configuration, only available when VECTOR_STORE is `opensearch`
OPENSEARCH_HOST=127.0.0.1
OPENSEARCH_HOST=opensearch
OPENSEARCH_PORT=9200
OPENSEARCH_USER=admin
OPENSEARCH_PASSWORD=admin
@@ -361,16 +361,21 @@ UPLOAD_IMAGE_FILE_SIZE_LIMIT=10
# Used for application monitoring and error log tracking.
# ------------------------------
# Sentry DSN address, default is empty, when empty,
# API Service Sentry DSN address, default is empty, when empty,
# all monitoring information is not reported to Sentry.
# If not set, Sentry error reporting will be disabled.
SENTRY_DSN=
API_SENTRY_DSN=
# The reporting ratio of Sentry events, if it is 0.01, it is 1%.
SENTRY_TRACES_SAMPLE_RATE=1.0
# API Service The reporting ratio of Sentry events, if it is 0.01, it is 1%.
API_SENTRY_TRACES_SAMPLE_RATE=1.0
# The reporting ratio of Sentry profiles, if it is 0.01, it is 1%.
SENTRY_PROFILES_SAMPLE_RATE=1.0
# API Service The reporting ratio of Sentry profiles, if it is 0.01, it is 1%.
API_SENTRY_PROFILES_SAMPLE_RATE=1.0
# Web Service Sentry DSN address, default is empty, when empty,
# all monitoring information is not reported to Sentry.
# If not set, Sentry error reporting will be disabled.
WEB_SENTRY_DSN=
# ------------------------------
# Notion Integration Configuration
@@ -405,7 +410,7 @@ RESEND_API_KEY=your-resend-api-key
# SMTP server configuration, used when MAIL_TYPE is `smtp`
SMTP_SERVER=
SMTP_PORT=
SMTP_PORT=465
SMTP_USERNAME=
SMTP_PASSWORD=
SMTP_USE_TLS=true
@@ -424,7 +429,6 @@ INVITE_EXPIRY_HOURS=72
# The sandbox service endpoint.
CODE_EXECUTION_ENDPOINT=http://sandbox:8194
CODE_EXECUTION_API_KEY=dify-sandbox
CODE_MAX_NUMBER=9223372036854775807
CODE_MIN_NUMBER=-9223372036854775808
CODE_MAX_STRING_LENGTH=80000
@@ -455,17 +459,17 @@ PGDATA=/var/lib/postgresql/data/pgdata
# ------------------------------
# The API key for the sandbox service
API_KEY=dify-sandbox
SANDBOX_API_KEY=dify-sandbox
# The mode in which the Gin framework runs
GIN_MODE=release
SANDBOX_GIN_MODE=release
# The timeout for the worker in seconds
WORKER_TIMEOUT=15
SANDBOX_WORKER_TIMEOUT=15
# Enable network for the sandbox service
ENABLE_NETWORK=true
SANDBOX_ENABLE_NETWORK=true
# HTTP proxy URL for SSRF protection
HTTP_PROXY=http://ssrf_proxy:3128
SANDBOX_HTTP_PROXY=http://ssrf_proxy:3128
# HTTPS proxy URL for SSRF protection
HTTPS_PROXY=http://ssrf_proxy:3128
SANDBOX_HTTPS_PROXY=http://ssrf_proxy:3128
# The port on which the sandbox service runs
SANDBOX_PORT=8194
@@ -479,16 +483,16 @@ QDRANT_API_KEY=difyai123456
# Environment Variables for weaviate Service
# (only used when VECTOR_STORE is weaviate)
# ------------------------------
PERSISTENCE_DATA_PATH='/var/lib/weaviate'
QUERY_DEFAULTS_LIMIT=25
AUTHENTICATION_ANONYMOUS_ACCESS_ENABLED=true
DEFAULT_VECTORIZER_MODULE=none
CLUSTER_HOSTNAME=node1
AUTHENTICATION_APIKEY_ENABLED=true
AUTHENTICATION_APIKEY_ALLOWED_KEYS=WVF5YThaHlkYwhGUSmCRgsX3tD5ngdN8pkih
AUTHENTICATION_APIKEY_USERS=hello@dify.ai
AUTHORIZATION_ADMINLIST_ENABLED=true
AUTHORIZATION_ADMINLIST_USERS=hello@dify.ai
WEAVIATE_PERSISTENCE_DATA_PATH='/var/lib/weaviate'
WEAVIATE_QUERY_DEFAULTS_LIMIT=25
WEAVIATE_AUTHENTICATION_ANONYMOUS_ACCESS_ENABLED=true
WEAVIATE_DEFAULT_VECTORIZER_MODULE=none
WEAVIATE_CLUSTER_HOSTNAME=node1
WEAVIATE_AUTHENTICATION_APIKEY_ENABLED=true
WEAVIATE_AUTHENTICATION_APIKEY_ALLOWED_KEYS=WVF5YThaHlkYwhGUSmCRgsX3tD5ngdN8pkih
WEAVIATE_AUTHENTICATION_APIKEY_USERS=hello@dify.ai
WEAVIATE_AUTHORIZATION_ADMINLIST_ENABLED=true
WEAVIATE_AUTHORIZATION_ADMINLIST_USERS=hello@dify.ai
# ------------------------------
# Environment Variables for Chroma
@@ -500,7 +504,7 @@ CHROMA_SERVER_AUTHN_CREDENTIALS=difyai123456
# Authentication provider for Chroma server
CHROMA_SERVER_AUTHN_PROVIDER=chromadb.auth.token_authn.TokenAuthenticationServerProvider
# Persistence setting for Chroma server
IS_PERSISTENT=TRUE
CHROMA_IS_PERSISTENT=TRUE
# ------------------------------
# Environment Variables for Oracle Service
@@ -562,7 +566,7 @@ OPENSEARCH_NOFILE_HARD=65536
# Environment Variables for Nginx reverse proxy
# ------------------------------
NGINX_SERVER_NAME=_
HTTPS_ENABLED=false
NGINX_HTTPS_ENABLED=false
# HTTP port
NGINX_PORT=80
# SSL settings are only applied when NGINX_HTTPS_ENABLED is true
@@ -585,14 +589,20 @@ NGINX_PROXY_SEND_TIMEOUT=3600s
# ------------------------------
# Environment Variables for SSRF Proxy
# ------------------------------
HTTP_PORT=3128
COREDUMP_DIR=/var/spool/squid
REVERSE_PROXY_PORT=8194
SANDBOX_HOST=sandbox
SSRF_HTTP_PORT=3128
SSRF_COREDUMP_DIR=/var/spool/squid
SSRF_REVERSE_PROXY_PORT=8194
SSRF_SANDBOX_HOST=sandbox
# ------------------------------
# docker env var for specifying vector db type at startup
# (based on the vector db type, the corresponding docker
# compose profile will be used)
# ------------------------------
COMPOSE_PROFILES=${VECTOR_STORE:-weaviate}
COMPOSE_PROFILES=${VECTOR_STORE:-weaviate}
# ------------------------------
# Docker Compose Service Expose Host Port Configurations
# ------------------------------
EXPOSE_NGINX_PORT=80
EXPOSE_NGINX_SSL_PORT=443

1
docker/.gitignore vendored
View File

@@ -1 +0,0 @@
nginx/conf.d/default.conf

88
docker/README.md Normal file
View File

@@ -0,0 +1,88 @@
## README for docker Deployment
Welcome to the new `docker` directory for deploying Dify using Docker Compose. This README outlines the updates, deployment instructions, and migration details for existing users.
### What's Updated
- **Persistent Environment Variables**: Environment variables are now managed through a `.env` file, ensuring that your configurations persist across deployments.
> What is `.env`? </br> </br>
> The `.env` file is a crucial component in Docker and Docker Compose environments, serving as a centralized configuration file where you can define environment variables that are accessible to the containers at runtime. This file simplifies the management of environment settings across different stages of development, testing, and production, providing consistency and ease of configuration to deployments.
- **Unified Vector Database Services**: All vector database services are now managed from a single Docker Compose file `docker-compose.yaml`. You can switch between different vector databases by setting the `VECTOR_STORE` environment variable in your `.env` file.
- **Mandatory .env File**: A `.env` file is now required to run `docker compose up`. This file is crucial for configuring your deployment and for any custom settings to persist through upgrades.
- **Legacy Support**: Previous deployment files are now located in the `docker-legacy` directory and will no longer be maintained.
### How to Deploy Dify with `docker-compose.yaml`
1. **Prerequisites**: Ensure Docker and Docker Compose are installed on your system.
2. **Environment Setup**:
- Navigate to the `docker` directory.
- Copy the `.env.example` file to a new file named `.env` by running `cp .env.example .env`.
- Customize the `.env` file as needed. Refer to the `.env.example` file for detailed configuration options.
3. **Running the Services**:
- Execute `docker compose up` from the `docker` directory to start the services.
- To specify a vector database, set the `VECTOR_STORE` variable in your `.env` file to your desired vector database service, such as `milvus`, `weaviate`, or `opensearch`.
### How to Deploy Middleware for Developing Dify
1. **Middleware Setup**:
- Use the `docker-compose.middleware.yaml` for setting up essential middleware services like databases and caches.
- Navigate to the `docker` directory.
- Ensure the `middleware.env` file is created by running `cp middleware.env.example middleware.env` (refer to the `middleware.env.example` file).
2. **Running Middleware Services**:
- Execute `docker compose -f docker-compose.middleware.yaml up -d` to start the middleware services.
### Migration for Existing Users
For users migrating from the `docker-legacy` setup:
1. **Review Changes**: Familiarize yourself with the new `.env` configuration and Docker Compose setup.
2. **Transfer Customizations**:
- If you have customized configurations such as `docker-compose.yaml`, `ssrf_proxy/squid.conf`, or `nginx/conf.d/default.conf`, you will need to reflect these changes in the `.env` file you create.
3. **Data Migration**:
- Ensure that data from services like databases and caches is backed up and migrated appropriately to the new structure if necessary.
### Overview of `.env`
#### Key Modules and Customization
- **Vector Database Services**: Depending on the type of vector database used (`VECTOR_STORE`), users can set specific endpoints, ports, and authentication details.
- **Storage Services**: Depending on the storage type (`STORAGE_TYPE`), users can configure specific settings for S3, Azure Blob, Google Storage, etc.
- **API and Web Services**: Users can define URLs and other settings that affect how the API and web frontends operate.
#### Other notable variables
The `.env.example` file provided in the Docker setup is extensive and covers a wide range of configuration options. It is structured into several sections, each pertaining to different aspects of the application and its services. Here are some of the key sections and variables:
1. **Common Variables**:
- `CONSOLE_API_URL`, `SERVICE_API_URL`: URLs for different API services.
- `APP_WEB_URL`: Frontend application URL.
- `FILES_URL`: Base URL for file downloads and previews.
2. **Server Configuration**:
- `LOG_LEVEL`, `DEBUG`, `FLASK_DEBUG`: Logging and debug settings.
- `SECRET_KEY`: A key for encrypting session cookies and other sensitive data.
3. **Database Configuration**:
- `DB_USERNAME`, `DB_PASSWORD`, `DB_HOST`, `DB_PORT`, `DB_DATABASE`: PostgreSQL database credentials and connection details.
4. **Redis Configuration**:
- `REDIS_HOST`, `REDIS_PORT`, `REDIS_PASSWORD`: Redis server connection settings.
5. **Celery Configuration**:
- `CELERY_BROKER_URL`: Configuration for Celery message broker.
6. **Storage Configuration**:
- `STORAGE_TYPE`, `S3_BUCKET_NAME`, `AZURE_BLOB_ACCOUNT_NAME`: Settings for file storage options like local, S3, Azure Blob, etc.
7. **Vector Database Configuration**:
- `VECTOR_STORE`: Type of vector database (e.g., `weaviate`, `milvus`).
- Specific settings for each vector store like `WEAVIATE_ENDPOINT`, `MILVUS_HOST`.
8. **CORS Configuration**:
- `WEB_API_CORS_ALLOW_ORIGINS`, `CONSOLE_CORS_ALLOW_ORIGINS`: Settings for cross-origin resource sharing.
9. **Other Service-Specific Environment Variables**:
- Each service like `nginx`, `redis`, `db`, and vector databases have specific environment variables that are directly referenced in the `docker-compose.yaml`.
### Additional Information
- **Continuous Improvement Phase**: We are actively seeking feedback from the community to refine and enhance the deployment process. As more users adopt this new method, we will continue to make improvements based on your experiences and suggestions.
- **Support**: For detailed configuration options and environment variable settings, refer to the `.env.example` file and the Docker Compose configuration files in the `docker` directory.
This README aims to guide you through the deployment process using the new Docker Compose setup. For any issues or further assistance, please refer to the official documentation or contact support.

View File

@@ -12,7 +12,7 @@ services:
volumes:
- ./volumes/db/data:/var/lib/postgresql/data
ports:
- "5432:5432"
- "${EXPOSE_POSTGRES_PORT:-5432}:5432"
# The redis cache.
redis:
@@ -24,32 +24,7 @@ services:
# Set the redis password when startup redis server.
command: redis-server --requirepass difyai123456
ports:
- "6379:6379"
# The Weaviate vector store.
weaviate:
image: semitechnologies/weaviate:1.19.0
restart: always
volumes:
# Mount the Weaviate data directory to the container.
- ./volumes/weaviate:/var/lib/weaviate
env_file:
- ./middleware.env
environment:
# The Weaviate configurations
# You can refer to the [Weaviate](https://weaviate.io/developers/weaviate/config-refs/env-vars) documentation for more information.
PERSISTENCE_DATA_PATH: ${PERSISTENCE_DATA_PATH:-'/var/lib/weaviate'}
QUERY_DEFAULTS_LIMIT: ${QUERY_DEFAULTS_LIMIT:-25}
AUTHENTICATION_ANONYMOUS_ACCESS_ENABLED: ${AUTHENTICATION_ANONYMOUS_ACCESS_ENABLED:-false}
DEFAULT_VECTORIZER_MODULE: ${DEFAULT_VECTORIZER_MODULE:-none}
CLUSTER_HOSTNAME: ${CLUSTER_HOSTNAME:-node1}
AUTHENTICATION_APIKEY_ENABLED: ${AUTHENTICATION_APIKEY_ENABLED:-true}
AUTHENTICATION_APIKEY_ALLOWED_KEYS: ${AUTHENTICATION_APIKEY_ALLOWED_KEYS:-WVF5YThaHlkYwhGUSmCRgsX3tD5ngdN8pkih}
AUTHENTICATION_APIKEY_USERS: ${AUTHENTICATION_APIKEY_USERS:-hello@dify.ai}
AUTHORIZATION_ADMINLIST_ENABLED: ${AUTHORIZATION_ADMINLIST_ENABLED:-true}
AUTHORIZATION_ADMINLIST_USERS: ${AUTHORIZATION_ADMINLIST_USERS:-hello@dify.ai}
ports:
- "8080:8080"
- "${EXPOSE_REDIS_PORT:-6379}:6379"
# The DifySandbox
sandbox:
@@ -59,12 +34,12 @@ services:
# The DifySandbox configurations
# Make sure you are changing this key for your deployment with a strong key.
# You can generate a strong key using `openssl rand -base64 42`.
API_KEY: ${API_KEY:-dify-sandbox}
GIN_MODE: ${GIN_MODE:-release}
WORKER_TIMEOUT: ${WORKER_TIMEOUT:-15}
ENABLE_NETWORK: ${ENABLE_NETWORK:-true}
HTTP_PROXY: ${HTTP_PROXY:-http://ssrf_proxy:3128}
HTTPS_PROXY: ${HTTPS_PROXY:-http://ssrf_proxy:3128}
API_KEY: ${SANDBOX_API_KEY:-dify-sandbox}
GIN_MODE: ${SANDBOX_GIN_MODE:-release}
WORKER_TIMEOUT: ${SANDBOX_WORKER_TIMEOUT:-15}
ENABLE_NETWORK: ${SANDBOX_ENABLE_NETWORK:-true}
HTTP_PROXY: ${SANDBOX_HTTP_PROXY:-http://ssrf_proxy:3128}
HTTPS_PROXY: ${SANDBOX_HTTPS_PROXY:-http://ssrf_proxy:3128}
SANDBOX_PORT: ${SANDBOX_PORT:-8194}
volumes:
- ./volumes/sandbox/dependencies:/dependencies
@@ -79,22 +54,49 @@ services:
restart: always
volumes:
- ./ssrf_proxy/squid.conf.template:/etc/squid/squid.conf.template
- ./ssrf_proxy/docker-entrypoint.sh:/docker-entrypoint.sh
entrypoint: /docker-entrypoint.sh
ports:
- "3128:3128"
- "8194:8194"
- ./ssrf_proxy/docker-entrypoint.sh:/docker-entrypoint-mount.sh
entrypoint: [ "sh", "-c", "cp /docker-entrypoint-mount.sh /docker-entrypoint.sh && sed -i 's/\r$$//' /docker-entrypoint.sh && chmod +x /docker-entrypoint.sh && /docker-entrypoint.sh" ]
environment:
# pls clearly modify the squid env vars to fit your network environment.
HTTP_PORT: ${HTTP_PORT:-3128}
COREDUMP_DIR: ${COREDUMP_DIR:-/var/spool/squid}
REVERSE_PROXY_PORT: ${REVERSE_PROXY_PORT:-8194}
SANDBOX_HOST: ${SANDBOX_HOST:-sandbox}
HTTP_PORT: ${SSRF_HTTP_PORT:-3128}
COREDUMP_DIR: ${SSRF_COREDUMP_DIR:-/var/spool/squid}
REVERSE_PROXY_PORT: ${SSRF_REVERSE_PROXY_PORT:-8194}
SANDBOX_HOST: ${SSRF_SANDBOX_HOST:-sandbox}
SANDBOX_PORT: ${SANDBOX_PORT:-8194}
ports:
- "${EXPOSE_SSRF_PROXY_PORT:-3128}:${SSRF_HTTP_PORT:-3128}"
- "${EXPOSE_SANDBOX_PORT:-8194}:${SANDBOX_PORT:-8194}"
networks:
- ssrf_proxy_network
- default
# The Weaviate vector store.
weaviate:
image: semitechnologies/weaviate:1.19.0
profiles:
- weaviate
restart: always
volumes:
# Mount the Weaviate data directory to the container.
- ./volumes/weaviate:/var/lib/weaviate
env_file:
- ./middleware.env
environment:
# The Weaviate configurations
# You can refer to the [Weaviate](https://weaviate.io/developers/weaviate/config-refs/env-vars) documentation for more information.
PERSISTENCE_DATA_PATH: ${WEAVIATE_PERSISTENCE_DATA_PATH:-/var/lib/weaviate}
QUERY_DEFAULTS_LIMIT: ${WEAVIATE_QUERY_DEFAULTS_LIMIT:-25}
AUTHENTICATION_ANONYMOUS_ACCESS_ENABLED: ${WEAVIATE_AUTHENTICATION_ANONYMOUS_ACCESS_ENABLED:-false}
DEFAULT_VECTORIZER_MODULE: ${WEAVIATE_DEFAULT_VECTORIZER_MODULE:-none}
CLUSTER_HOSTNAME: ${WEAVIATE_CLUSTER_HOSTNAME:-node1}
AUTHENTICATION_APIKEY_ENABLED: ${WEAVIATE_AUTHENTICATION_APIKEY_ENABLED:-true}
AUTHENTICATION_APIKEY_ALLOWED_KEYS: ${WEAVIATE_AUTHENTICATION_APIKEY_ALLOWED_KEYS:-WVF5YThaHlkYwhGUSmCRgsX3tD5ngdN8pkih}
AUTHENTICATION_APIKEY_USERS: ${WEAVIATE_AUTHENTICATION_APIKEY_USERS:-hello@dify.ai}
AUTHORIZATION_ADMINLIST_ENABLED: ${WEAVIATE_AUTHORIZATION_ADMINLIST_ENABLED:-true}
AUTHORIZATION_ADMINLIST_USERS: ${WEAVIATE_AUTHORIZATION_ADMINLIST_USERS:-hello@dify.ai}
ports:
- "${EXPOSE_WEAVIATE_PORT:-8080}:8080"
networks:
# create a network between sandbox, api and ssrf_proxy, and can not access outside.
ssrf_proxy_network:

View File

@@ -1,265 +1,166 @@
x-shared-env: &shared-api-worker-env
# The log level for the application. Supported values are `DEBUG`, `INFO`, `WARNING`, `ERROR`, `CRITICAL`
LOG_LEVEL: ${LOG_LEVEL:-INFO}
# Debug mode, default is false. It is recommended to turn on this configuration for local development to prevent some problems caused by monkey patch.
DEBUG: ${DEBUG:-false}
# Flask debug mode, it can output trace information at the interface when turned on, which is convenient for debugging.
FLASK_DEBUG: ${FLASK_DEBUG:-false}
# A secretkey that is used for securely signing the session cookie and encrypting sensitive information on the database. You can generate a strong key using `openssl rand -base64 42`.
SECRET_KEY: ${SECRET_KEY}
# Password for admin user initialization.
# If left unset, admin user will not be prompted for a password when creating the initial admin account.
INIT_PASSWORD: ${INIT_PASSWORD}
# The base URL of console application web frontend, refers to the Console base URL of WEB service if console domain is
# different from api or web app domain.
# example: http://cloud.dify.ai
CONSOLE_WEB_URL: ${CONSOLE_WEB_URL}
# The base URL of console application api server, refers to the Console base URL of WEB service if console domain is
# different from api or web app domain.
# example: http://cloud.dify.ai
CONSOLE_API_URL: ${CONSOLE_API_URL}
# The URL prefix for Service API endpoints, refers to the base URL of the current API service if api domain is
# different from console domain.
# example: http://api.dify.ai
SERVICE_API_URL: ${SERVICE_API_URL}
# The URL prefix for Web APP frontend, refers to the Web App base URL of WEB service if web app domain is different from
# console or api domain.
# example: http://udify.app
APP_WEB_URL: ${APP_WEB_URL}
# Whether to enable the version check policy. If set to false, https://updates.dify.ai will not be called for version check.
CHECK_UPDATE_URL: ${CHECK_UPDATE_URL}
# Used to change the OpenAI base address, default is https://api.openai.com/v1.
# When OpenAI cannot be accessed in China, replace it with a domestic mirror address,
# or when a local model provides OpenAI compatible API, it can be replaced.
OPENAI_API_BASE: ${OPENAI_API_BASE}
# File preview or download Url prefix.
# used to display File preview or download Url to the front-end or as Multi-model inputs;
# Url is signed and has expiration time.
FILES_URL: ${FILES_URL}
# File Access Time specifies a time interval in seconds for the file to be accessed.
# The default value is 300 seconds.
SECRET_KEY: ${SECRET_KEY:-sk-9f73s3ljTXVcMT3Blb3ljTqtsKiGHXVcMT3BlbkFJLK7U}
INIT_PASSWORD: ${INIT_PASSWORD:-}
CONSOLE_WEB_URL: ${CONSOLE_WEB_URL:-}
CONSOLE_API_URL: ${CONSOLE_API_URL:-}
SERVICE_API_URL: ${SERVICE_API_URL:-}
APP_WEB_URL: ${APP_WEB_URL:-}
CHECK_UPDATE_URL: ${CHECK_UPDATE_URL:-true}
OPENAI_API_BASE: ${OPENAI_API_BASE:-}
FILES_URL: ${FILES_URL:-}
FILES_ACCESS_TIMEOUT: ${FILES_ACCESS_TIMEOUT:-300}
# When enabled, migrations will be executed prior to application startup and the application will start after the migrations have completed.
MIGRATION_ENABLED: ${MIGRATION_ENABLED:-true}
# Deployment environment.
# Supported values are `PRODUCTION`, `TESTING`. Default is `PRODUCTION`.
# Testing environment. There will be a distinct color label on the front-end page,
# indicating that this environment is a testing environment.
DEPLOY_ENV: ${DEPLOY_ENV:-PRODUCTION}
# API service binding address, default: 0.0.0.0, i.e., all addresses can be accessed.
DIFY_BIND_ADDRESS: ${DIFY_BIND_ADDRESS}
# API service binding port number, default 5001.
DIFY_PORT: ${DIFY_PORT}
# The number of API server workers, i.e., the number of gevent workers.
# Formula: number of cpu cores x 2 + 1
# Reference: https://docs.gunicorn.org/en/stable/design.html#how-many-workers
SERVER_WORKER_AMOUNT: ${SERVER_WORKER_AMOUNT}
# Defaults to gevent. If using windows, it can be switched to sync or solo.
SERVER_WORKER_CLASS: ${SERVER_WORKER_CLASS}
# Similar to SERVER_WORKER_CLASS. Default is gevent.
# If using windows, it can be switched to sync or solo.
CELERY_WORKER_CLASS: ${CELERY_WORKER_CLASS}
# Request handling timeout. The default is 200,
# it is recommended to set it to 360 to support a longer sse connection time.
GUNICORN_TIMEOUT: ${GUNICORN_TIMEOUT}
# The number of Celery workers. The default is 1, and can be set as needed.
CELERY_WORKER_AMOUNT: ${CELERY_WORKER_AMOUNT}
# The configurations of postgres database connection.
# It is consistent with the configuration in the 'db' service below.
DB_USERNAME: ${DB_USERNAME}
DB_PASSWORD: ${DB_PASSWORD}
DB_HOST: ${DB_HOST}
DB_PORT: ${DB_PORT}
DB_DATABASE: ${DB_DATABASE}
# The size of the database connection pool.
# The default is 30 connections, which can be appropriately increased.
SQLALCHEMY_POOL_SIZE: ${SQLALCHEMY_POOL_SIZE}
# Database connection pool recycling time, the default is 3600 seconds.
SQLALCHEMY_POOL_RECYCLE: ${SQLALCHEMY_POOL_RECYCLE}
# Whether to print SQL, default is false.
SQLALCHEMY_ECHO: ${SQLALCHEMY_ECHO}
# The configurations of redis connection.
# It is consistent with the configuration in the 'redis' service below.
REDIS_HOST: ${REDIS_HOST}
DIFY_BIND_ADDRESS: ${DIFY_BIND_ADDRESS:-0.0.0.0}
DIFY_PORT: ${DIFY_PORT:-5001}
SERVER_WORKER_AMOUNT: ${SERVER_WORKER_AMOUNT:-}
SERVER_WORKER_CLASS: ${SERVER_WORKER_CLASS:-}
CELERY_WORKER_CLASS: ${CELERY_WORKER_CLASS:-}
GUNICORN_TIMEOUT: ${GUNICORN_TIMEOUT:-360}
CELERY_WORKER_AMOUNT: ${CELERY_WORKER_AMOUNT:-}
DB_USERNAME: ${DB_USERNAME:-postgres}
DB_PASSWORD: ${DB_PASSWORD:-difyai123456}
DB_HOST: ${DB_HOST:-db}
DB_PORT: ${DB_PORT:-5432}
DB_DATABASE: ${DB_DATABASE:-dify}
SQLALCHEMY_POOL_SIZE: ${SQLALCHEMY_POOL_SIZE:-30}
SQLALCHEMY_POOL_RECYCLE: ${SQLALCHEMY_POOL_RECYCLE:-3600}
SQLALCHEMY_ECHO: ${SQLALCHEMY_ECHO:-false}
REDIS_HOST: ${REDIS_HOST:-redis}
REDIS_PORT: ${REDIS_PORT:-6379}
REDIS_USERNAME: ${REDIS_USERNAME}
REDIS_PASSWORD: ${REDIS_PASSWORD}
REDIS_USE_SSL: ${REDIS_USE_SSL}
# Redis Database, default is 0. Please use a different Database from Session Redis and Celery Broker.
REDIS_USERNAME: ${REDIS_USERNAME:-}
REDIS_PASSWORD: ${REDIS_PASSWORD:-difyai123456}
REDIS_USE_SSL: ${REDIS_USE_SSL:-false}
REDIS_DB: 0
# The configurations of celery broker.
# Use redis as the broker, and redis db 1 for celery broker.
CELERY_BROKER_URL: ${CELERY_BROKER_URL}
BROKER_USE_SSL: ${BROKER_USE_SSL}
# Specifies the allowed origins for cross-origin requests to the Web API, e.g. https://dify.app or * for all origins.
WEB_API_CORS_ALLOW_ORIGINS: ${WEB_API_CORS_ALLOW_ORIGINS}
# Specifies the allowed origins for cross-origin requests to the console API, e.g. https://cloud.dify.ai or * for all origins.
CONSOLE_CORS_ALLOW_ORIGINS: ${CONSOLE_CORS_ALLOW_ORIGINS}
# The type of storage to use for storing user files. Supported values are `local` and `s3` and `azure-blob` and `google-storage`, Default: `local`
STORAGE_TYPE: ${STORAGE_TYPE}
# The path to the local storage directory, the directory relative the root path of API service codes or absolute path. Default: `storage` or `/home/john/storage`.
# only available when STORAGE_TYPE is `local`.
CELERY_BROKER_URL: ${CELERY_BROKER_URL:-redis://:difyai123456@redis:6379/1}
BROKER_USE_SSL: ${BROKER_USE_SSL:-false}
WEB_API_CORS_ALLOW_ORIGINS: ${WEB_API_CORS_ALLOW_ORIGINS:-*}
CONSOLE_CORS_ALLOW_ORIGINS: ${CONSOLE_CORS_ALLOW_ORIGINS:-*}
STORAGE_TYPE: ${STORAGE_TYPE:-local}
STORAGE_LOCAL_PATH: storage
# The S3 storage configurations, only available when STORAGE_TYPE is `s3`.
S3_USE_AWS_MANAGED_IAM: ${S3_USE_AWS_MANAGED_IAM}
S3_ENDPOINT: ${S3_ENDPOINT}
S3_BUCKET_NAME: ${S3_BUCKET_NAME}
S3_ACCESS_KEY: ${S3_ACCESS_KEY}
S3_SECRET_KEY: ${S3_SECRET_KEY}
S3_REGION: ${S3_REGION}
# The Azure Blob storage configurations, only available when STORAGE_TYPE is `azure-blob`.
AZURE_BLOB_ACCOUNT_NAME: ${AZURE_BLOB_ACCOUNT_NAME}
AZURE_BLOB_ACCOUNT_KEY: ${AZURE_BLOB_ACCOUNT_KEY}
AZURE_BLOB_CONTAINER_NAME: ${AZURE_BLOB_CONTAINER_NAME}
AZURE_BLOB_ACCOUNT_URL: ${AZURE_BLOB_ACCOUNT_URL}
# The Google storage configurations, only available when STORAGE_TYPE is `google-storage`.
GOOGLE_STORAGE_BUCKET_NAME: ${GOOGLE_STORAGE_BUCKET_NAME}
# if you want to use Application Default Credentials, you can leave GOOGLE_STORAGE_SERVICE_ACCOUNT_JSON_BASE64 empty.
GOOGLE_STORAGE_SERVICE_ACCOUNT_JSON_BASE64: ${GOOGLE_STORAGE_SERVICE_ACCOUNT_JSON_BASE64}
# The Alibaba Cloud OSS configurations, only available when STORAGE_TYPE is `aliyun-oss`
ALIYUN_OSS_BUCKET_NAME: ${ALIYUN_OSS_BUCKET_NAME}
ALIYUN_OSS_ACCESS_KEY: ${ALIYUN_OSS_ACCESS_KEY}
ALIYUN_OSS_SECRET_KEY: ${ALIYUN_OSS_SECRET_KEY}
ALIYUN_OSS_ENDPOINT: ${ALIYUN_OSS_ENDPOINT}
ALIYUN_OSS_REGION: ${ALIYUN_OSS_REGION}
ALIYUN_OSS_AUTH_VERSION: ${ALIYUN_OSS_AUTH_VERSION}
# The Tencent COS storage configurations, only available when STORAGE_TYPE is `tencent-cos`.
TENCENT_COS_BUCKET_NAME: ${TENCENT_COS_BUCKET_NAME}
TENCENT_COS_SECRET_KEY: ${TENCENT_COS_SECRET_KEY}
TENCENT_COS_SECRET_ID: ${TENCENT_COS_SECRET_ID}
TENCENT_COS_REGION: ${TENCENT_COS_REGION}
TENCENT_COS_SCHEME: ${TENCENT_COS_SCHEME}
# The type of vector store to use. Supported values are `weaviate`, `qdrant`, `milvus`, `relyt`, `pgvector`, `chroma`, 'opensearch', 'tidb_vector'.
VECTOR_STORE: ${VECTOR_STORE}
# The Weaviate endpoint URL. Only available when VECTOR_STORE is `weaviate`.
WEAVIATE_ENDPOINT: ${WEAVIATE_ENDPOINT}
# The Weaviate API key.
WEAVIATE_API_KEY: ${WEAVIATE_API_KEY}
# The Qdrant endpoint URL. Only available when VECTOR_STORE is `qdrant`.
QDRANT_URL: ${QDRANT_URL}
# The Qdrant API key.
QDRANT_API_KEY: ${QDRANT_API_KEY}
# The Qdrant client timeout setting.
QDRANT_CLIENT_TIMEOUT: ${QDRANT_CLIENT_TIMEOUT}
# The Qdrant client enable gRPC mode.
QDRANT_GRPC_ENABLED: ${QDRANT_GRPC_ENABLED}
# The Qdrant server gRPC mode PORT.
QDRANT_GRPC_PORT: ${QDRANT_GRPC_PORT}
# Milvus configuration Only available when VECTOR_STORE is `milvus`.
# The milvus host.
MILVUS_HOST: ${MILVUS_HOST}
# The milvus host.
MILVUS_PORT: ${MILVUS_PORT}
# The milvus username.
MILVUS_USER: ${MILVUS_USER}
# The milvus password.
MILVUS_PASSWORD: ${MILVUS_PASSWORD}
# The milvus tls switch.
MILVUS_SECURE: ${MILVUS_SECURE}
# relyt configurations
RELYT_HOST: ${RELYT_HOST}
RELYT_PORT: ${RELYT_PORT}
RELYT_USER: ${RELYT_USER}
RELYT_PASSWORD: ${RELYT_PASSWORD}
RELYT_DATABASE: ${RELYT_DATABASE}
# pgvector configurations
PGVECTOR_HOST: ${PGVECTOR_HOST}
PGVECTOR_PORT: ${PGVECTOR_PORT}
PGVECTOR_USER: ${PGVECTOR_USER}
PGVECTOR_PASSWORD: ${PGVECTOR_PASSWORD}
PGVECTOR_DATABASE: ${PGVECTOR_DATABASE}
# tidb vector configurations
TIDB_VECTOR_HOST: ${TIDB_VECTOR_HOST}
TIDB_VECTOR_PORT: ${TIDB_VECTOR_PORT}
TIDB_VECTOR_USER: ${TIDB_VECTOR_USER}
TIDB_VECTOR_PASSWORD: ${TIDB_VECTOR_PASSWORD}
TIDB_VECTOR_DATABASE: ${TIDB_VECTOR_DATABASE}
# oracle configurations
ORACLE_HOST: ${ORACLE_HOST}
ORACLE_PORT: ${ORACLE_PORT}
ORACLE_USER: ${ORACLE_USER}
ORACLE_PASSWORD: ${ORACLE_PASSWORD}
ORACLE_DATABASE: ${ORACLE_DATABASE}
# Chroma configuration
CHROMA_HOST: ${CHROMA_HOST}
CHROMA_PORT: ${CHROMA_PORT}
CHROMA_TENANT: ${CHROMA_TENANT}
CHROMA_DATABASE: ${CHROMA_DATABASE}
CHROMA_AUTH_PROVIDER: ${CHROMA_AUTH_PROVIDER}
CHROMA_AUTH_CREDENTIALS: ${CHROMA_AUTH_CREDENTIALS}
# OpenSearch configuration
OPENSEARCH_HOST: ${OPENSEARCH_HOST}
OPENSEARCH_PORT: ${OPENSEARCH_PORT}
OPENSEARCH_USER: ${OPENSEARCH_USER}
OPENSEARCH_PASSWORD: ${OPENSEARCH_PASSWORD}
OPENSEARCH_SECURE: ${OPENSEARCH_SECURE}
# tencent configurations
TENCENT_VECTOR_DB_URL: ${TENCENT_VECTOR_DB_URL}
TENCENT_VECTOR_DB_API_KEY: ${TENCENT_VECTOR_DB_API_KEY}
TENCENT_VECTOR_DB_TIMEOUT: ${TENCENT_VECTOR_DB_TIMEOUT}
TENCENT_VECTOR_DB_USERNAME: ${TENCENT_VECTOR_DB_USERNAME}
TENCENT_VECTOR_DB_DATABASE: ${TENCENT_VECTOR_DB_DATABASE}
TENCENT_VECTOR_DB_SHARD: ${TENCENT_VECTOR_DB_SHARD}
TENCENT_VECTOR_DB_REPLICAS: ${TENCENT_VECTOR_DB_REPLICAS}
# Knowledge Configuration
# Upload file size limit, default 15M.
UPLOAD_FILE_SIZE_LIMIT: ${UPLOAD_FILE_SIZE_LIMIT}
# The maximum number of files that can be uploaded at a time, default 5.
UPLOAD_FILE_BATCH_LIMIT: ${UPLOAD_FILE_BATCH_LIMIT}
# `dify` Dify's proprietary file extraction scheme
# `Unstructured` Unstructured.io file extraction scheme
ETL_TYPE: ${ETL_TYPE}
# Unstructured API path, needs to be configured when ETL_TYPE is Unstructured.
UNSTRUCTURED_API_URL: ${UNSTRUCTURED_API_URL}
# Multi-modal Configuration
# The format of the image sent when the multi-modal model is input, the default is base64, optional url.
MULTIMODAL_SEND_IMAGE_FORMAT: ${MULTIMODAL_SEND_IMAGE_FORMAT}
# Upload image file size limit, default 10M.
UPLOAD_IMAGE_FILE_SIZE_LIMIT: ${UPLOAD_IMAGE_FILE_SIZE_LIMIT}
# The DSN for Sentry error reporting. If not set, Sentry error reporting will be disabled.
SENTRY_DSN: ${SENTRY_DSN}
# The sample rate for Sentry events. Default: `1.0`
SENTRY_TRACES_SAMPLE_RATE: ${SENTRY_TRACES_SAMPLE_RATE}
# The sample rate for Sentry profiles. Default: `1.0`
SENTRY_PROFILES_SAMPLE_RATE: ${SENTRY_PROFILES_SAMPLE_RATE}
# Notion import configuration, support public and internal
NOTION_INTEGRATION_TYPE: ${NOTION_INTEGRATION_TYPE}
NOTION_CLIENT_SECRET: ${NOTION_CLIENT_SECRET}
NOTION_CLIENT_ID: ${NOTION_CLIENT_ID}
NOTION_INTERNAL_SECRET: ${NOTION_INTERNAL_SECRET}
# Mail configuration, support: resend, smtp
MAIL_TYPE: ${MAIL_TYPE}
# default send from email address, if not specified
MAIL_DEFAULT_SEND_FROM: ${MAIL_DEFAULT_SEND_FROM}
SMTP_SERVER: ${SMTP_SERVER}
SMTP_PORT: ${SMTP_PORT}
SMTP_USERNAME: ${SMTP_USERNAME}
SMTP_PASSWORD: ${SMTP_PASSWORD}
SMTP_USE_TLS: ${SMTP_USE_TLS}
SMTP_OPPORTUNISTIC_TLS: ${SMTP_OPPORTUNISTIC_TLS}
# the api-key for resend (https://resend.com)
RESEND_API_KEY: ${RESEND_API_KEY}
S3_USE_AWS_MANAGED_IAM: ${S3_USE_AWS_MANAGED_IAM:-false}
S3_ENDPOINT: ${S3_ENDPOINT:-}
S3_BUCKET_NAME: ${S3_BUCKET_NAME:-}
S3_ACCESS_KEY: ${S3_ACCESS_KEY:-}
S3_SECRET_KEY: ${S3_SECRET_KEY:-}
S3_REGION: ${S3_REGION:-us-east-1}
AZURE_BLOB_ACCOUNT_NAME: ${AZURE_BLOB_ACCOUNT_NAME:-}
AZURE_BLOB_ACCOUNT_KEY: ${AZURE_BLOB_ACCOUNT_KEY:-}
AZURE_BLOB_CONTAINER_NAME: ${AZURE_BLOB_CONTAINER_NAME:-}
AZURE_BLOB_ACCOUNT_URL: ${AZURE_BLOB_ACCOUNT_URL:-}
GOOGLE_STORAGE_BUCKET_NAME: ${GOOGLE_STORAGE_BUCKET_NAME:-}
GOOGLE_STORAGE_SERVICE_ACCOUNT_JSON_BASE64: ${GOOGLE_STORAGE_SERVICE_ACCOUNT_JSON_BASE64:-}
ALIYUN_OSS_BUCKET_NAME: ${ALIYUN_OSS_BUCKET_NAME:-}
ALIYUN_OSS_ACCESS_KEY: ${ALIYUN_OSS_ACCESS_KEY:-}
ALIYUN_OSS_SECRET_KEY: ${ALIYUN_OSS_SECRET_KEY:-}
ALIYUN_OSS_ENDPOINT: ${ALIYUN_OSS_ENDPOINT:-}
ALIYUN_OSS_REGION: ${ALIYUN_OSS_REGION:-}
ALIYUN_OSS_AUTH_VERSION: ${ALIYUN_OSS_AUTH_VERSION:-v4}
TENCENT_COS_BUCKET_NAME: ${TENCENT_COS_BUCKET_NAME:-}
TENCENT_COS_SECRET_KEY: ${TENCENT_COS_SECRET_KEY:-}
TENCENT_COS_SECRET_ID: ${TENCENT_COS_SECRET_ID:-}
TENCENT_COS_REGION: ${TENCENT_COS_REGION:-}
TENCENT_COS_SCHEME: ${TENCENT_COS_SCHEME:-}
OCI_ENDPOINT: ${OCI_ENDPOINT:-}
OCI_BUCKET_NAME: ${OCI_BUCKET_NAME:-}
OCI_ACCESS_KEY: ${OCI_ACCESS_KEY:-}
OCI_SECRET_KEY: ${OCI_SECRET_KEY:-}
OCI_REGION: ${OCI_REGION:-}
VECTOR_STORE: ${VECTOR_STORE:-weaviate}
WEAVIATE_ENDPOINT: ${WEAVIATE_ENDPOINT:-http://weaviate:8080}
WEAVIATE_API_KEY: ${WEAVIATE_API_KEY:-WVF5YThaHlkYwhGUSmCRgsX3tD5ngdN8pkih}
QDRANT_URL: ${QDRANT_URL:-http://qdrant:6333}
QDRANT_API_KEY: ${QDRANT_API_KEY:-difyai123456}
QDRANT_CLIENT_TIMEOUT: ${QDRANT_CLIENT_TIMEOUT:-20}
QDRANT_GRPC_ENABLED: ${QDRANT_GRPC_ENABLED:-false}
QDRANT_GRPC_PORT: ${QDRANT_GRPC_PORT:-6334}
MILVUS_HOST: ${MILVUS_HOST:-127.0.0.1}
MILVUS_PORT: ${MILVUS_PORT:-19530}
MILVUS_USER: ${MILVUS_USER:-root}
MILVUS_PASSWORD: ${MILVUS_PASSWORD:-Milvus}
MILVUS_SECURE: ${MILVUS_SECURE:-false}
RELYT_HOST: ${RELYT_HOST:-db}
RELYT_PORT: ${RELYT_PORT:-5432}
RELYT_USER: ${RELYT_USER:-postgres}
RELYT_PASSWORD: ${RELYT_PASSWORD:-difyai123456}
RELYT_DATABASE: ${RELYT_DATABASE:-postgres}
PGVECTOR_HOST: ${PGVECTOR_HOST:-pgvector}
PGVECTOR_PORT: ${PGVECTOR_PORT:-5432}
PGVECTOR_USER: ${PGVECTOR_USER:-postgres}
PGVECTOR_PASSWORD: ${PGVECTOR_PASSWORD:-difyai123456}
PGVECTOR_DATABASE: ${PGVECTOR_DATABASE:-dify}
TIDB_VECTOR_HOST: ${TIDB_VECTOR_HOST:-tidb}
TIDB_VECTOR_PORT: ${TIDB_VECTOR_PORT:-4000}
TIDB_VECTOR_USER: ${TIDB_VECTOR_USER:-}
TIDB_VECTOR_PASSWORD: ${TIDB_VECTOR_PASSWORD:-}
TIDB_VECTOR_DATABASE: ${TIDB_VECTOR_DATABASE:-dify}
ORACLE_HOST: ${ORACLE_HOST:-oracle}
ORACLE_PORT: ${ORACLE_PORT:-1521}
ORACLE_USER: ${ORACLE_USER:-dify}
ORACLE_PASSWORD: ${ORACLE_PASSWORD:-dify}
ORACLE_DATABASE: ${ORACLE_DATABASE:-FREEPDB1}
CHROMA_HOST: ${CHROMA_HOST:-127.0.0.1}
CHROMA_PORT: ${CHROMA_PORT:-8000}
CHROMA_TENANT: ${CHROMA_TENANT:-default_tenant}
CHROMA_DATABASE: ${CHROMA_DATABASE:-default_database}
CHROMA_AUTH_PROVIDER: ${CHROMA_AUTH_PROVIDER:-chromadb.auth.token_authn.TokenAuthClientProvider}
CHROMA_AUTH_CREDENTIALS: ${CHROMA_AUTH_CREDENTIALS:-}
OPENSEARCH_HOST: ${OPENSEARCH_HOST:-opensearch}
OPENSEARCH_PORT: ${OPENSEARCH_PORT:-9200}
OPENSEARCH_USER: ${OPENSEARCH_USER:-admin}
OPENSEARCH_PASSWORD: ${OPENSEARCH_PASSWORD:-admin}
OPENSEARCH_SECURE: ${OPENSEARCH_SECURE:-true}
TENCENT_VECTOR_DB_URL: ${TENCENT_VECTOR_DB_URL:-http://127.0.0.1}
TENCENT_VECTOR_DB_API_KEY: ${TENCENT_VECTOR_DB_API_KEY:-dify}
TENCENT_VECTOR_DB_TIMEOUT: ${TENCENT_VECTOR_DB_TIMEOUT:-30}
TENCENT_VECTOR_DB_USERNAME: ${TENCENT_VECTOR_DB_USERNAME:-dify}
TENCENT_VECTOR_DB_DATABASE: ${TENCENT_VECTOR_DB_DATABASE:-dify}
TENCENT_VECTOR_DB_SHARD: ${TENCENT_VECTOR_DB_SHARD:-1}
TENCENT_VECTOR_DB_REPLICAS: ${TENCENT_VECTOR_DB_REPLICAS:-2}
UPLOAD_FILE_SIZE_LIMIT: ${UPLOAD_FILE_SIZE_LIMIT:-15}
UPLOAD_FILE_BATCH_LIMIT: ${UPLOAD_FILE_BATCH_LIMIT:-5}
ETL_TYPE: ${ETL_TYPE:-dify}
UNSTRUCTURED_API_URL: ${UNSTRUCTURED_API_URL:-}
MULTIMODAL_SEND_IMAGE_FORMAT: ${MULTIMODAL_SEND_IMAGE_FORMAT:-base64}
UPLOAD_IMAGE_FILE_SIZE_LIMIT: ${UPLOAD_IMAGE_FILE_SIZE_LIMIT:-10}
SENTRY_DSN: ${API_SENTRY_DSN:-}
SENTRY_TRACES_SAMPLE_RATE: ${API_SENTRY_TRACES_SAMPLE_RATE:-1.0}
SENTRY_PROFILES_SAMPLE_RATE: ${API_SENTRY_PROFILES_SAMPLE_RATE:-1.0}
NOTION_INTEGRATION_TYPE: ${NOTION_INTEGRATION_TYPE:-public}
NOTION_CLIENT_SECRET: ${NOTION_CLIENT_SECRET:-}
NOTION_CLIENT_ID: ${NOTION_CLIENT_ID:-}
NOTION_INTERNAL_SECRET: ${NOTION_INTERNAL_SECRET:-}
MAIL_TYPE: ${MAIL_TYPE:-resend}
MAIL_DEFAULT_SEND_FROM: ${MAIL_DEFAULT_SEND_FROM:-}
SMTP_SERVER: ${SMTP_SERVER:-}
SMTP_PORT: ${SMTP_PORT:-465}
SMTP_USERNAME: ${SMTP_USERNAME:-}
SMTP_PASSWORD: ${SMTP_PASSWORD:-}
SMTP_USE_TLS: ${SMTP_USE_TLS:-true}
SMTP_OPPORTUNISTIC_TLS: ${SMTP_OPPORTUNISTIC_TLS:-false}
RESEND_API_KEY: ${RESEND_API_KEY:-your-resend-api-key}
RESEND_API_URL: https://api.resend.com
# Indexing configuration
INDEXING_MAX_SEGMENTATION_TOKENS_LENGTH: ${INDEXING_MAX_SEGMENTATION_TOKENS_LENGTH}
# Other configurations
INVITE_EXPIRY_HOURS: ${INVITE_EXPIRY_HOURS}
INDEXING_MAX_SEGMENTATION_TOKENS_LENGTH: ${INDEXING_MAX_SEGMENTATION_TOKENS_LENGTH:-1000}
INVITE_EXPIRY_HOURS: ${INVITE_EXPIRY_HOURS:-72}
CODE_EXECUTION_ENDPOINT: ${CODE_EXECUTION_ENDPOINT:-http://sandbox:8194}
CODE_EXECUTION_API_KEY: ${CODE_EXECUTION_API_KEY:-dify-sandbox}
CODE_EXECUTION_API_KEY: ${SANDBOX_API_KEY:-dify-sandbox}
CODE_MAX_NUMBER: ${CODE_MAX_NUMBER:-9223372036854775807}
CODE_MIN_NUMBER: ${CODE_MIN_NUMBER:- -9223372036854775808}
CODE_MIN_NUMBER: ${CODE_MIN_NUMBER:--9223372036854775808}
CODE_MAX_STRING_LENGTH: ${CODE_MAX_STRING_LENGTH:-80000}
TEMPLATE_TRANSFORM_MAX_LENGTH: ${TEMPLATE_TRANSFORM_MAX_LENGTH:-80000}
CODE_MAX_STRING_ARRAY_LENGTH: ${CODE_MAX_STRING_ARRAY_LENGTH:-30}
CODE_MAX_OBJECT_ARRAY_LENGTH: ${CODE_MAX_OBJECT_ARRAY_LENGTH:-30}
CODE_MAX_NUMBER_ARRAY_LENGTH: ${CODE_MAX_NUMBER_ARRAY_LENGTH:-1000}
SSRF_PROXY_HTTP_URL: ${SSRF_PROXY_HTTP_URL:-'http://ssrf_proxy:3128'}
SSRF_PROXY_HTTPS_URL: ${SSRF_PROXY_HTTPS_URL:-'http://ssrf_proxy:3128'}
SSRF_PROXY_HTTP_URL: ${SSRF_PROXY_HTTP_URL:-http://ssrf_proxy:3128}
SSRF_PROXY_HTTPS_URL: ${SSRF_PROXY_HTTPS_URL:-http://ssrf_proxy:3128}
services:
# API service
api:
image: langgenius/dify-api:0.6.12
image: langgenius/dify-api:0.6.12-fix1
restart: always
environment:
# Use the shared environment variables.
@@ -272,9 +173,6 @@ services:
volumes:
# Mount the storage directory to the container, for storing user files.
- ./volumes/app/storage:/app/api/storage
# uncomment to expose dify-api port to host
# ports:
# - "5001:5001"
networks:
- ssrf_proxy_network
- default
@@ -282,7 +180,7 @@ services:
# worker service
# The Celery worker for processing the queue.
worker:
image: langgenius/dify-api:0.6.12
image: langgenius/dify-api:0.6.12-fix1
restart: always
environment:
# Use the shared environment variables.
@@ -301,15 +199,12 @@ services:
# Frontend web application.
web:
image: langgenius/dify-web:0.6.12
image: langgenius/dify-web:0.6.12-fix1
restart: always
environment:
CONSOLE_API_URL: ${CONSOLE_API_URL:-}
APP_API_URL: ${APP_API_URL:-}
SENTRY_DSN: ${SENTRY_DSN:-}
# uncomment to expose dify-web port to host
# ports:
# - "3000:3000"
SENTRY_DSN: ${WEB_SENTRY_DSN:-}
# The postgres database.
db:
@@ -322,9 +217,6 @@ services:
PGDATA: ${PGDATA:-/var/lib/postgresql/data/pgdata}
volumes:
- ./volumes/db/data:/var/lib/postgresql/data
# uncomment to expose db(postgresql) port to host
# ports:
# - "5432:5432"
healthcheck:
test: [ "CMD", "pg_isready" ]
interval: 1s
@@ -342,9 +234,6 @@ services:
command: redis-server --requirepass ${REDIS_PASSWORD:-difyai123456}
healthcheck:
test: [ "CMD", "redis-cli", "ping" ]
# uncomment to expose redis port to host
# ports:
# - "6379:6379"
# The DifySandbox
sandbox:
@@ -354,12 +243,12 @@ services:
# The DifySandbox configurations
# Make sure you are changing this key for your deployment with a strong key.
# You can generate a strong key using `openssl rand -base64 42`.
API_KEY: ${API_KEY:-dify-sandbox}
GIN_MODE: ${GIN_MODE:-release}
WORKER_TIMEOUT: ${WORKER_TIMEOUT:-15}
ENABLE_NETWORK: ${ENABLE_NETWORK:-true}
HTTP_PROXY: ${HTTP_PROXY:-http://ssrf_proxy:3128}
HTTPS_PROXY: ${HTTPS_PROXY:-http://ssrf_proxy:3128}
API_KEY: ${SANDBOX_API_KEY:-dify-sandbox}
GIN_MODE: ${SANDBOX_GIN_MODE:-release}
WORKER_TIMEOUT: ${SANDBOX_WORKER_TIMEOUT:-15}
ENABLE_NETWORK: ${SANDBOX_ENABLE_NETWORK:-true}
HTTP_PROXY: ${SANDBOX_HTTP_PROXY:-http://ssrf_proxy:3128}
HTTPS_PROXY: ${SANDBOX_HTTPS_PROXY:-http://ssrf_proxy:3128}
SANDBOX_PORT: ${SANDBOX_PORT:-8194}
volumes:
- ./volumes/sandbox/dependencies:/dependencies
@@ -374,14 +263,14 @@ services:
restart: always
volumes:
- ./ssrf_proxy/squid.conf.template:/etc/squid/squid.conf.template
- ./ssrf_proxy/docker-entrypoint.sh:/docker-entrypoint.sh
entrypoint: /docker-entrypoint.sh
- ./ssrf_proxy/docker-entrypoint.sh:/docker-entrypoint-mount.sh
entrypoint: [ "sh", "-c", "cp /docker-entrypoint-mount.sh /docker-entrypoint.sh && sed -i 's/\r$$//' /docker-entrypoint.sh && chmod +x /docker-entrypoint.sh && /docker-entrypoint.sh" ]
environment:
# pls clearly modify the squid env vars to fit your network environment.
HTTP_PORT: ${HTTP_PORT:-3128}
COREDUMP_DIR: ${COREDUMP_DIR:-/var/spool/squid}
REVERSE_PROXY_PORT: ${REVERSE_PROXY_PORT:-8194}
SANDBOX_HOST: ${SANDBOX_HOST:-sandbox}
HTTP_PORT: ${SSRF_HTTP_PORT:-3128}
COREDUMP_DIR: ${SSRF_COREDUMP_DIR:-/var/spool/squid}
REVERSE_PROXY_PORT: ${SSRF_REVERSE_PROXY_PORT:-8194}
SANDBOX_HOST: ${SSRF_SANDBOX_HOST:-sandbox}
SANDBOX_PORT: ${SANDBOX_PORT:-8194}
networks:
- ssrf_proxy_network
@@ -395,14 +284,16 @@ services:
volumes:
- ./nginx/nginx.conf.template:/etc/nginx/nginx.conf.template
- ./nginx/proxy.conf.template:/etc/nginx/proxy.conf.template
- ./nginx/https.conf.template:/etc/nginx/https.conf.template
- ./nginx/conf.d:/etc/nginx/conf.d
- ./nginx/docker-entrypoint.sh:/docker-entrypoint.sh
- ./nginx/docker-entrypoint.sh:/docker-entrypoint-mount.sh
- ./nginx/ssl:/etc/ssl
entrypoint: /docker-entrypoint.sh
entrypoint: [ "sh", "-c", "cp /docker-entrypoint-mount.sh /docker-entrypoint.sh && sed -i 's/\r$$//' /docker-entrypoint.sh && chmod +x /docker-entrypoint.sh && /docker-entrypoint.sh" ]
environment:
NGINX_SERVER_NAME: ${NGINX_SERVER_NAME:-_}
HTTPS_ENABLED: ${HTTPS_ENABLED:-false}
NGINX_HTTPS_ENABLED: ${NGINX_HTTPS_ENABLED:-false}
NGINX_SSL_PORT: ${NGINX_SSL_PORT:-443}
NGINX_PORT: ${NGINX_PORT:-80}
# You're required to add your own SSL certificates/keys to the `./nginx/ssl` directory
# and modify the env vars below in .env if HTTPS_ENABLED is true.
NGINX_SSL_CERT_FILENAME: ${NGINX_SSL_CERT_FILENAME:-dify.crt}
@@ -417,13 +308,14 @@ services:
- api
- web
ports:
- "${NGINX_PORT:-80}:80"
- "${NGINX_SSL_PORT:-443}:443"
- "${NGINX_PORT:-80}:${EXPOSE_NGINX_PORT:-80}"
- "${NGINX_SSL_PORT:-443}:${EXPOSE_NGINX_SSL_PORT:-443}"
# The Weaviate vector store.
weaviate:
image: semitechnologies/weaviate:1.19.0
profiles:
- ''
- weaviate
restart: always
volumes:
@@ -432,20 +324,16 @@ services:
environment:
# The Weaviate configurations
# You can refer to the [Weaviate](https://weaviate.io/developers/weaviate/config-refs/env-vars) documentation for more information.
PERSISTENCE_DATA_PATH: ${PERSISTENCE_DATA_PATH:-/var/lib/weaviate}
QUERY_DEFAULTS_LIMIT: ${QUERY_DEFAULTS_LIMIT:-25}
AUTHENTICATION_ANONYMOUS_ACCESS_ENABLED: ${AUTHENTICATION_ANONYMOUS_ACCESS_ENABLED:-false}
DEFAULT_VECTORIZER_MODULE: ${DEFAULT_VECTORIZER_MODULE:-none}
CLUSTER_HOSTNAME: ${CLUSTER_HOSTNAME:-node1}
AUTHENTICATION_APIKEY_ENABLED: ${AUTHENTICATION_APIKEY_ENABLED:-true}
AUTHENTICATION_APIKEY_ALLOWED_KEYS: ${AUTHENTICATION_APIKEY_ALLOWED_KEYS:-WVF5YThaHlkYwhGUSmCRgsX3tD5ngdN8pkih}
AUTHENTICATION_APIKEY_USERS: ${AUTHENTICATION_APIKEY_USERS:-hello@dify.ai}
AUTHORIZATION_ADMINLIST_ENABLED: ${AUTHORIZATION_ADMINLIST_ENABLED:-true}
AUTHORIZATION_ADMINLIST_USERS: ${AUTHORIZATION_ADMINLIST_USERS:-hello@dify.ai}
# uncomment to expose weaviate port to host
ports:
- "8080:8080"
PERSISTENCE_DATA_PATH: ${WEAVIATE_PERSISTENCE_DATA_PATH:-/var/lib/weaviate}
QUERY_DEFAULTS_LIMIT: ${WEAVIATE_QUERY_DEFAULTS_LIMIT:-25}
AUTHENTICATION_ANONYMOUS_ACCESS_ENABLED: ${WEAVIATE_AUTHENTICATION_ANONYMOUS_ACCESS_ENABLED:-false}
DEFAULT_VECTORIZER_MODULE: ${WEAVIATE_DEFAULT_VECTORIZER_MODULE:-none}
CLUSTER_HOSTNAME: ${WEAVIATE_CLUSTER_HOSTNAME:-node1}
AUTHENTICATION_APIKEY_ENABLED: ${WEAVIATE_AUTHENTICATION_APIKEY_ENABLED:-true}
AUTHENTICATION_APIKEY_ALLOWED_KEYS: ${WEAVIATE_AUTHENTICATION_APIKEY_ALLOWED_KEYS:-WVF5YThaHlkYwhGUSmCRgsX3tD5ngdN8pkih}
AUTHENTICATION_APIKEY_USERS: ${WEAVIATE_AUTHENTICATION_APIKEY_USERS:-hello@dify.ai}
AUTHORIZATION_ADMINLIST_ENABLED: ${WEAVIATE_AUTHORIZATION_ADMINLIST_ENABLED:-true}
AUTHORIZATION_ADMINLIST_USERS: ${WEAVIATE_AUTHORIZATION_ADMINLIST_USERS:-hello@dify.ai}
# Qdrant vector store.
# (if used, you need to set VECTOR_STORE to qdrant in the api & worker service.)
@@ -458,10 +346,6 @@ services:
- ./volumes/qdrant:/qdrant/storage
environment:
QDRANT_API_KEY: ${QDRANT_API_KEY:-difyai123456}
# uncomment to expose qdrant port to host
ports:
- "6333:6333"
- "6334:6334"
# The pgvector vector database.
pgvector:
@@ -479,9 +363,6 @@ services:
PGDATA: ${PGVECTOR_PGDATA:-/var/lib/postgresql/data/pgdata}
volumes:
- ./volumes/pgvector/data:/var/lib/postgresql/data
# uncomment to expose db(postgresql) port to host
ports:
- "5433:5432"
healthcheck:
test: [ "CMD", "pg_isready" ]
interval: 1s
@@ -495,18 +376,15 @@ services:
- pgvecto-rs
restart: always
environment:
PGUSER: ${PGUSER:-postgres}
PGUSER: ${PGVECTOR_PGUSER:-postgres}
# The password for the default postgres user.
POSTGRES_PASSWORD: ${POSTGRES_PASSWORD:-difyai123456}
POSTGRES_PASSWORD: ${PGVECTOR_POSTGRES_PASSWORD:-difyai123456}
# The name of the default postgres database.
POSTGRES_DB: ${POSTGRES_DB:-dify}
POSTGRES_DB: ${PGVECTOR_POSTGRES_DB:-dify}
# postgres data directory
PGDATA: ${PGDATA:-/var/lib/postgresql/data/pgdata}
PGDATA: ${PGVECTOR_PGDATA:-/var/lib/postgresql/data/pgdata}
volumes:
- ./volumes/pgvecto_rs/data:/var/lib/postgresql/data
# uncomment to expose db(postgresql) port to host
ports:
- "5431:5432"
healthcheck:
test: [ "CMD", "pg_isready" ]
interval: 1s
@@ -524,17 +402,14 @@ services:
environment:
CHROMA_SERVER_AUTHN_CREDENTIALS: ${CHROMA_SERVER_AUTHN_CREDENTIALS:-difyai123456}
CHROMA_SERVER_AUTHN_PROVIDER: ${CHROMA_SERVER_AUTHN_PROVIDER:-chromadb.auth.token_authn.TokenAuthenticationServerProvider}
IS_PERSISTENT: ${IS_PERSISTENT:-TRUE}
ports:
- "8000:8000"
IS_PERSISTENT: ${CHROMA_IS_PERSISTENT:-TRUE}
# Oracle vector database
oracle:
image: container-registry.oracle.com/database/free:latest
profiles:
- oracle
restart: always
ports:
- 1521:1521
volumes:
- type: volume
source: oradata
@@ -574,9 +449,6 @@ services:
environment:
MINIO_ACCESS_KEY: ${MINIO_ACCESS_KEY:-minioadmin}
MINIO_SECRET_KEY: ${MINIO_SECRET_KEY:-minioadmin}
ports:
- "9001:9001"
- "9000:9000"
volumes:
- ./volumes/milvus/minio:/minio_data
command: minio server /minio_data --console-address ":9001"
@@ -606,15 +478,13 @@ services:
start_period: 90s
timeout: 20s
retries: 3
ports:
- "19530:19530"
- "9091:9091"
depends_on:
- "etcd"
- "minio"
networks:
- milvus
# Opensearch vector database
opensearch:
container_name: opensearch
image: opensearchproject/opensearch:latest
@@ -634,9 +504,6 @@ services:
hard: ${OPENSEARCH_NOFILE_HARD:-65536}
volumes:
- ./volumes/opensearch/data:/usr/share/opensearch/data
ports:
- "9200:9200"
- "9600:9600"
networks:
- opensearch-net
@@ -645,10 +512,6 @@ services:
image: opensearchproject/opensearch-dashboards:latest
profiles:
- opensearch
ports:
- "5601:5601"
expose:
- "5601"
environment:
OPENSEARCH_HOSTS: '["https://opensearch:9200"]'
volumes:

View File

@@ -10,33 +10,43 @@ POSTGRES_DB=dify
PGDATA=/var/lib/postgresql/data/pgdata
# ------------------------------
# Environment Variables for qdrant Service
# (only used when VECTOR_STORE is qdrant)
# ------------------------------
QDRANT_API_KEY=difyai123456
# ------------------------------
# Environment Variables for sandbox Service
API_KEY=dify-sandbox
GIN_MODE=release
WORKER_TIMEOUT=15
ENABLE_NETWORK=true
HTTP_PROXY=http://ssrf_proxy:3128
HTTPS_PROXY=http://ssrf_proxy:3128
SANDBOX_API_KEY=dify-sandbox
SANDBOX_GIN_MODE=release
SANDBOX_WORKER_TIMEOUT=15
SANDBOX_ENABLE_NETWORK=true
SANDBOX_HTTP_PROXY=http://ssrf_proxy:3128
SANDBOX_HTTPS_PROXY=http://ssrf_proxy:3128
SANDBOX_PORT=8194
# ------------------------------
# ------------------------------
# Environment Variables for weaviate Service
# (only used when VECTOR_STORE is weaviate)
# Environment Variables for ssrf_proxy Service
# ------------------------------
QUERY_DEFAULTS_LIMIT=25
AUTHENTICATION_ANONYMOUS_ACCESS_ENABLED=true
DEFAULT_VECTORIZER_MODULE=none
CLUSTER_HOSTNAME=node1
AUTHENTICATION_APIKEY_ENABLED=true
AUTHENTICATION_APIKEY_ALLOWED_KEYS=WVF5YThaHlkYwhGUSmCRgsX3tD5ngdN8pkih
AUTHENTICATION_APIKEY_USERS=hello@dify.ai
AUTHORIZATION_ADMINLIST_ENABLED=true
AUTHORIZATION_ADMINLIST_USERS=hello@dify.ai
SSRF_HTTP_PORT=3128
SSRF_COREDUMP_DIR=/var/spool/squid
SSRF_REVERSE_PROXY_PORT=8194
SSRF_SANDBOX_HOST=sandbox
# ------------------------------
# Environment Variables for weaviate Service
# ------------------------------
WEAVIATE_QUERY_DEFAULTS_LIMIT=25
WEAVIATE_AUTHENTICATION_ANONYMOUS_ACCESS_ENABLED=true
WEAVIATE_DEFAULT_VECTORIZER_MODULE=none
WEAVIATE_CLUSTER_HOSTNAME=node1
WEAVIATE_AUTHENTICATION_APIKEY_ENABLED=true
WEAVIATE_AUTHENTICATION_APIKEY_ALLOWED_KEYS=WVF5YThaHlkYwhGUSmCRgsX3tD5ngdN8pkih
WEAVIATE_AUTHENTICATION_APIKEY_USERS=hello@dify.ai
WEAVIATE_AUTHORIZATION_ADMINLIST_ENABLED=true
WEAVIATE_AUTHORIZATION_ADMINLIST_USERS=hello@dify.ai
# ------------------------------
# Docker Compose Service Expose Host Port Configurations
# ------------------------------
EXPOSE_POSTGRES_PORT=5432
EXPOSE_REDIS_PORT=6379
EXPOSE_SANDBOX_PORT=8194
EXPOSE_SSRF_PROXY_PORT=3128
EXPOSE_WEAVIATE_PORT=8080

View File

@@ -1,7 +1,7 @@
# Please do not directly edit this file. Instead, modify the .env variables related to NGINX configuration.
server {
listen 80;
listen ${NGINX_PORT};
server_name ${NGINX_SERVER_NAME};
location /console/api {

View File

@@ -1,6 +1,6 @@
#!/bin/bash
if [ "${HTTPS_ENABLED}" = "true" ]; then
if [ "${NGINX_HTTPS_ENABLED}" = "true" ]; then
# set the HTTPS_CONFIG environment variable to the content of the https.conf.template
HTTPS_CONFIG=$(envsubst < /etc/nginx/https.conf.template)
export HTTPS_CONFIG

View File

@@ -10,7 +10,7 @@ import {
} from '@remixicon/react'
import Confirm from '@/app/components/base/confirm'
import { ToastContext } from '@/app/components/base/toast'
import { deleteDataset } from '@/service/datasets'
import { checkIsUsedInApp, deleteDataset } from '@/service/datasets'
import type { DataSet } from '@/models/datasets'
import Tooltip from '@/app/components/base/tooltip'
import { Folder } from '@/app/components/base/icons/src/vender/solid/files'
@@ -36,6 +36,19 @@ const DatasetCard = ({
const [showRenameModal, setShowRenameModal] = useState(false)
const [showConfirmDelete, setShowConfirmDelete] = useState(false)
const [confirmMessage, setConfirmMessage] = useState<string>('')
// Before opening the delete-confirmation dialog, ask the backend whether any
// app still uses this dataset and pick the matching warning message.
const detectIsUsedByApp = useCallback(async () => {
try {
const { is_using: isUsedByApp } = await checkIsUsedInApp(dataset.id)
// Stronger warning text when apps currently depend on this knowledge base.
setConfirmMessage(isUsedByApp ? t('dataset.datasetUsedByApp')! : t('dataset.deleteDatasetConfirmContent')!)
}
catch (e: any) {
// presumably a Response-like error carrying a JSON body — verify against the fetch wrapper
const res = await e.json()
notify({ type: 'error', message: res?.message || 'Unknown error' })
}
// NOTE(review): the dialog opens even when the use-check failed, so it may
// show a stale or empty message in that case — confirm this is intended.
setShowConfirmDelete(true)
}, [dataset.id, notify, t])
const onConfirmDelete = useCallback(async () => {
try {
await deleteDataset(dataset.id)
@@ -44,10 +57,9 @@ const DatasetCard = ({
onSuccess()
}
catch (e: any) {
notify({ type: 'error', message: `${t('dataset.datasetDeleteFailed')}${'message' in e ? `: ${e.message}` : ''}` })
}
setShowConfirmDelete(false)
}, [dataset.id])
}, [dataset.id, notify, onSuccess, t])
const Operations = (props: HtmlContentProps) => {
const onMouseLeave = async () => {
@@ -63,7 +75,7 @@ const DatasetCard = ({
e.stopPropagation()
props.onClick?.()
e.preventDefault()
setShowConfirmDelete(true)
detectIsUsedByApp()
}
return (
<div className="relative w-full py-1" onMouseLeave={onMouseLeave}>
@@ -159,7 +171,7 @@ const DatasetCard = ({
/>
</div>
</div>
<div className='!hidden group-hover:!flex shrink-0 mx-1 w-[1px] h-[14px] bg-gray-200'/>
<div className='!hidden group-hover:!flex shrink-0 mx-1 w-[1px] h-[14px] bg-gray-200' />
<div className='!hidden group-hover:!flex shrink-0'>
<CustomPopover
htmlContent={<Operations />}
@@ -194,7 +206,7 @@ const DatasetCard = ({
{showConfirmDelete && (
<Confirm
title={t('dataset.deleteDatasetConfirmTitle')}
content={t('dataset.deleteDatasetConfirmContent')}
content={confirmMessage}
isShow={showConfirmDelete}
onClose={() => setShowConfirmDelete(false)}
onConfirm={onConfirmDelete}

View File

@@ -44,7 +44,8 @@ const preprocessLaTeX = (content: string) => {
if (typeof content !== 'string')
return content
return content.replace(/\\\[(.*?)\\\]/gs, (_, equation) => `$$${equation}$$`)
.replace(/\\\((.*?)\\\)/gs, (_, equation) => `$${equation}$`)
.replace(/\\\((.*?)\\\)/gs, (_, equation) => `$$${equation}$$`)
.replace(/(^|[^\\])\$(.+?)\$/gs, (_, prefix, equation) => `${prefix}$${equation}$`)
}
export function PreCode(props: { children: any }) {

View File

@@ -69,7 +69,7 @@ const WorkplaceSelector = () => {
<Menu.Items
className={cn(
`
absolute top-[1px] min-w-[200px] z-10 bg-white border-[0.5px] border-gray-200
absolute top-[1px] min-w-[200px] max-h-[70vh] overflow-y-scroll z-10 bg-white border-[0.5px] border-gray-200
divide-y divide-gray-100 origin-top-right rounded-xl
`,
s.popup,

View File

@@ -43,8 +43,14 @@ function useOutputVarList<T>({
handleOutVarRenameChange(id, [id, outputKeyOrders[changedIndex!]], [id, newKey])
}, [inputs, setInputs, handleOutVarRenameChange, id, outputKeyOrders, varKey, onOutputKeyOrdersChange])
// Produce a fresh output-variable key of the form `var_<n>` that does not
// collide with any key already present in inputs[varKey].
const generateNewKey = useCallback(() => {
// Start just past the current key count; existing keys may be sparse
// (e.g. after deletions), so probe upward until a free slot is found.
let keyIndex = Object.keys((inputs as any)[varKey]).length + 1
while (((inputs as any)[varKey])[`var_${keyIndex}`])
keyIndex++
return `var_${keyIndex}`
}, [inputs, varKey])
const handleAddVariable = useCallback(() => {
const newKey = `var_${Object.keys((inputs as any)[varKey]).length + 1}`
const newKey = generateNewKey()
const newInputs = produce(inputs, (draft: any) => {
draft[varKey] = {
...draft[varKey],
@@ -56,7 +62,7 @@ function useOutputVarList<T>({
})
setInputs(newInputs)
onOutputKeyOrdersChange([...outputKeyOrders, newKey])
}, [inputs, setInputs, varKey, outputKeyOrders, onOutputKeyOrdersChange])
}, [generateNewKey, inputs, setInputs, onOutputKeyOrdersChange, outputKeyOrders, varKey])
const [isShowRemoveVarConfirm, {
setTrue: showRemoveVarConfirm,

View File

@@ -8,6 +8,7 @@ const translation = {
deleteDatasetConfirmTitle: 'Dieses Wissen löschen?',
deleteDatasetConfirmContent:
'Das Löschen des Wissens ist unwiderruflich. Benutzer werden nicht mehr auf Ihr Wissen zugreifen können und alle Eingabeaufforderungen, Konfigurationen und Protokolle werden dauerhaft gelöscht.',
datasetUsedByApp: 'Das Wissen wird von einigen Apps verwendet. Apps werden dieses Wissen nicht mehr nutzen können, und alle Prompt-Konfigurationen und Protokolle werden dauerhaft gelöscht.',
datasetDeleted: 'Wissen gelöscht',
datasetDeleteFailed: 'Löschen des Wissens fehlgeschlagen',
didYouKnow: 'Wusstest du schon?',

View File

@@ -8,6 +8,7 @@ const translation = {
deleteDatasetConfirmTitle: 'Delete this Knowledge?',
deleteDatasetConfirmContent:
'Deleting the Knowledge is irreversible. Users will no longer be able to access your Knowledge, and all prompt configurations and logs will be permanently deleted.',
datasetUsedByApp: 'The knowledge is being used by some apps. Apps will no longer be able to use this Knowledge, and all prompt configurations and logs will be permanently deleted.',
datasetDeleted: 'Knowledge deleted',
datasetDeleteFailed: 'Failed to delete Knowledge',
didYouKnow: 'Did you know?',

View File

@@ -8,6 +8,7 @@ const translation = {
deleteDatasetConfirmTitle: 'Supprimer cette Connaissance ?',
deleteDatasetConfirmContent:
'La suppression de la Connaissance est irréversible. Les utilisateurs ne pourront plus accéder à votre Savoir, et toutes les configurations de prompt et les journaux seront supprimés de façon permanente.',
datasetUsedByApp: 'La connaissance est utilisée par certaines applications. Les applications ne pourront plus utiliser cette Connaissance, et toutes les configurations de prompts et les journaux seront définitivement supprimés.',
datasetDeleted: 'Connaissance supprimée',
datasetDeleteFailed: 'Échec de la suppression de la Connaissance',
didYouKnow: 'Saviez-vous ?',

View File

@@ -9,6 +9,7 @@ const translation = {
deleteDatasetConfirmTitle: 'क्या आप यह ज्ञान हटाना चाहते हैं?',
deleteDatasetConfirmContent:
'ज्ञान को हटाना अपरिवर्तनीय है। उपयोगकर्ता अब आपके ज्ञान को प्राप्त नहीं कर पाएंगे, और सभी प्रॉम्प्ट कॉन्फ़िगरेशन और लॉग स्थायी रूप से मिटा दिए जाएंगे।',
datasetUsedByApp: 'यह ज्ञान कुछ ऐप्स द्वारा उपयोग किया जा रहा है। ऐप्स अब इस ज्ञान का उपयोग नहीं कर पाएंगे, और सभी प्रॉम्प्ट कॉन्फ़िगरेशन और लॉग स्थायी रूप से हटा दिए जाएंगे।',
datasetDeleted: 'ज्ञान हटा दिया गया',
datasetDeleteFailed: 'ज्ञान हटाने में विफल',
didYouKnow: 'क्या आप जानते हैं?',

View File

@@ -8,6 +8,7 @@ const translation = {
deleteDatasetConfirmTitle: 'この知識を削除しますか?',
deleteDatasetConfirmContent:
'知識を削除すると元に戻すことはできません。ユーザーはもはやあなたの知識にアクセスできず、すべてのプロンプトの設定とログが永久に削除されます。',
datasetUsedByApp: 'この知識は一部のアプリによって使用されています。アプリはこの知識を使用できなくなり、すべてのプロンプト設定とログは永久に削除されます。',
datasetDeleted: '知識が削除されました',
datasetDeleteFailed: '知識の削除に失敗しました',
didYouKnow: 'ご存知ですか?',

View File

@@ -7,6 +7,7 @@ const translation = {
createDatasetIntro: '자체 텍스트 데이터를 가져오거나 LLM 컨텍스트를 강화하기 위해 웹훅을 통해 실시간 데이터를 기록할 수 있습니다.',
deleteDatasetConfirmTitle: '이 지식을 삭제하시겠습니까?',
deleteDatasetConfirmContent: '지식을 삭제하면 다시 되돌릴 수 없습니다. 사용자는 더 이상 귀하의 지식에 액세스할 수 없으며 모든 프롬프트 설정과 로그가 영구적으로 삭제됩니다.',
datasetUsedByApp: '이 지식은 일부 앱에서 사용 중입니다. 앱에서 더 이상 이 지식을 사용할 수 없게 되며, 모든 프롬프트 구성 및 로그가 영구적으로 삭제됩니다.',
datasetDeleted: '지식이 삭제되었습니다',
datasetDeleteFailed: '지식 삭제에 실패했습니다',
didYouKnow: '알고 계셨나요?',

View File

@@ -9,6 +9,7 @@ const translation = {
deleteDatasetConfirmTitle: 'Czy na pewno usunąć tę Wiedzę?',
deleteDatasetConfirmContent:
'Usunięcie Wiedzy jest nieodwracalne. Użytkownicy nie będą już mieli dostępu do Twojej Wiedzy, a wszystkie konfiguracje i logi zostaną trwale usunięte.',
datasetUsedByApp: 'Ta wiedza jest wykorzystywana przez niektóre aplikacje. Aplikacje nie będą już mogły korzystać z tej Wiedzy, a wszystkie konfiguracje podpowiedzi i logi zostaną trwale usunięte.',
datasetDeleted: 'Wiedza usunięta',
datasetDeleteFailed: 'Nie udało się usunąć Wiedzy',
didYouKnow: 'Czy wiedziałeś?',

View File

@@ -8,6 +8,7 @@ const translation = {
deleteDatasetConfirmTitle: 'Excluir este Conhecimento?',
deleteDatasetConfirmContent:
'A exclusão do Conhecimento é irreversível. Os usuários não poderão mais acessar seu Conhecimento e todas as configurações e registros de prompt serão excluídos permanentemente.',
datasetUsedByApp: 'O conhecimento está sendo usado por alguns aplicativos. Os aplicativos não poderão mais usar esse Conhecimento, e todas as configurações de prompt e logs serão excluídos permanentemente.',
datasetDeleted: 'Conhecimento excluído',
datasetDeleteFailed: 'Falha ao excluir o Conhecimento',
didYouKnow: 'Você sabia?',

View File

@@ -8,6 +8,7 @@ const translation = {
deleteDatasetConfirmTitle: 'Ștergeți această Cunoștință?',
deleteDatasetConfirmContent:
'Ștergerea Cunoștințelor este ireversibilă. Utilizatorii nu vor mai putea accesa Cunoștințele, iar toate configurațiile și jurnalele prompt vor fi șterse permanent.',
datasetUsedByApp: 'Cunoștințele sunt utilizate de unele aplicații. Aplicațiile nu vor mai putea utiliza aceste Cunoștințe, iar toate configurațiile de prompt și jurnalele vor fi șterse definitiv.',
datasetDeleted: 'Cunoștințe șterse',
datasetDeleteFailed: 'Eșec la ștergerea Cunoștințelor',
didYouKnow: 'Știați că?',

View File

@@ -8,6 +8,7 @@ const translation = {
deleteDatasetConfirmTitle: 'Видалити це Знання?',
deleteDatasetConfirmContent:
'Видалення "Знання" є незворотнім. Користувачі більше не матимуть доступу до Знань, а всі конфігурації підказок і журнали будуть безповоротно видалені.',
datasetUsedByApp: 'Ці знання використовуються деякими додатками. Додатки більше не зможуть використовувати ці Знання, а всі конфігурації підказок та журнали будуть остаточно видалені.',
datasetDeleted: 'Знання видалено',
datasetDeleteFailed: 'Не вдалося видалити Знання',
didYouKnow: 'Чи знаєте ви?',

View File

@@ -8,6 +8,7 @@ const translation = {
deleteDatasetConfirmTitle: 'Xóa Kiến thức này?',
deleteDatasetConfirmContent:
'Xóa Kiến thức là không thể đảo ngược. Người dùng sẽ không còn có khả năng truy cập Kiến thức của bạn nữa, và tất cả các cấu hình và nhật ký nhắc nhở sẽ bị xóa vĩnh viễn.',
datasetUsedByApp: 'Kiến thức này đang được sử dụng bởi một số ứng dụng. Các ứng dụng sẽ không thể sử dụng Kiến thức này nữa, và tất cả cấu hình lời nhắc và nhật ký sẽ bị xóa vĩnh viễn.',
datasetDeleted: 'Kiến thức đã bị xóa',
datasetDeleteFailed: 'Xóa Kiến thức không thành công',
didYouKnow: 'Bạn đã biết chưa?',

View File

@@ -8,6 +8,7 @@ const translation = {
deleteDatasetConfirmTitle: '要删除知识库吗?',
deleteDatasetConfirmContent:
'删除知识库是不可逆的。用户将无法再访问您的知识库,所有的提示配置和日志将被永久删除。',
datasetUsedByApp: '某些应用正在使用该知识库。应用将无法再使用该知识库,所有的提示配置和日志将被永久删除。',
datasetDeleted: '知识库已删除',
datasetDeleteFailed: '删除知识库失败',
didYouKnow: '你知道吗?',

View File

@@ -8,6 +8,7 @@ const translation = {
deleteDatasetConfirmTitle: '要刪除知識庫嗎?',
deleteDatasetConfirmContent:
'刪除知識庫是不可逆的。使用者將無法再訪問您的知識庫,所有的提示配置和日誌將被永久刪除。',
datasetUsedByApp: '這些知識正被一些應用程序使用。應用程序將無法再使用這些知識,所有提示配置和日誌將被永久刪除。',
datasetDeleted: '知識庫已刪除',
datasetDeleteFailed: '刪除知識庫失敗',
didYouKnow: '你知道嗎?',

View File

@@ -1,6 +1,6 @@
{
"name": "dify-web",
"version": "0.6.12",
"version": "0.6.12-fix1",
"private": true,
"scripts": {
"dev": "next dev",

View File

@@ -72,6 +72,12 @@ export const createEmptyDataset: Fetcher<DataSet, { name: string }> = ({ name })
return post<DataSet>('/datasets', { body: { name } })
}
// Ask the API whether any app currently references the given dataset.
// Resolves with `{ is_using }` from GET /datasets/:id/use-check. The request
// is marked silent — presumably suppressing the shared error toast so callers
// can handle failures themselves (confirm in the base fetch helper).
export const checkIsUsedInApp: Fetcher<{ is_using: boolean }, string> = (id) => {
  const endpoint = `/datasets/${id}/use-check`
  const requestOptions = { silent: true }
  return get<{ is_using: boolean }>(endpoint, {}, requestOptions)
}
// Issue DELETE /datasets/:id and resolve with the removed DataSet payload.
export const deleteDataset: Fetcher<DataSet, string> = (datasetID) => {
  const endpoint = `/datasets/${datasetID}`
  return del<DataSet>(endpoint)
}

View File

@@ -1,3 +1,5 @@
import { escape } from 'lodash-es'
// Resolve after roughly `ms` milliseconds; handy for awaiting a pause.
export const sleep = (ms: number) => {
  return new Promise((done) => {
    setTimeout(done, ms)
  })
}
@@ -35,5 +37,5 @@ export const getPurifyHref = (href: string) => {
if (!href)
return ''
return href.replace(/javascript:/ig, '').replace(/vbscript:/ig, '').replace(/data:/ig, '')
return escape(href)
}