Compare commits


86 Commits
0.3.1 ... 0.3.4

Author SHA1 Message Date
John Wang
54f3bbbf47 feat: bump version to 0.3.4 (#406) 2023-06-19 16:44:48 +08:00
zxhlyh
f797fab206 Fix/dataset add pages tip (#410) 2023-06-19 16:32:25 +08:00
Jyong
ce2996e7d4 Fix/dataset init (#409) 2023-06-19 16:32:03 +08:00
crazywoola
82d07ed2a8 doc: add annaconda info (#402) 2023-06-19 11:09:40 +08:00
crazywoola
c39d8f954e fix: word break in en and other languages (#385) 2023-06-19 09:36:05 +08:00
Jyong
226f28edcb Feature/self host notion import (#397) 2023-06-17 19:50:21 +08:00
John Wang
402b0b81d2 feat: add community helm support readme (#395) 2023-06-17 18:25:40 +08:00
Jyong
b08c19d926 fix encoding is none (#394) 2023-06-17 15:21:48 +08:00
Jyong
9253f72dea Feat/dataset notion import (#392)
Co-authored-by: StyleZhang <jasonapring2015@outlook.com>
Co-authored-by: JzoNg <jzongcode@gmail.com>
2023-06-16 21:47:51 +08:00
Jyong
f350948bde Fix the issue of decoding a non-UTF-8 encoded file using UTF-8 (#389) 2023-06-16 14:23:03 +08:00
Columbus
eeb2c28526 Fix the issue of decoding a non-UTF-8 encoded file using UTF-8 encodi… (#378) 2023-06-16 14:12:07 +08:00
Ben Jefferies
673288d58e fix(i18n): Make text gender neutral (#379) 2023-06-16 07:25:50 +08:00
Joel
772d67fd65 feat: suport var select options sortable (#376) 2023-06-15 17:07:17 +08:00
John Wang
7552a6be36 feat: add last active at for accounts (#375) 2023-06-15 13:59:36 +08:00
crazywoola
33200090e8 feat: update actions 2023-06-15 12:51:51 +08:00
Joel
01a6c725fa fix: max token tooltip description (#370) 2023-06-15 10:06:43 +08:00
crazywoola
f6e04389e4 Community i18n doc (#365) 2023-06-15 09:39:56 +08:00
zxhlyh
e22814b291 fix application model selector style (#360) 2023-06-14 14:23:41 +08:00
John Wang
a66ef7210b feat: bump version to 0.3.3 (#359) 2023-06-14 12:17:56 +08:00
John Wang
184afa69ff feat: add gpt-3.5-turbo-16k support and update openai gpt-3.5-turbo & Embedding Ada v2 unit price (#358) 2023-06-14 12:17:43 +08:00
John Wang
ab115b5f87 fix: completion stop invalid (#355) 2023-06-13 17:47:42 +08:00
Joel
3bbc4ad3db fix: change default help link to english (#354) 2023-06-13 17:12:51 +08:00
Joel
87af414a52 feat: stop response enchancement (#352) 2023-06-13 16:34:53 +08:00
Joel
72555d5df8 feat: add frontend sentry docker compose config (#353) 2023-06-13 16:30:31 +08:00
Joel
fff39a307a feat: use react sentry to support pass config via runtime (#350) 2023-06-13 16:04:54 +08:00
John Wang
a11f36ca60 fix: stop completion response not save to db (#351) 2023-06-13 15:47:58 +08:00
crazywoola
433f8cb57e Feature/add emoji to webapp (#345) 2023-06-13 14:54:12 +08:00
John Wang
cd136fb293 feat: add WEAVIATE_BATCH_SIZE (#349) 2023-06-13 14:49:40 +08:00
John Wang
6a3ab36101 feat: optimize weaviate batch size (#348) 2023-06-13 11:28:15 +08:00
John Wang
1af968e73a feat: optimize api language support (#344) 2023-06-13 10:06:49 +08:00
Panmuse
94646f29c3 Update README_CN.md (#342) 2023-06-12 21:14:34 +08:00
Panmuse
e028a0595c Update README.md (#341) 2023-06-12 21:14:21 +08:00
Joel
b16a7b0b3b feat: stop response call api (#340) 2023-06-12 16:37:03 +08:00
SergioRico1
e083a7067b Create README_ES.md (#335) 2023-06-10 18:25:13 +08:00
bowen
205459d54d fix: button abnormal style (#333) 2023-06-10 13:19:08 +08:00
Jyong
3d14431b96 Fix/excel data format (#334) 2023-06-09 20:21:11 +08:00
John Wang
2ba0ee989a feat: bump version to 0.3.2 (#330) 2023-06-09 16:25:26 +08:00
KVOJJJin
b055470147 Fix: xls not supported (#329) 2023-06-09 16:11:27 +08:00
Columbus
5943385d42 Fix: the bug that allows regular users to add unregistered users to the workspace. (#328) 2023-06-09 16:07:53 +08:00
lisaifei@cvte.com
0abd67288b feat: support xlsx file parsing (#304)
Co-authored-by: crazywoola <100913391+crazywoola@users.noreply.github.com>
2023-06-09 15:57:19 +08:00
Joel
bbe58327c8 feat: remove ph (#327) 2023-06-09 14:39:37 +08:00
Joel
299c51ebc4 feat: npm sdk to 2.0 to fix steaming problem (#326) 2023-06-09 14:36:48 +08:00
crazywoola
3a7f58d2a6 Feature/fix streaming mode (#324) 2023-06-09 14:24:59 +08:00
John Wang
6123bba96d feat: add reset-encrypt-key-pair cmd for self hosted mode (#325) 2023-06-09 11:36:38 +08:00
Joel
d5ab3b5072 fix: output code too long break ui (#320) 2023-06-08 16:27:37 +08:00
crazywoola
df26f82536 Feature/support xlsx (#311) 2023-06-08 15:23:38 +08:00
Joel
dbe0c43515 Chore: support gradient border and text (#317) 2023-06-08 09:38:11 +08:00
张今灵
f4052fdbc7 fix: analysis all time param (#316) 2023-06-07 22:18:21 +08:00
Joel
b5ade19c75 feat: fix frontend docker image build fail (#314) 2023-06-07 16:47:49 +08:00
Joel
040eacb8bd fix: safari 14 not show modal (#310) 2023-06-07 09:59:33 +08:00
杨睿
20899c44ff fix: segment search by keyword (#303) 2023-06-07 00:45:25 +08:00
Jyong
35a2beb195 delete segment not commit (#309) 2023-06-06 23:16:51 +08:00
crazywoola
2056093855 update docker compose cmd (#308) 2023-06-06 20:26:45 +08:00
Jyong
2bf48514bc fix markdown parser (#230) 2023-06-06 19:51:40 +08:00
crazywoola
c109b1a920 fix: stale.yml 2023-06-06 15:27:04 +08:00
crazywoola
45499328b8 fix: actions 2023-06-06 15:22:20 +08:00
crazywoola
4c61aa399d Create stale.yml 2023-06-06 15:19:27 +08:00
Joel
3e380c082a fix: reset some config not work: like var required status, dataset, feature status (#305) 2023-06-06 14:58:56 +08:00
zxhlyh
53db5bab36 Feat/add GitHub star icon (#302) 2023-06-06 11:22:00 +08:00
Joel
6483beb096 Feat/auto rule generate (#300) 2023-06-06 10:52:02 +08:00
zxhlyh
e61c84ca72 fix: header nav load more app (#296) 2023-06-06 10:42:32 +08:00
Joel
d70086b841 feat: sentry to dify account (#299) 2023-06-06 10:29:38 +08:00
John Wang
a3ee037d6d feat: optimize output parse failed error (#298) 2023-06-05 11:23:51 +08:00
Joel
2de18a6490 fix: ignore VSCode setting.json path (#297) 2023-06-05 10:54:09 +08:00
Joel
4134e915ce fix: tooltip covered by high z index element (#295) 2023-06-05 10:49:06 +08:00
Joel
a838ba7b46 Chore/ignore vscode setting (#293) 2023-06-05 10:15:16 +08:00
Joel
5f38214a41 chore: mute handle message cut off (#291) 2023-06-05 09:55:03 +08:00
John Wang
19b5cb1e10 feat: fix json end with `` (#285) 2023-06-02 17:34:24 +08:00
John Wang
2478c88e07 feat: increase dataset description length to 400 (#283) 2023-06-02 14:03:18 +08:00
KVOJJJin
59e59c19b2 fix: missing imports (#281) 2023-06-01 23:40:34 +08:00
KVOJJJin
c67f626b66 Feat: Support re-segmentation (#114)
Co-authored-by: John Wang <takatost@gmail.com>
Co-authored-by: Jyong <718720800@qq.com>
Co-authored-by: 金伟强 <iamjoel007@gmail.com>
2023-06-01 23:19:36 +08:00
crazywoola
f65a3ad1cc Feature/replace default icon in overview (#279) 2023-06-01 13:06:56 +08:00
John Wang
490858a4d5 feat: auto rule generator (#273) 2023-05-31 22:03:15 +08:00
John Wang
44a1aa5e44 fix: dataset_tool npe (#274) 2023-05-31 17:16:27 +08:00
Joel
a616bf3129 Fix/long more suggestion not see all (#272) 2023-05-31 17:09:55 +08:00
Joel
f2f19484b8 fix: text generation too long hide the operation btn (#271) 2023-05-31 16:24:30 +08:00
Joel
f572b55237 chore: link prefetch deprecated. Remove warning message. (#270) 2023-05-31 14:56:14 +08:00
Joel
554570dc22 feat: feature support UI preview (#269) 2023-05-31 14:10:59 +08:00
Joel
5239b2c7ab Feat/dashboard more chart (#266) 2023-05-31 11:21:30 +08:00
John Wang
ae94b067b3 feat: new stats (#265) 2023-05-31 11:20:24 +08:00
Joel
5e772bd10b fix: stop response btn hide messages (#261) 2023-05-30 16:15:08 +08:00
Joel
91bcbd0b26 fix: svg attr in ts file (#260) 2023-05-30 15:26:26 +08:00
Joel
54bb309d87 fix: remove sentry for community edtion and dev (#259) 2023-05-30 15:09:25 +08:00
John Wang
75f7a96025 feat: ignore validate failed error log (#256) 2023-05-30 12:25:42 +08:00
John Wang
ccd80653ff fix: query empty not allow (#255) 2023-05-30 12:24:51 +08:00
John Wang
5ca88a4fd9 fix: raw json parse in llm router chain (#254) 2023-05-30 12:16:45 +08:00
261 changed files with 10034 additions and 3025 deletions

.github/workflows/stale.yml (new file)

@@ -0,0 +1,29 @@
# This workflow warns and then closes issues and PRs that have had no activity for a specified amount of time.
#
# You can adjust the behavior by modifying this file.
# For more information, see:
# https://github.com/actions/stale
name: Mark stale issues and pull requests
on:
  schedule:
    - cron: '0 3 * * *'

jobs:
  stale:
    runs-on: ubuntu-latest
    permissions:
      issues: write
      pull-requests: write
    steps:
      - uses: actions/stale@v5
        with:
          days-before-issue-stale: 30
          days-before-issue-close: 3
          repo-token: ${{ secrets.GITHUB_TOKEN }}
          stale-issue-message: "Close due to it's no longer active, if you have any questions, you can reopen it."
          stale-pr-message: "Close due to it's no longer active, if you have any questions, you can reopen it."
          stale-issue-label: 'no-issue-activity'
          stale-pr-label: 'no-pr-activity'

.gitignore

@@ -130,6 +130,7 @@ dmypy.json
.idea/'
.DS_Store
web/.vscode/settings.json
# Intellij IDEA Files
.idea/


@@ -54,3 +54,8 @@ Did you have an issue, like a merge conflict, or don't know how to open a pull r
## Community channels
Stuck somewhere? Have any questions? Join the [Discord Community Server](https://discord.gg/AhzKf7dNgk). We are here to help!
### i18n (Internationalization) Support
We are looking for contributors to help with translations in other languages. If you are interested in helping, please join the [Discord Community Server](https://discord.gg/AhzKf7dNgk) and let us know.
Also check out the [Frontend i18n README](web/i18n/README_EN.md) for more information.


@@ -51,3 +51,7 @@ git clone git@github.com:<github_username>/dify.git
## 社区渠道
遇到困难了吗?有任何问题吗? 加入 [Discord Community Server](https://discord.gg/AhzKf7dNgk),我们将为您提供帮助。
### 多语言支持
需要参与贡献翻译内容,请参阅[前端多语言翻译 README](web/i18n/README_CN.md)。


@@ -2,14 +2,12 @@
<p align="center">
<a href="./README.md">English</a> |
<a href="./README_CN.md">简体中文</a> |
<a href="./README_JA.md">日本語</a>
<a href="./README_JA.md">日本語</a> |
<a href="./README_ES.md">Español</a>
</p>
[Website](https://dify.ai) • [Docs](https://docs.dify.ai) • [Twitter](https://twitter.com/dify_ai) • [Discord](https://discord.gg/FngNHpbcY7)
Vote for us on Product Hunt ↓
<a href="https://www.producthunt.com/posts/dify-ai"><img src="https://api.producthunt.com/widgets/embed-image/v1/featured.svg?sanitize=true&post_id=dify-ai&theme=light" alt="Product Hunt Badge" width="250" height="54"></a>
**Dify** is an easy-to-use LLMOps platform designed to empower more people to create sustainable, AI-native applications. With visual orchestration for various application types, Dify offers out-of-the-box, ready-to-use applications that can also serve as Backend-as-a-Service APIs. Unify your development process with one API for plugins and datasets integration, and streamline your operations using a single interface for prompt engineering, visual analytics, and continuous improvement.
Applications created with Dify include:
@@ -42,11 +40,16 @@ The easiest way to start the Dify server is to run our [docker-compose.yml](dock
```bash
cd docker
docker-compose up -d
docker compose up -d
```
After running, you can access the Dify dashboard in your browser at [http://localhost/install](http://localhost/install) and start the initialization installation process.
### Helm Chart
A big thanks to @BorisPolonsky for providing us with a [Helm Chart](https://helm.sh/) version, which allows Dify to be deployed on Kubernetes.
You can go to https://github.com/BorisPolonsky/dify-helm for deployment information.
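For illustration, deploying via the chart might look like the sketch below; the repo URL, chart name, and release name here are assumptions rather than documented values, so check the dify-helm README for the real ones:

```bash
# Hypothetical Helm install; verify the repo URL and chart name in the dify-helm README.
helm repo add dify https://borispolonsky.github.io/dify-helm
helm repo update
helm install my-release dify/dify
```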
### Configuration
If you need to customize the configuration, please refer to the comments in our [docker-compose.yml](docker/docker-compose.yaml) file and manually set the environment configuration. After making the changes, please run 'docker-compose up -d' again.
@@ -85,6 +88,32 @@ A: English and Chinese are currently supported, and you can contribute language
[![Star History Chart](https://api.star-history.com/svg?repos=langgenius/dify&type=Date)](https://star-history.com/#langgenius/dify&Date)
## Contributing
We welcome you to contribute to Dify to help make Dify better. We welcome contributions in various ways, submitting code, issues, new ideas, or sharing the interesting and useful AI applications you have created based on Dify. At the same time, we also welcome you to share Dify at different events, conferences, and social media.
### Submit a Pull Request
To ensure proper review, all code contributions, including from contributors with direct commit access, must be submitted as PR requests and approved by core developers before merging branches.
We welcome PRs from everyone! If you're willing to help out, you can learn more about how to contribute code to the project in the [Contribution Guide](CONTRIBUTING.md).
### Submit issues or ideas
You can submit your issues or ideas by adding issues to the Dify repository. If you encounter issues, please describe the steps you took to encounter the issue as much as possible so we can better discover it. If you have any new ideas for our product, we also welcome your feedback. Please share your insights as much as possible so we can get more feedback and further discussion in the community.
### Share your applications
We encourage all community members to share their AI applications built on Dify, which can be applied to different scenarios or different users. This will provide powerful inspiration for people who want to create AI capabilities! You can share your experience by [submitting an issue in the Dify-user-case repository](https://github.com/langgenius/dify-user-case/issues).
### Share Dify with others
We encourage community contributors to actively demonstrate different aspects of using Dify. You can talk or share any feature of using Dify at meetups and conferences, blogs or social media. We believe your unique sharing will be of great help to others! Mention @Dify.AI on Twitter and/or communicate on [Discord](https://discord.gg/FngNHpbcY7) so we can give pointers and tips and help you spread the word by promoting your content on the different Dify communication channels.
### Help others
You can also help people in need of help on Discord, GitHub issues or other social platforms, guide others to solve problems encountered during use and share usage experiences. This is also a great contribution! If you want to become a maintainer of the Dify community, please contact the official team via [Discord](https://discord.gg/FngNHpbcY7) or email us at support@dify.ai.
## Contact Us
If you have any questions, suggestions, or partnership inquiries, feel free to contact us through the following channels:
@@ -95,12 +124,6 @@ If you have any questions, suggestions, or partnership inquiries, feel free to c
We're eager to assist you and together create more fun and useful AI applications!
## Contributing
To ensure proper review, all code contributions - including those from contributors with direct commit access - must be submitted via pull requests and approved by the core development team prior to being merged.
We welcome all pull requests! If you'd like to help, check out the [Contribution Guide](CONTRIBUTING.md) for more information on how to get started.
## Security
To protect your privacy, please avoid posting security issues on GitHub. Instead, send your questions to security@dify.ai and we will provide you with a more detailed answer.


@@ -2,15 +2,13 @@
<p align="center">
<a href="./README.md">English</a> |
<a href="./README_CN.md">简体中文</a> |
<a href="./README_JA.md">日本語</a>
<a href="./README_JA.md">日本語</a> |
<a href="./README_ES.md">Español</a>
</p>
[官方网站](https://dify.ai) • [文档](https://docs.dify.ai/v/zh-hans) • [Twitter](https://twitter.com/dify_ai) • [Discord](https://discord.gg/FngNHpbcY7)
在 Product Hunt 上投我们一票吧 ↓
<a href="https://www.producthunt.com/posts/dify-ai"><img src="https://api.producthunt.com/widgets/embed-image/v1/featured.svg?sanitize=true&post_id=dify-ai&theme=light" alt="Product Hunt Badge" width="250" height="54"></a>
**Dify** 是一个易用的 LLMOps 平台,旨在让更多人可以创建可持续运营的原生 AI 应用。Dify 提供多种类型应用的可视化编排,应用可开箱即用,也能以“后端即服务”的 API 提供服务。
通过 Dify 创建的应用包含了:
@@ -44,11 +42,16 @@ Dify 兼容 Langchain这意味着我们将逐步支持多种 LLMs ,目前
```bash
cd docker
docker-compose up -d
docker compose up -d
```
运行后,可以在浏览器上访问 [http://localhost/install](http://localhost/install) 进入 Dify 控制台并开始初始化安装操作。
### Helm Chart
非常感谢 @BorisPolonsky 为我们提供了一个 [Helm Chart](https://helm.sh/) 版本,可以在 Kubernetes 上部署 Dify。
您可以前往 https://github.com/BorisPolonsky/dify-helm 来获取部署信息。
### 配置
需要自定义配置,请参考我们的 [docker-compose.yml](docker/docker-compose.yaml) 文件中的注释,并手动设置环境配置,修改完毕后,请再次执行 `docker-compose up -d`
@@ -86,6 +89,29 @@ A: 现已支持英文与中文,你可以为我们贡献语言包。
[![Star History Chart](https://api.star-history.com/svg?repos=langgenius/dify&type=Date)](https://star-history.com/#langgenius/dify&Date)
## 贡献
我们欢迎你为 Dify 作出贡献帮助 Dify 变得更好。我们欢迎各种方式的贡献,提交代码、问题、新想法、或者分享你基于 Dify 创建出的各种有趣有用的 AI 应用。同时,我们也欢迎你在不同的活动、研讨会、社交媒体上分享 Dify。
### 贡献代码
为了确保正确审查,所有代码贡献 - 包括来自具有直接提交更改权限的贡献者 - 都必须提交 PR 请求并在合并分支之前得到核心开发人员的批准。
我们欢迎所有人提交 PR如果您愿意提供帮助可以在 [贡献指南](CONTRIBUTING_CN.md) 中了解有关如何为项目做出代码贡献的更多信息。
### 提交问题或想法
你可以通过 Dify 代码仓库新增 issues 来提交你的问题或想法。如遇到问题,请尽可能描述你遇到问题的操作步骤,以便我们更好地发现它。如果你对我们的产品有任何新想法,也欢迎向我们反馈,请尽可能多地分享你的见解,以便我们在社区中获得更多反馈和进一步讨论。
### 分享你的应用
我们鼓励所有社区成员分享他们基于 Dify 创造出的 AI 应用,它们可以是应用于不同情景或不同用户,这将有助于为希望基于 AI 能力创造的人们提供强大灵感!你可以通过 [Dify-user-case 仓库项目提交 issue](https://github.com/langgenius/dify-user-case) 来分享你的应用案例。
### 向别人分享 Dify
我们鼓励社区贡献者们积极展示你使用 Dify 的不同角度。你可以通过线下研讨会、博客或社交媒体上谈论或分享你使用 Dify 的任意功能,相信你独特的使用分享会给别人带来非常大的帮助!如果你需要任何指导帮助,欢迎联系我们 support@dify.ai ,你也可以在 twitter @Dify.AI 或在 [Discord 社区](https://discord.gg/FngNHpbcY7)交流来帮助你传播信息。
### 帮助别人
你还可以在 Discord、GitHub issues或其他社交平台上帮助需要帮助的人指导别人解决使用过程中遇到的问题和分享使用经验。这也是个非常了不起的贡献如果你希望成为 Dify 社区的维护者,请通过[Discord 社区](https://discord.gg/FngNHpbcY7) 联系官方团队或邮件联系我们 support@dify.ai.
## 联系我们
如果您有任何问题、建议或合作意向,欢迎通过以下方式联系我们:
@@ -94,12 +120,6 @@ A: 现已支持英文与中文,你可以为我们贡献语言包。
- 在我们的 [Discord 社区](https://discord.gg/FngNHpbcY7) 上加入讨论
- 发送邮件至 hello@dify.ai
## 贡献代码
为了确保正确审查,所有代码贡献 - 包括来自具有直接提交更改权限的贡献者 - 都必须提交 PR 请求并在合并分支之前得到核心开发人员的批准。
我们欢迎所有人提交 PR如果您愿意提供帮助可以在 [贡献指南](CONTRIBUTING_CN.md) 中了解有关如何为项目做出贡献的更多信息。
## 安全
为了保护您的隐私,请避免在 GitHub 上发布安全问题。发送问题至 security@dify.ai我们将为您做更细致的解答。

README_ES.md (new file)

@@ -0,0 +1,124 @@
![](./images/describe-en.png)
<p align="center">
<a href="./README.md">English</a> |
<a href="./README_CN.md">简体中文</a> |
<a href="./README_JA.md">日本語</a> |
<a href="./README_ES.md">Español</a>
</p>
[Sitio web](https://dify.ai) • [Documentación](https://docs.dify.ai) • [Twitter](https://twitter.com/dify_ai) • [Discord](https://discord.gg/FngNHpbcY7)
**Dify** es una plataforma LLMOps fácil de usar diseñada para capacitar a más personas para que creen aplicaciones sostenibles basadas en IA. Con orquestación visual para varios tipos de aplicaciones, Dify ofrece aplicaciones listas para usar que también pueden funcionar como APIs de Backend-as-a-Service. Unifica tu proceso de desarrollo con una API para la integración de complementos y conjuntos de datos, y agiliza tus operaciones utilizando una interfaz única para la ingeniería de indicaciones, análisis visual y mejora continua.
Las aplicaciones creadas con Dify incluyen:
- Sitios web listos para usar que admiten el modo de formulario y el modo de conversación por chat.
- Una API única que abarca capacidades de complementos, mejora de contexto y más, lo que te ahorra esfuerzo de programación en el backend.
- Análisis visual de datos, revisión de registros y anotación para aplicaciones.
Dify es compatible con Langchain, lo que significa que gradualmente admitiremos múltiples LLMs, actualmente compatibles con:
- GPT 3 (text-davinci-003)
- GPT 3.5 Turbo (ChatGPT)
- GPT-4
## Usar servicios en la nube
Visita [Dify.ai](https://dify.ai)
## Instalar la Edición Comunitaria
### Requisitos del sistema
Antes de instalar Dify, asegúrate de que tu máquina cumple con los siguientes requisitos mínimos del sistema:
- CPU >= 1 Core
- RAM >= 4GB
### Inicio rápido
La forma más sencilla de iniciar el servidor de Dify es ejecutar nuestro archivo [docker-compose.yml](docker/docker-compose.yaml). Antes de ejecutar el comando de instalación, asegúrate de que [Docker](https://docs.docker.com/get-docker/) y [Docker Compose](https://docs.docker.com/compose/install/) estén instalados en tu máquina:
```bash
cd docker
docker compose up -d
```
Después de ejecutarlo, puedes acceder al panel de control de Dify en tu navegador desde [http://localhost/install](http://localhost/install) y comenzar el proceso de instalación de inicialización.
### Helm Chart
Un gran agradecimiento a @BorisPolonsky por proporcionarnos una versión de [Helm Chart](https://helm.sh/), que permite desplegar Dify en Kubernetes.
Puede ir a https://github.com/BorisPolonsky/dify-helm para obtener información de despliegue.
### Configuración
Si necesitas personalizar la configuración, consulta los comentarios en nuestro archivo [docker-compose.yml](docker/docker-compose.yaml) y configura manualmente la configuración del entorno. Después de realizar los cambios, ejecuta nuevamente 'docker-compose up -d'.
## Hoja de ruta
Funciones en desarrollo:
- **Conjuntos de datos**, admitiendo más conjuntos de datos, por ejemplo, sincronización de contenido desde Notion o páginas web.
Admitiremos más conjuntos de datos, incluidos texto, páginas web e incluso contenido de Notion. Los usuarios pueden construir aplicaciones de IA basadas en sus propias fuentes de datos
- **Complementos**, introduciendo complementos estándar de ChatGPT para aplicaciones, o utilizando complementos producidos por Dify.
Lanzaremos complementos que cumplan con el estándar de ChatGPT, o nuestros propios complementos de Dify para habilitar más capacidades en las aplicaciones.
- **Modelos de código abierto**, por ejemplo, adoptar Llama como proveedor de modelos o para un ajuste adicional.
Trabajaremos con excelentes modelos de código abierto como Llama, proporcionándolos como opciones de modelos en nuestra plataforma o utilizándolos para un ajuste adicional.
## Preguntas y respuestas
**P: ¿Qué puedo hacer con Dify?**
R: Dify es una herramienta de desarrollo y operaciones de LLM, simple pero poderosa. Puedes usarla para construir aplicaciones de calidad comercial y asistentes personales. Si deseas desarrollar tus propias aplicaciones, LangDifyGenius puede ahorrarte trabajo en el backend al integrar con OpenAI y ofrecer capacidades de operaciones visuales, lo que te permite mejorar y entrenar continuamente tu modelo GPT.
**P: ¿Cómo uso Dify para "entrenar" mi propio modelo?**
R: Una aplicación valiosa consta de Ingeniería de indicaciones, mejora de contexto y ajuste fino. Hemos creado un enfoque de programación híbrida que combina las indicaciones con lenguajes de programación (similar a un motor de plantillas), lo que facilita la incorporación de texto largo o la captura de subtítulos de un video de YouTube ingresado por el usuario, todo lo cual se enviará como contexto para que los LLM lo procesen. Damos gran importancia a la operabilidad de la aplicación, con los datos generados por los usuarios durante el uso de la aplicación disponibles para análisis, anotación y entrenamiento continuo. Sin las herramientas adecuadas, estos pasos pueden llevar mucho tiempo.
**P: ¿Qué necesito preparar si quiero crear mi propia aplicación?**
R: Suponemos que ya tienes una clave de API de OpenAI; si no la tienes, por favor regístrate. ¡Si ya tienes contenido que pueda servir como contexto de entrenamiento, eso es genial!
**P: ¿Qué idiomas de interfaz están disponibles?**
R: Actualmente se admiten inglés y chino, y puedes contribuir con paquetes de idiomas.
## Historial de estrellas
[![Gráfico de historial de estrellas](https://api.star-history.com/svg?repos=langgenius/dify&type=Date)](https://star-history.com/#langgenius/dify&Date)
## Contáctanos
Si tienes alguna pregunta, sugerencia o consulta sobre asociación, no dudes en contactarnos a través de los siguientes canales:
- Presentar un problema o una solicitud de extracción en nuestro repositorio de GitHub.
- Únete a la discusión en nuestra comunidad de [Discord](https://discord.gg/FngNHpbcY7).
- Envía un correo electrónico a hello@dify.ai.
¡Estamos ansiosos por ayudarte y crear juntos aplicaciones de IA más divertidas y útiles!
## Contribuciones
Para garantizar una revisión adecuada, todas las contribuciones de código, incluidas las de los colaboradores con acceso directo a los compromisos, deben enviarse mediante solicitudes de extracción y ser aprobadas por el equipo principal de
desarrollo antes de fusionarse.
¡Agradecemos todas las solicitudes de extracción! Si deseas ayudar, consulta la [Guía de Contribución](CONTRIBUTING.md) para obtener más información sobre cómo comenzar.
## Seguridad
Para proteger tu privacidad, evita publicar problemas de seguridad en GitHub. En su lugar, envía tus preguntas a security@dify.ai y te proporcionaremos una respuesta más detallada.
## Citación
Este software utiliza el siguiente software de código abierto:
- Chase, H. (2022). LangChain [Software de computadora]. https://github.com/hwchase17/langchain
- Liu, J. (2022). LlamaIndex [Software de computadora]. doi: 10.5281/zenodo.1234.
Para obtener más información, consulta el sitio web oficial o el texto de la licencia del software correspondiente.
## Licencia
Este repositorio está disponible bajo la [Licencia de código abierto de Dify](LICENSE).


@@ -2,14 +2,12 @@
<p align="center">
<a href="./README.md">English</a> |
<a href="./README_CN.md">简体中文</a> |
<a href="./README_JA.md">日本語</a>
<a href="./README_JA.md">日本語</a> |
<a href="./README_ES.md">Español</a>
</p>
[Web サイト](https://dify.ai) • [ドキュメント](https://docs.dify.ai) • [Twitter](https://twitter.com/dify_ai) • [Discord](https://discord.gg/FngNHpbcY7)
Product Huntで私たちに投票してください ↓
<a href="https://www.producthunt.com/posts/dify-ai"><img src="https://api.producthunt.com/widgets/embed-image/v1/featured.svg?sanitize=true&post_id=dify-ai&theme=light" alt="Product Hunt Badge" width="250" height="54"></a>
**Dify** は、より多くの人々が持続可能な AI ネイティブアプリケーションを作成できるように設計された、使いやすい LLMOps プラットフォームです。様々なアプリケーションタイプに対応したビジュアルオーケストレーションにより Dify は Backend-as-a-Service API としても機能する、すぐに使えるアプリケーションを提供します。プラグインやデータセットを統合するための1つの API で開発プロセスを統一し、プロンプトエンジニアリング、ビジュアル分析、継続的な改善のための1つのインターフェイスを使って業務を合理化します。
@@ -43,11 +41,16 @@ Dify サーバーを起動する最も簡単な方法は、[docker-compose.yml](
```bash
cd docker
docker-compose up -d
docker compose up -d
```
実行後、ブラウザで [http://localhost/install](http://localhost/install) にアクセスし、初期化インストール作業を開始することができます。
### Helm Chart
@BorisPolonsky に大感謝します。彼は Dify を Kubernetes 上にデプロイするための [Helm Chart](https://helm.sh/) バージョンを提供してくれました。
デプロイ情報については、https://github.com/BorisPolonsky/dify-helm をご覧ください。
### 構成
カスタマイズが必要な場合は、[docker-compose.yml](docker/docker-compose.yaml) ファイルのコメントを参照し、手動で環境設定をお願いします。変更後、再度 'docker-compose up -d' を実行してください。


@@ -22,6 +22,7 @@ CELERY_BROKER_URL=redis://:difyai123456@localhost:6379/1
# redis configuration
REDIS_HOST=localhost
REDIS_PORT=6379
REDIS_USERNAME=''
REDIS_PASSWORD=difyai123456
REDIS_DB=0
@@ -72,6 +73,7 @@ VECTOR_STORE=weaviate
WEAVIATE_ENDPOINT=http://localhost:8080
WEAVIATE_API_KEY=WVF5YThaHlkYwhGUSmCRgsX3tD5ngdN8pkih
WEAVIATE_GRPC_ENABLED=false
WEAVIATE_BATCH_SIZE=100
# Qdrant configuration, use `path:` prefix for local mode or `https://your-qdrant-cluster-url.qdrant.io` for remote mode
QDRANT_URL=path:storage/qdrant
@@ -83,3 +85,9 @@ SENTRY_DSN=
# DEBUG
DEBUG=false
SQLALCHEMY_ECHO=false
# Notion import configuration, support public and internal
NOTION_INTEGRATION_TYPE=public
NOTION_CLIENT_SECRET=you-client-secret
NOTION_CLIENT_ID=you-client-id
NOTION_INTERNAL_SECRET=you-internal-secret


@@ -17,6 +17,11 @@
```bash
openssl rand -base64 42
```
3.5 If you use Anaconda, create a new environment and activate it
```bash
conda create --name dify python=3.10
conda activate dify
```
4. Install dependencies
```bash
pip install -r requirements.txt


@@ -1,5 +1,7 @@
# -*- coding:utf-8 -*-
import os
from datetime import datetime
if not os.environ.get("DEBUG") or os.environ.get("DEBUG").lower() != 'true':
    from gevent import monkey
    monkey.patch_all()
@@ -18,7 +20,7 @@ from extensions.ext_database import db
from extensions.ext_login import login_manager
# DO NOT REMOVE BELOW
from models import model, account, dataset, web, task
from models import model, account, dataset, web, task, source
from events import event_handlers
# DO NOT REMOVE ABOVE
@@ -122,6 +124,9 @@ def load_user(user_id):
account.current_tenant_id = tenant_account_join.tenant_id
session['workspace_id'] = account.current_tenant_id
account.last_active_at = datetime.utcnow()
db.session.commit()
# Log in the user with the updated user_id
flask_login.login_user(account, remember=True)


@@ -3,15 +3,19 @@ import random
import string
import click
from flask import current_app
from libs.password import password_pattern, valid_password, hash_password
from libs.helper import email as email_validate
from extensions.ext_database import db
from models.account import InvitationCode
from libs.rsa import generate_key_pair
from models.account import InvitationCode, Tenant
from models.model import Account
import secrets
import base64
from models.provider import Provider
@click.command('reset-password', help='Reset the account password.')
@click.option('--email', prompt=True, help='The email address of the account whose password you need to reset')
@@ -73,6 +77,31 @@ def reset_email(email, new_email, email_confirm):
click.echo(click.style('Congratulations!, email has been reset.', fg='green'))
@click.command('reset-encrypt-key-pair', help='Reset the asymmetric key pair of workspace for encrypt LLM credentials. '
                                              'After the reset, all LLM credentials will become invalid, '
                                              'requiring re-entry.'
                                              'Only support SELF_HOSTED mode.')
@click.confirmation_option(prompt=click.style('Are you sure you want to reset encrypt key pair?'
                                              ' this operation cannot be rolled back!', fg='red'))
def reset_encrypt_key_pair():
    if current_app.config['EDITION'] != 'SELF_HOSTED':
        click.echo(click.style('Sorry, only support SELF_HOSTED mode.', fg='red'))
        return

    tenant = db.session.query(Tenant).first()
    if not tenant:
        click.echo(click.style('Sorry, no workspace found. Please enter /install to initialize.', fg='red'))
        return

    tenant.encrypt_public_key = generate_key_pair(tenant.id)

    db.session.query(Provider).filter(Provider.provider_type == 'custom').delete()
    db.session.commit()

    click.echo(click.style('Congratulations! '
                           'the asymmetric key pair of workspace {} has been reset.'.format(tenant.id), fg='green'))
@click.command('generate-invitation-codes', help='Generate invitation codes.')
@click.option('--batch', help='The batch of invitation codes.')
@click.option('--count', prompt=True, help='Invitation codes count.')
@@ -134,3 +163,4 @@ def register_commands(app):
app.cli.add_command(reset_password)
app.cli.add_command(reset_email)
app.cli.add_command(generate_invitation_codes)
app.cli.add_command(reset_encrypt_key_pair)
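Because register_commands wires the command into the Flask CLI, a self-hosted operator would run it roughly as follows (the working directory and environment setup are assumptions, not shown in the diff):

```bash
# Illustrative invocation from the api directory of a self-hosted install.
cd api
flask reset-encrypt-key-pair
# Prompts for confirmation, regenerates the workspace key pair, and deletes
# stored custom provider credentials, which then must be re-entered.
```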


@@ -43,6 +43,7 @@ DEFAULTS = {
'SENTRY_TRACES_SAMPLE_RATE': 1.0,
'SENTRY_PROFILES_SAMPLE_RATE': 1.0,
'WEAVIATE_GRPC_ENABLED': 'True',
'WEAVIATE_BATCH_SIZE': 100,
'CELERY_BACKEND': 'database',
'PDF_PREVIEW': 'True',
'LOG_LEVEL': 'INFO',
@@ -78,7 +79,7 @@ class Config:
self.CONSOLE_URL = get_env('CONSOLE_URL')
self.API_URL = get_env('API_URL')
self.APP_URL = get_env('APP_URL')
self.CURRENT_VERSION = "0.3.1"
self.CURRENT_VERSION = "0.3.4"
self.COMMIT_SHA = get_env('COMMIT_SHA')
self.EDITION = "SELF_HOSTED"
self.DEPLOY_ENV = get_env('DEPLOY_ENV')
@@ -138,6 +139,7 @@ class Config:
self.WEAVIATE_ENDPOINT = get_env('WEAVIATE_ENDPOINT')
self.WEAVIATE_API_KEY = get_env('WEAVIATE_API_KEY')
self.WEAVIATE_GRPC_ENABLED = get_bool_env('WEAVIATE_GRPC_ENABLED')
self.WEAVIATE_BATCH_SIZE = int(get_env('WEAVIATE_BATCH_SIZE'))
# qdrant settings
self.QDRANT_URL = get_env('QDRANT_URL')
@@ -185,6 +187,12 @@ class Config:
# For temp use only
# set default LLM provider, default is 'openai', support `azure_openai`
self.DEFAULT_LLM_PROVIDER = get_env('DEFAULT_LLM_PROVIDER')
# notion import setting
self.NOTION_CLIENT_ID = get_env('NOTION_CLIENT_ID')
self.NOTION_CLIENT_SECRET = get_env('NOTION_CLIENT_SECRET')
self.NOTION_INTEGRATION_TYPE = get_env('NOTION_INTEGRATION_TYPE')
self.NOTION_INTERNAL_SECRET = get_env('NOTION_INTERNAL_SECRET')
class CloudEditionConfig(Config):


@@ -9,13 +9,13 @@ api = ExternalApi(bp)
from . import setup, version, apikey, admin
# Import app controllers
from .app import app, site, completion, model_config, statistic, conversation, message
from .app import app, site, completion, model_config, statistic, conversation, message, generator
# Import auth controllers
from .auth import login, oauth
from .auth import login, oauth, data_source_oauth
# Import datasets controllers
from .datasets import datasets, datasets_document, datasets_segments, file, hit_testing
from .datasets import datasets, datasets_document, datasets_segments, file, hit_testing, data_source
# Import workspace controllers
from .workspace import workspace, members, providers, account


@@ -8,6 +8,7 @@ from werkzeug.exceptions import NotFound, Unauthorized
from controllers.console import api
from controllers.console.wraps import only_edition_cloud
from extensions.ext_database import db
from libs.helper import supported_language
from models.model import RecommendedApp, App, InstalledApp
@@ -47,8 +48,7 @@ class InsertExploreAppListApi(Resource):
parser.add_argument('desc', type=str, location='json')
parser.add_argument('copyright', type=str, location='json')
parser.add_argument('privacy_policy', type=str, location='json')
parser.add_argument('language', type=str, required=True, nullable=False, choices=['en-US', 'zh-Hans'],
location='json')
parser.add_argument('language', type=supported_language, required=True, nullable=False, location='json')
parser.add_argument('category', type=str, required=True, nullable=False, location='json')
parser.add_argument('position', type=int, required=True, nullable=False, location='json')
args = parser.parse_args()


@@ -9,18 +9,13 @@ from werkzeug.exceptions import Unauthorized, Forbidden
from constants.model_template import model_templates, demo_model_templates
from controllers.console import api
from controllers.console.app.error import AppNotFoundError, ProviderNotInitializeError, ProviderQuotaExceededError, \
CompletionRequestError, ProviderModelCurrentlyNotSupportError
from controllers.console.app.error import AppNotFoundError
from controllers.console.setup import setup_required
from controllers.console.wraps import account_initialization_required
from core.generator.llm_generator import LLMGenerator
from core.llm.error import ProviderTokenNotInitError, QuotaExceededError, LLMBadRequestError, LLMAPIConnectionError, \
LLMAPIUnavailableError, LLMRateLimitError, LLMAuthorizationError, ModelCurrentlyNotSupportError
from events.app_event import app_was_created, app_was_deleted
from libs.helper import TimestampField
from extensions.ext_database import db
from models.model import App, AppModelConfig, Site, InstalledApp
from services.account_service import TenantService
from models.model import App, AppModelConfig, Site
from services.app_model_config_service import AppModelConfigService
model_config_fields = {
@@ -220,7 +215,11 @@ class AppTemplateApi(Resource):
account = current_user
interface_language = account.interface_language
return {'data': demo_model_templates.get(interface_language)}
templates = demo_model_templates.get(interface_language)
if not templates:
templates = demo_model_templates.get('en-US')
return {'data': templates}
class AppApi(Resource):
@@ -478,35 +477,6 @@ class AppExport(Resource):
pass
class IntroductionGenerateApi(Resource):
@setup_required
@login_required
@account_initialization_required
def post(self):
parser = reqparse.RequestParser()
parser.add_argument('prompt_template', type=str, required=True, location='json')
args = parser.parse_args()
account = current_user
try:
answer = LLMGenerator.generate_introduction(
account.current_tenant_id,
args['prompt_template']
)
except ProviderTokenNotInitError:
raise ProviderNotInitializeError()
except QuotaExceededError:
raise ProviderQuotaExceededError()
except ModelCurrentlyNotSupportError:
raise ProviderModelCurrentlyNotSupportError()
except (LLMBadRequestError, LLMAPIConnectionError, LLMAPIUnavailableError,
LLMRateLimitError, LLMAuthorizationError) as e:
raise CompletionRequestError(str(e))
return {'introduction': answer}
api.add_resource(AppListApi, '/apps')
api.add_resource(AppTemplateApi, '/app-templates')
api.add_resource(AppApi, '/apps/<uuid:app_id>')
@@ -515,4 +485,3 @@ api.add_resource(AppNameApi, '/apps/<uuid:app_id>/name')
api.add_resource(AppSiteStatus, '/apps/<uuid:app_id>/site-enable')
api.add_resource(AppApiStatus, '/apps/<uuid:app_id>/api-enable')
api.add_resource(AppRateLimit, '/apps/<uuid:app_id>/rate-limit')
api.add_resource(IntroductionGenerateApi, '/introduction-generate')


@@ -0,0 +1,75 @@
from flask_login import login_required, current_user
from flask_restful import Resource, reqparse

from controllers.console import api
from controllers.console.app.error import ProviderNotInitializeError, ProviderQuotaExceededError, \
    CompletionRequestError, ProviderModelCurrentlyNotSupportError
from controllers.console.setup import setup_required
from controllers.console.wraps import account_initialization_required
from core.generator.llm_generator import LLMGenerator
from core.llm.error import ProviderTokenNotInitError, QuotaExceededError, LLMBadRequestError, LLMAPIConnectionError, \
    LLMAPIUnavailableError, LLMRateLimitError, LLMAuthorizationError, ModelCurrentlyNotSupportError


class IntroductionGenerateApi(Resource):
    @setup_required
    @login_required
    @account_initialization_required
    def post(self):
        parser = reqparse.RequestParser()
        parser.add_argument('prompt_template', type=str, required=True, location='json')
        args = parser.parse_args()

        account = current_user

        try:
            answer = LLMGenerator.generate_introduction(
                account.current_tenant_id,
                args['prompt_template']
            )
        except ProviderTokenNotInitError:
            raise ProviderNotInitializeError()
        except QuotaExceededError:
            raise ProviderQuotaExceededError()
        except ModelCurrentlyNotSupportError:
            raise ProviderModelCurrentlyNotSupportError()
        except (LLMBadRequestError, LLMAPIConnectionError, LLMAPIUnavailableError,
                LLMRateLimitError, LLMAuthorizationError) as e:
            raise CompletionRequestError(str(e))

        return {'introduction': answer}


class RuleGenerateApi(Resource):
    @setup_required
    @login_required
    @account_initialization_required
    def post(self):
        parser = reqparse.RequestParser()
        parser.add_argument('audiences', type=str, required=True, nullable=False, location='json')
        parser.add_argument('hoping_to_solve', type=str, required=True, nullable=False, location='json')
        args = parser.parse_args()

        account = current_user

        try:
            rules = LLMGenerator.generate_rule_config(
                account.current_tenant_id,
                args['audiences'],
                args['hoping_to_solve']
            )
        except ProviderTokenNotInitError:
            raise ProviderNotInitializeError()
        except QuotaExceededError:
            raise ProviderQuotaExceededError()
        except ModelCurrentlyNotSupportError:
            raise ProviderModelCurrentlyNotSupportError()
        except (LLMBadRequestError, LLMAPIConnectionError, LLMAPIUnavailableError,
                LLMRateLimitError, LLMAuthorizationError) as e:
            raise CompletionRequestError(str(e))

        return rules


api.add_resource(IntroductionGenerateApi, '/introduction-generate')
api.add_resource(RuleGenerateApi, '/rule-generate')
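A hypothetical request to the new rule-generate endpoint (the host, port, and session cookie are placeholder assumptions; console routes require a logged-in, initialized account):

```bash
curl -X POST 'http://localhost:5001/console/api/rule-generate' \
  -H 'Content-Type: application/json' \
  -b 'session=<console-session-cookie>' \
  --data '{"audiences": "new developers", "hoping_to_solve": "onboarding questions"}'
```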


@@ -1,4 +1,5 @@
# -*- coding:utf-8 -*-
from decimal import Decimal
from datetime import datetime
import pytz
@@ -59,18 +60,20 @@ class DailyConversationStatistic(Resource):
arg_dict['end'] = end_datetime_utc
sql_query += ' GROUP BY date order by date'
rs = db.session.execute(sql_query, arg_dict)
response_date = []
with db.engine.begin() as conn:
rs = conn.execute(db.text(sql_query), arg_dict)
response_data = []
for i in rs:
response_date.append({
response_data.append({
'date': str(i.date),
'conversation_count': i.conversation_count
})
return jsonify({
'data': response_date
'data': response_data
})
@@ -119,18 +122,20 @@ class DailyTerminalsStatistic(Resource):
arg_dict['end'] = end_datetime_utc
sql_query += ' GROUP BY date order by date'
rs = db.session.execute(sql_query, arg_dict)
response_date = []
with db.engine.begin() as conn:
rs = conn.execute(db.text(sql_query), arg_dict)
response_data = []
for i in rs:
response_date.append({
response_data.append({
'date': str(i.date),
'terminal_count': i.terminal_count
})
return jsonify({
'data': response_date
'data': response_data
})
@@ -180,12 +185,14 @@ class DailyTokenCostStatistic(Resource):
arg_dict['end'] = end_datetime_utc
sql_query += ' GROUP BY date order by date'
rs = db.session.execute(sql_query, arg_dict)
response_date = []
with db.engine.begin() as conn:
rs = conn.execute(db.text(sql_query), arg_dict)
response_data = []
for i in rs:
response_date.append({
response_data.append({
'date': str(i.date),
'token_count': i.token_count,
'total_price': i.total_price,
@@ -193,10 +200,207 @@ class DailyTokenCostStatistic(Resource):
})
return jsonify({
'data': response_date
'data': response_data
})
class AverageSessionInteractionStatistic(Resource):

    @setup_required
    @login_required
    @account_initialization_required
    def get(self, app_id):
        account = current_user
        app_id = str(app_id)
        app_model = _get_app(app_id, 'chat')

        parser = reqparse.RequestParser()
        parser.add_argument('start', type=datetime_string('%Y-%m-%d %H:%M'), location='args')
        parser.add_argument('end', type=datetime_string('%Y-%m-%d %H:%M'), location='args')
        args = parser.parse_args()

        sql_query = """SELECT date(DATE_TRUNC('day', c.created_at AT TIME ZONE 'UTC' AT TIME ZONE :tz )) AS date,
            AVG(subquery.message_count) AS interactions
            FROM (SELECT m.conversation_id, COUNT(m.id) AS message_count
                FROM conversations c
                JOIN messages m ON c.id = m.conversation_id
                WHERE c.override_model_configs IS NULL AND c.app_id = :app_id"""
        arg_dict = {'tz': account.timezone, 'app_id': app_model.id}

        timezone = pytz.timezone(account.timezone)
        utc_timezone = pytz.utc

        if args['start']:
            start_datetime = datetime.strptime(args['start'], '%Y-%m-%d %H:%M')
            start_datetime = start_datetime.replace(second=0)

            start_datetime_timezone = timezone.localize(start_datetime)
            start_datetime_utc = start_datetime_timezone.astimezone(utc_timezone)

            sql_query += ' and c.created_at >= :start'
            arg_dict['start'] = start_datetime_utc

        if args['end']:
            end_datetime = datetime.strptime(args['end'], '%Y-%m-%d %H:%M')
            end_datetime = end_datetime.replace(second=0)

            end_datetime_timezone = timezone.localize(end_datetime)
            end_datetime_utc = end_datetime_timezone.astimezone(utc_timezone)

            sql_query += ' and c.created_at < :end'
            arg_dict['end'] = end_datetime_utc

        sql_query += """
            GROUP BY m.conversation_id) subquery
            LEFT JOIN conversations c on c.id=subquery.conversation_id
            GROUP BY date
            ORDER BY date"""

        with db.engine.begin() as conn:
            rs = conn.execute(db.text(sql_query), arg_dict)

            response_data = []
            for i in rs:
                response_data.append({
                    'date': str(i.date),
                    'interactions': float(i.interactions.quantize(Decimal('0.01')))
                })

        return jsonify({
            'data': response_data
        })


class UserSatisfactionRateStatistic(Resource):

    @setup_required
    @login_required
    @account_initialization_required
    def get(self, app_id):
        account = current_user
        app_id = str(app_id)
        app_model = _get_app(app_id)

        parser = reqparse.RequestParser()
        parser.add_argument('start', type=datetime_string('%Y-%m-%d %H:%M'), location='args')
        parser.add_argument('end', type=datetime_string('%Y-%m-%d %H:%M'), location='args')
        args = parser.parse_args()

        sql_query = '''
            SELECT date(DATE_TRUNC('day', m.created_at AT TIME ZONE 'UTC' AT TIME ZONE :tz )) AS date,
                COUNT(m.id) as message_count, COUNT(mf.id) as feedback_count
            FROM messages m
            LEFT JOIN message_feedbacks mf on mf.message_id=m.id
            WHERE m.app_id = :app_id
        '''
        arg_dict = {'tz': account.timezone, 'app_id': app_model.id}

        timezone = pytz.timezone(account.timezone)
        utc_timezone = pytz.utc

        if args['start']:
            start_datetime = datetime.strptime(args['start'], '%Y-%m-%d %H:%M')
            start_datetime = start_datetime.replace(second=0)

            start_datetime_timezone = timezone.localize(start_datetime)
            start_datetime_utc = start_datetime_timezone.astimezone(utc_timezone)

            sql_query += ' and m.created_at >= :start'
            arg_dict['start'] = start_datetime_utc

        if args['end']:
            end_datetime = datetime.strptime(args['end'], '%Y-%m-%d %H:%M')
            end_datetime = end_datetime.replace(second=0)

            end_datetime_timezone = timezone.localize(end_datetime)
            end_datetime_utc = end_datetime_timezone.astimezone(utc_timezone)

            sql_query += ' and m.created_at < :end'
            arg_dict['end'] = end_datetime_utc

        sql_query += ' GROUP BY date order by date'

        with db.engine.begin() as conn:
            rs = conn.execute(db.text(sql_query), arg_dict)

            response_data = []
            for i in rs:
                response_data.append({
                    'date': str(i.date),
                    'rate': round((i.feedback_count * 1000 / i.message_count) if i.message_count > 0 else 0, 2),
                })

        return jsonify({
            'data': response_data
        })


class AverageResponseTimeStatistic(Resource):

    @setup_required
    @login_required
    @account_initialization_required
    def get(self, app_id):
        account = current_user
        app_id = str(app_id)
        app_model = _get_app(app_id, 'completion')

        parser = reqparse.RequestParser()
        parser.add_argument('start', type=datetime_string('%Y-%m-%d %H:%M'), location='args')
        parser.add_argument('end', type=datetime_string('%Y-%m-%d %H:%M'), location='args')
        args = parser.parse_args()

        sql_query = '''
            SELECT date(DATE_TRUNC('day', created_at AT TIME ZONE 'UTC' AT TIME ZONE :tz )) AS date,
                AVG(provider_response_latency) as latency
            FROM messages
            WHERE app_id = :app_id
        '''
        arg_dict = {'tz': account.timezone, 'app_id': app_model.id}

        timezone = pytz.timezone(account.timezone)
        utc_timezone = pytz.utc

        if args['start']:
            start_datetime = datetime.strptime(args['start'], '%Y-%m-%d %H:%M')
            start_datetime = start_datetime.replace(second=0)

            start_datetime_timezone = timezone.localize(start_datetime)
            start_datetime_utc = start_datetime_timezone.astimezone(utc_timezone)

            sql_query += ' and created_at >= :start'
            arg_dict['start'] = start_datetime_utc

        if args['end']:
            end_datetime = datetime.strptime(args['end'], '%Y-%m-%d %H:%M')
            end_datetime = end_datetime.replace(second=0)

            end_datetime_timezone = timezone.localize(end_datetime)
            end_datetime_utc = end_datetime_timezone.astimezone(utc_timezone)

            sql_query += ' and created_at < :end'
            arg_dict['end'] = end_datetime_utc

        sql_query += ' GROUP BY date order by date'

        with db.engine.begin() as conn:
            rs = conn.execute(db.text(sql_query), arg_dict)

            response_data = []
            for i in rs:
                response_data.append({
                    'date': str(i.date),
                    'latency': round(i.latency * 1000, 4)
                })

        return jsonify({
            'data': response_data
        })
api.add_resource(DailyConversationStatistic, '/apps/<uuid:app_id>/statistics/daily-conversations')
api.add_resource(DailyTerminalsStatistic, '/apps/<uuid:app_id>/statistics/daily-end-users')
api.add_resource(DailyTokenCostStatistic, '/apps/<uuid:app_id>/statistics/token-costs')
api.add_resource(AverageSessionInteractionStatistic, '/apps/<uuid:app_id>/statistics/average-session-interactions')
api.add_resource(UserSatisfactionRateStatistic, '/apps/<uuid:app_id>/statistics/user-satisfaction-rate')
api.add_resource(AverageResponseTimeStatistic, '/apps/<uuid:app_id>/statistics/average-response-time')
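As an illustration, the new average-session-interactions endpoint could be queried like this (URL, port, and auth are assumptions; note the '%Y-%m-%d %H:%M' date format the parser expects, with the space URL-encoded):

```bash
curl 'http://localhost:5001/console/api/apps/<app_id>/statistics/average-session-interactions?start=2023-06-01%2000:00&end=2023-06-07%2000:00' \
  -b 'session=<console-session-cookie>'
```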


@@ -0,0 +1,101 @@
import logging
from datetime import datetime
from typing import Optional

import flask_login
import requests
from flask import request, redirect, current_app, session
from flask_login import current_user, login_required
from flask_restful import Resource
from werkzeug.exceptions import Forbidden

from libs.oauth_data_source import NotionOAuth
from controllers.console import api
from ..setup import setup_required
from ..wraps import account_initialization_required


def get_oauth_providers():
    with current_app.app_context():
        notion_oauth = NotionOAuth(client_id=current_app.config.get('NOTION_CLIENT_ID'),
                                   client_secret=current_app.config.get(
                                       'NOTION_CLIENT_SECRET'),
                                   redirect_uri=current_app.config.get(
                                       'CONSOLE_URL') + '/console/api/oauth/data-source/callback/notion')

        OAUTH_PROVIDERS = {
            'notion': notion_oauth
        }
        return OAUTH_PROVIDERS


class OAuthDataSource(Resource):
    def get(self, provider: str):
        # The role of the current user in the table must be admin or owner
        if current_user.current_tenant.current_role not in ['admin', 'owner']:
            raise Forbidden()
        OAUTH_DATASOURCE_PROVIDERS = get_oauth_providers()
        with current_app.app_context():
            oauth_provider = OAUTH_DATASOURCE_PROVIDERS.get(provider)
            print(vars(oauth_provider))
            if not oauth_provider:
                return {'error': 'Invalid provider'}, 400
            if current_app.config.get('NOTION_INTEGRATION_TYPE') == 'internal':
                internal_secret = current_app.config.get('NOTION_INTERNAL_SECRET')
                oauth_provider.save_internal_access_token(internal_secret)
                return redirect(f'{current_app.config.get("CONSOLE_URL")}?oauth_data_source=success')
            else:
                auth_url = oauth_provider.get_authorization_url()
                return redirect(auth_url)


class OAuthDataSourceCallback(Resource):
    def get(self, provider: str):
        OAUTH_DATASOURCE_PROVIDERS = get_oauth_providers()
        with current_app.app_context():
            oauth_provider = OAUTH_DATASOURCE_PROVIDERS.get(provider)
            if not oauth_provider:
                return {'error': 'Invalid provider'}, 400
            if 'code' in request.args:
                code = request.args.get('code')
                try:
                    oauth_provider.get_access_token(code)
                except requests.exceptions.HTTPError as e:
                    logging.exception(
                        f"An error occurred during the OAuthCallback process with {provider}: {e.response.text}")
                    return {'error': 'OAuth data source process failed'}, 400

                return redirect(f'{current_app.config.get("CONSOLE_URL")}?oauth_data_source=success')
            elif 'error' in request.args:
                error = request.args.get('error')
                return redirect(f'{current_app.config.get("CONSOLE_URL")}?oauth_data_source={error}')
            else:
                return redirect(f'{current_app.config.get("CONSOLE_URL")}?oauth_data_source=access_denied')


class OAuthDataSourceSync(Resource):
    @setup_required
    @login_required
    @account_initialization_required
    def get(self, provider, binding_id):
        provider = str(provider)
        binding_id = str(binding_id)
        OAUTH_DATASOURCE_PROVIDERS = get_oauth_providers()
        with current_app.app_context():
            oauth_provider = OAUTH_DATASOURCE_PROVIDERS.get(provider)
            if not oauth_provider:
                return {'error': 'Invalid provider'}, 400
            try:
                oauth_provider.sync_data_source(binding_id)
            except requests.exceptions.HTTPError as e:
                logging.exception(
                    f"An error occurred during the OAuthCallback process with {provider}: {e.response.text}")
                return {'error': 'OAuth data source process failed'}, 400

            return {'result': 'success'}, 200


api.add_resource(OAuthDataSource, '/oauth/data-source/<string:provider>')
api.add_resource(OAuthDataSourceCallback, '/oauth/data-source/callback/<string:provider>')
api.add_resource(OAuthDataSourceSync, '/oauth/data-source/<string:provider>/<uuid:binding_id>/sync')
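Sketching the flow these routes implement, with hypothetical host and auth values (public integration type; the internal type short-circuits straight to a success redirect):

```bash
# 1. Start the OAuth flow; the server 302-redirects to Notion's authorization URL.
curl -i 'http://localhost:5001/console/api/oauth/data-source/notion'
# 2. Notion redirects back to .../oauth/data-source/callback/notion?code=...,
#    which exchanges the code for an access token and redirects to the console.
# 3. Re-sync an existing binding later:
curl 'http://localhost:5001/console/api/oauth/data-source/notion/<binding_id>/sync' \
  -b 'session=<console-session-cookie>'
```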


@@ -0,0 +1,303 @@
import datetime
import json
from cachetools import TTLCache
from flask import request, current_app
from flask_login import login_required, current_user
from flask_restful import Resource, marshal_with, fields, reqparse, marshal
from werkzeug.exceptions import NotFound
from controllers.console import api
from controllers.console.setup import setup_required
from controllers.console.wraps import account_initialization_required
from core.data_source.notion import NotionPageReader
from core.indexing_runner import IndexingRunner
from extensions.ext_database import db
from libs.helper import TimestampField
from libs.oauth_data_source import NotionOAuth
from models.dataset import Document
from models.source import DataSourceBinding
from services.dataset_service import DatasetService, DocumentService
from tasks.document_indexing_sync_task import document_indexing_sync_task
cache = TTLCache(maxsize=None, ttl=30)
FILE_SIZE_LIMIT = 15 * 1024 * 1024 # 15MB
ALLOWED_EXTENSIONS = ['txt', 'markdown', 'md', 'pdf', 'html', 'htm']
PREVIEW_WORDS_LIMIT = 3000
class DataSourceApi(Resource):
integrate_icon_fields = {
'type': fields.String,
'url': fields.String,
'emoji': fields.String
}
integrate_page_fields = {
'page_name': fields.String,
'page_id': fields.String,
'page_icon': fields.Nested(integrate_icon_fields, allow_null=True),
'parent_id': fields.String,
'type': fields.String
}
integrate_workspace_fields = {
'workspace_name': fields.String,
'workspace_id': fields.String,
'workspace_icon': fields.String,
'pages': fields.List(fields.Nested(integrate_page_fields)),
'total': fields.Integer
}
integrate_fields = {
'id': fields.String,
'provider': fields.String,
'created_at': TimestampField,
'is_bound': fields.Boolean,
'disabled': fields.Boolean,
'link': fields.String,
'source_info': fields.Nested(integrate_workspace_fields)
}
integrate_list_fields = {
'data': fields.List(fields.Nested(integrate_fields)),
}
@setup_required
@login_required
@account_initialization_required
@marshal_with(integrate_list_fields)
def get(self):
# get workspace data source integrates
data_source_integrates = db.session.query(DataSourceBinding).filter(
DataSourceBinding.tenant_id == current_user.current_tenant_id,
DataSourceBinding.disabled == False
).all()
base_url = request.url_root.rstrip('/')
data_source_oauth_base_path = "/console/api/oauth/data-source"
providers = ["notion"]
integrate_data = []
for provider in providers:
# existing_integrate = next((ai for ai in data_source_integrates if ai.provider == provider), None)
existing_integrates = filter(lambda item: item.provider == provider, data_source_integrates)
if existing_integrates:
for existing_integrate in list(existing_integrates):
integrate_data.append({
'id': existing_integrate.id,
'provider': provider,
'created_at': existing_integrate.created_at,
'is_bound': True,
'disabled': existing_integrate.disabled,
'source_info': existing_integrate.source_info,
'link': f'{base_url}{data_source_oauth_base_path}/{provider}'
})
else:
integrate_data.append({
'id': None,
'provider': provider,
'created_at': None,
'source_info': None,
'is_bound': False,
'disabled': None,
'link': f'{base_url}{data_source_oauth_base_path}/{provider}'
})
return {'data': integrate_data}, 200
@setup_required
@login_required
@account_initialization_required
def patch(self, binding_id, action):
binding_id = str(binding_id)
action = str(action)
data_source_binding = DataSourceBinding.query.filter_by(
id=binding_id
).first()
if data_source_binding is None:
raise NotFound('Data source binding not found.')
# enable binding
if action == 'enable':
if data_source_binding.disabled:
data_source_binding.disabled = False
data_source_binding.updated_at = datetime.datetime.utcnow()
db.session.add(data_source_binding)
db.session.commit()
else:
raise ValueError('Data source is not disabled.')
# disable binding
if action == 'disable':
if not data_source_binding.disabled:
data_source_binding.disabled = True
data_source_binding.updated_at = datetime.datetime.utcnow()
db.session.add(data_source_binding)
db.session.commit()
else:
raise ValueError('Data source is disabled.')
return {'result': 'success'}, 200
class DataSourceNotionListApi(Resource):
integrate_icon_fields = {
'type': fields.String,
'url': fields.String,
'emoji': fields.String
}
integrate_page_fields = {
'page_name': fields.String,
'page_id': fields.String,
'page_icon': fields.Nested(integrate_icon_fields, allow_null=True),
'is_bound': fields.Boolean,
'parent_id': fields.String,
'type': fields.String
}
integrate_workspace_fields = {
'workspace_name': fields.String,
'workspace_id': fields.String,
'workspace_icon': fields.String,
'pages': fields.List(fields.Nested(integrate_page_fields))
}
integrate_notion_info_list_fields = {
'notion_info': fields.List(fields.Nested(integrate_workspace_fields)),
}
@setup_required
@login_required
@account_initialization_required
@marshal_with(integrate_notion_info_list_fields)
def get(self):
dataset_id = request.args.get('dataset_id', default=None, type=str)
exist_page_ids = []
# import notion in the exist dataset
if dataset_id:
dataset = DatasetService.get_dataset(dataset_id)
if not dataset:
raise NotFound('Dataset not found.')
if dataset.data_source_type != 'notion_import':
raise ValueError('Dataset is not notion type.')
documents = Document.query.filter_by(
dataset_id=dataset_id,
tenant_id=current_user.current_tenant_id,
data_source_type='notion_import',
enabled=True
).all()
if documents:
for document in documents:
data_source_info = json.loads(document.data_source_info)
exist_page_ids.append(data_source_info['notion_page_id'])
# get all authorized pages
data_source_bindings = DataSourceBinding.query.filter_by(
tenant_id=current_user.current_tenant_id,
provider='notion',
disabled=False
).all()
if not data_source_bindings:
return {
'notion_info': []
}, 200
pre_import_info_list = []
for data_source_binding in data_source_bindings:
source_info = data_source_binding.source_info
pages = source_info['pages']
# Mark pages that are already bound
for page in pages:
if page['page_id'] in exist_page_ids:
page['is_bound'] = True
else:
page['is_bound'] = False
pre_import_info = {
'workspace_name': source_info['workspace_name'],
'workspace_icon': source_info['workspace_icon'],
'workspace_id': source_info['workspace_id'],
'pages': pages,
}
pre_import_info_list.append(pre_import_info)
return {
'notion_info': pre_import_info_list
}, 200
class DataSourceNotionApi(Resource):
@setup_required
@login_required
@account_initialization_required
def get(self, workspace_id, page_id, page_type):
workspace_id = str(workspace_id)
page_id = str(page_id)
data_source_binding = DataSourceBinding.query.filter(
db.and_(
DataSourceBinding.tenant_id == current_user.current_tenant_id,
DataSourceBinding.provider == 'notion',
DataSourceBinding.disabled == False,
DataSourceBinding.source_info['workspace_id'] == f'"{workspace_id}"'
)
).first()
if not data_source_binding:
raise NotFound('Data source binding not found.')
reader = NotionPageReader(integration_token=data_source_binding.access_token)
if page_type == 'page':
page_content = reader.read_page(page_id)
elif page_type == 'database':
page_content = reader.query_database_data(page_id)
else:
page_content = ""
return {
'content': page_content
}, 200
@setup_required
@login_required
@account_initialization_required
def post(self):
parser = reqparse.RequestParser()
parser.add_argument('notion_info_list', type=list, required=True, nullable=True, location='json')
parser.add_argument('process_rule', type=dict, required=True, nullable=True, location='json')
args = parser.parse_args()
# validate args
DocumentService.estimate_args_validate(args)
indexing_runner = IndexingRunner()
response = indexing_runner.notion_indexing_estimate(args['notion_info_list'], args['process_rule'])
return response, 200
class DataSourceNotionDatasetSyncApi(Resource):
@setup_required
@login_required
@account_initialization_required
def get(self, dataset_id):
dataset_id_str = str(dataset_id)
dataset = DatasetService.get_dataset(dataset_id_str)
if dataset is None:
raise NotFound("Dataset not found.")
documents = DocumentService.get_document_by_dataset_id(dataset_id_str)
for document in documents:
document_indexing_sync_task.delay(dataset_id_str, document.id)
return 200
class DataSourceNotionDocumentSyncApi(Resource):
@setup_required
@login_required
@account_initialization_required
def get(self, dataset_id, document_id):
dataset_id_str = str(dataset_id)
document_id_str = str(document_id)
dataset = DatasetService.get_dataset(dataset_id_str)
if dataset is None:
raise NotFound("Dataset not found.")
document = DocumentService.get_document(dataset_id_str, document_id_str)
if document is None:
raise NotFound("Document not found.")
document_indexing_sync_task.delay(dataset_id_str, document_id_str)
return 200
api.add_resource(DataSourceApi, '/data-source/integrates', '/data-source/integrates/<uuid:binding_id>/<string:action>')
api.add_resource(DataSourceNotionListApi, '/notion/pre-import/pages')
api.add_resource(DataSourceNotionApi,
'/notion/workspaces/<uuid:workspace_id>/pages/<uuid:page_id>/<string:page_type>/preview',
'/datasets/notion-indexing-estimate')
api.add_resource(DataSourceNotionDatasetSyncApi, '/datasets/<uuid:dataset_id>/notion/sync')
api.add_resource(DataSourceNotionDocumentSyncApi, '/datasets/<uuid:dataset_id>/documents/<uuid:document_id>/notion/sync')
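
Taken together, these routes let the console list integrations, toggle a binding, preview a Notion page, and trigger a re-sync. A minimal client sketch against the new endpoints; the base URL and the cookie-based session are assumptions here, not part of the change:

import requests

BASE = 'http://localhost:5001/console/api'  # assumption: locally running console API
session = requests.Session()
session.cookies.set('session', '<console-session-cookie>')  # assumption: authenticated session

# list bound and unbound data source integrations
integrates = session.get(f'{BASE}/data-source/integrates').json()['data']

# disable the first bound integration, if any
bound = [i for i in integrates if i['is_bound']]
if bound:
    resp = session.patch(f"{BASE}/data-source/integrates/{bound[0]['id']}/disable")
    print(resp.json())  # {'result': 'success'}

# list authorized Notion pages before importing
notion_info = session.get(f'{BASE}/notion/pre-import/pages').json()['notion_info']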

View File

@@ -12,8 +12,9 @@ from controllers.console.wraps import account_initialization_required
from core.indexing_runner import IndexingRunner
from libs.helper import TimestampField
from extensions.ext_database import db
from models.dataset import DocumentSegment, Document
from models.model import UploadFile
from services.dataset_service import DatasetService
from services.dataset_service import DatasetService, DocumentService
dataset_detail_fields = {
'id': fields.String,
@@ -50,8 +51,8 @@ def _validate_name(name):
def _validate_description_length(description):
if len(description) > 200:
raise ValueError('Description cannot exceed 200 characters.')
if len(description) > 400:
raise ValueError('Description cannot exceed 400 characters.')
return description
@@ -217,17 +218,31 @@ class DatasetIndexingEstimateApi(Resource):
@login_required
@account_initialization_required
def post(self):
segment_rule = request.get_json()
file_detail = db.session.query(UploadFile).filter(
UploadFile.tenant_id == current_user.current_tenant_id,
UploadFile.id == segment_rule["file_id"]
).first()
parser = reqparse.RequestParser()
parser.add_argument('info_list', type=dict, required=True, nullable=True, location='json')
parser.add_argument('process_rule', type=dict, required=True, nullable=True, location='json')
args = parser.parse_args()
# validate args
DocumentService.estimate_args_validate(args)
if args['info_list']['data_source_type'] == 'upload_file':
file_ids = args['info_list']['file_info_list']['file_ids']
file_details = db.session.query(UploadFile).filter(
UploadFile.tenant_id == current_user.current_tenant_id,
UploadFile.id.in_(file_ids)
).all()
if file_detail is None:
raise NotFound("File not found.")
if not file_details:
raise NotFound("File not found.")
indexing_runner = IndexingRunner()
response = indexing_runner.indexing_estimate(file_detail, segment_rule['process_rule'])
indexing_runner = IndexingRunner()
response = indexing_runner.file_indexing_estimate(file_details, args['process_rule'])
elif args['info_list']['data_source_type'] == 'notion_import':
indexing_runner = IndexingRunner()
response = indexing_runner.notion_indexing_estimate(args['info_list']['notion_info_list'],
args['process_rule'])
else:
raise ValueError('Data source type not supported.')
return response, 200
@@ -274,8 +289,54 @@ class DatasetRelatedAppListApi(Resource):
}, 200
class DatasetIndexingStatusApi(Resource):
document_status_fields = {
'id': fields.String,
'indexing_status': fields.String,
'processing_started_at': TimestampField,
'parsing_completed_at': TimestampField,
'cleaning_completed_at': TimestampField,
'splitting_completed_at': TimestampField,
'completed_at': TimestampField,
'paused_at': TimestampField,
'error': fields.String,
'stopped_at': TimestampField,
'completed_segments': fields.Integer,
'total_segments': fields.Integer,
}
document_status_fields_list = {
'data': fields.List(fields.Nested(document_status_fields))
}
@setup_required
@login_required
@account_initialization_required
def get(self, dataset_id):
dataset_id = str(dataset_id)
documents = db.session.query(Document).filter(
Document.dataset_id == dataset_id,
Document.tenant_id == current_user.current_tenant_id
).all()
documents_status = []
for document in documents:
completed_segments = DocumentSegment.query.filter(DocumentSegment.completed_at.isnot(None),
DocumentSegment.document_id == str(document.id),
DocumentSegment.status != 're_segment').count()
total_segments = DocumentSegment.query.filter(DocumentSegment.document_id == str(document.id),
DocumentSegment.status != 're_segment').count()
document.completed_segments = completed_segments
document.total_segments = total_segments
documents_status.append(marshal(document, self.document_status_fields))
data = {
'data': documents_status
}
return data
api.add_resource(DatasetListApi, '/datasets')
api.add_resource(DatasetApi, '/datasets/<uuid:dataset_id>')
api.add_resource(DatasetQueryApi, '/datasets/<uuid:dataset_id>/queries')
api.add_resource(DatasetIndexingEstimateApi, '/datasets/file-indexing-estimate')
api.add_resource(DatasetIndexingEstimateApi, '/datasets/indexing-estimate')
api.add_resource(DatasetRelatedAppListApi, '/datasets/<uuid:dataset_id>/related-apps')
api.add_resource(DatasetIndexingStatusApi, '/datasets/<uuid:dataset_id>/indexing-status')
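
The indexing-estimate route, renamed from file-indexing-estimate, now accepts both data source types through a single info_list payload. A sketch of the two request shapes; every ID below is a placeholder and the process_rule is shown in its simplest automatic form:

estimate_upload = {
    'info_list': {
        'data_source_type': 'upload_file',
        'file_info_list': {'file_ids': ['<upload-file-uuid>']}
    },
    'process_rule': {'mode': 'automatic', 'rules': {}}
}

estimate_notion = {
    'info_list': {
        'data_source_type': 'notion_import',
        'notion_info_list': [{
            'workspace_id': '<workspace-uuid>',
            'pages': [{'page_id': '<page-uuid>', 'type': 'page'}]
        }]
    },
    'process_rule': {'mode': 'automatic', 'rules': {}}
}
# POST either body to /datasets/indexing-estimate; the response carries
# tokens, total_price, currency, total_segments and a preview list.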

View File

@@ -1,6 +1,7 @@
# -*- coding:utf-8 -*-
import random
from datetime import datetime
from typing import List
from flask import request
from flask_login import login_required, current_user
@@ -61,6 +62,29 @@ document_fields = {
'hit_count': fields.Integer,
}
document_with_segments_fields = {
'id': fields.String,
'position': fields.Integer,
'data_source_type': fields.String,
'data_source_info': fields.Raw(attribute='data_source_info_dict'),
'dataset_process_rule_id': fields.String,
'name': fields.String,
'created_from': fields.String,
'created_by': fields.String,
'created_at': TimestampField,
'tokens': fields.Integer,
'indexing_status': fields.String,
'error': fields.String,
'enabled': fields.Boolean,
'disabled_at': TimestampField,
'disabled_by': fields.String,
'archived': fields.Boolean,
'display_status': fields.String,
'word_count': fields.Integer,
'hit_count': fields.Integer,
'completed_segments': fields.Integer,
'total_segments': fields.Integer
}
class DocumentResource(Resource):
def get_document(self, dataset_id: str, document_id: str) -> Document:
@@ -83,6 +107,23 @@ class DocumentResource(Resource):
return document
def get_batch_documents(self, dataset_id: str, batch: str) -> List[Document]:
dataset = DatasetService.get_dataset(dataset_id)
if not dataset:
raise NotFound('Dataset not found.')
try:
DatasetService.check_dataset_permission(dataset, current_user)
except services.errors.account.NoPermissionError as e:
raise Forbidden(str(e))
documents = DocumentService.get_batch_documents(dataset_id, batch)
if not documents:
raise NotFound('Documents not found.')
return documents
class GetProcessRuleApi(Resource):
@setup_required
@@ -132,9 +173,9 @@ class DatasetDocumentListApi(Resource):
dataset_id = str(dataset_id)
page = request.args.get('page', default=1, type=int)
limit = request.args.get('limit', default=20, type=int)
search = request.args.get('search', default=None, type=str)
search = request.args.get('keyword', default=None, type=str)
sort = request.args.get('sort', default='-created_at', type=str)
fetch = request.args.get('fetch', default=False, type=bool)
dataset = DatasetService.get_dataset(dataset_id)
if not dataset:
raise NotFound('Dataset not found.')
@@ -173,9 +214,20 @@ class DatasetDocumentListApi(Resource):
paginated_documents = query.paginate(
page=page, per_page=limit, max_per_page=100, error_out=False)
documents = paginated_documents.items
if fetch:
for document in documents:
completed_segments = DocumentSegment.query.filter(DocumentSegment.completed_at.isnot(None),
DocumentSegment.document_id == str(document.id),
DocumentSegment.status != 're_segment').count()
total_segments = DocumentSegment.query.filter(DocumentSegment.document_id == str(document.id),
DocumentSegment.status != 're_segment').count()
document.completed_segments = completed_segments
document.total_segments = total_segments
data = marshal(documents, document_with_segments_fields)
else:
data = marshal(documents, document_fields)
response = {
'data': marshal(documents, document_fields),
'data': data,
'has_more': len(documents) == limit,
'limit': limit,
'total': paginated_documents.total,
@@ -184,10 +236,15 @@ class DatasetDocumentListApi(Resource):
return response
documents_and_batch_fields = {
'documents': fields.List(fields.Nested(document_fields)),
'batch': fields.String
}
@setup_required
@login_required
@account_initialization_required
@marshal_with(document_fields)
@marshal_with(documents_and_batch_fields)
def post(self, dataset_id):
dataset_id = str(dataset_id)
@@ -208,9 +265,10 @@ class DatasetDocumentListApi(Resource):
parser = reqparse.RequestParser()
parser.add_argument('indexing_technique', type=str, choices=Dataset.INDEXING_TECHNIQUE_LIST, nullable=False,
location='json')
parser.add_argument('data_source', type=dict, required=True, nullable=True, location='json')
parser.add_argument('process_rule', type=dict, required=True, nullable=True, location='json')
parser.add_argument('data_source', type=dict, required=False, location='json')
parser.add_argument('process_rule', type=dict, required=False, location='json')
parser.add_argument('duplicate', type=bool, nullable=False, location='json')
parser.add_argument('original_document_id', type=str, required=False, location='json')
args = parser.parse_args()
if not dataset.indexing_technique and not args['indexing_technique']:
@@ -220,7 +278,7 @@ class DatasetDocumentListApi(Resource):
DocumentService.document_create_args_validate(args)
try:
document = DocumentService.save_document_with_dataset_id(dataset, args, current_user)
documents, batch = DocumentService.save_document_with_dataset_id(dataset, args, current_user)
except ProviderTokenNotInitError:
raise ProviderNotInitializeError()
except QuotaExceededError:
@@ -228,13 +286,17 @@ class DatasetDocumentListApi(Resource):
except ModelCurrentlyNotSupportError:
raise ProviderModelCurrentlyNotSupportError()
return document
return {
'documents': documents,
'batch': batch
}
class DatasetInitApi(Resource):
dataset_and_document_fields = {
'dataset': fields.Nested(dataset_fields),
'document': fields.Nested(document_fields)
'documents': fields.List(fields.Nested(document_fields)),
'batch': fields.String
}
@setup_required
@@ -257,7 +319,7 @@ class DatasetInitApi(Resource):
DocumentService.document_create_args_validate(args)
try:
dataset, document = DocumentService.save_document_without_dataset_id(
dataset, documents, batch = DocumentService.save_document_without_dataset_id(
tenant_id=current_user.current_tenant_id,
document_data=args,
account=current_user
@@ -271,7 +333,8 @@ class DatasetInitApi(Resource):
response = {
'dataset': dataset,
'document': document
'documents': documents,
'batch': batch
}
return response
@@ -316,11 +379,122 @@ class DocumentIndexingEstimateApi(DocumentResource):
raise NotFound('File not found.')
indexing_runner = IndexingRunner()
response = indexing_runner.indexing_estimate(file, data_process_rule_dict)
response = indexing_runner.file_indexing_estimate([file], data_process_rule_dict)
return response
class DocumentBatchIndexingEstimateApi(DocumentResource):
@setup_required
@login_required
@account_initialization_required
def get(self, dataset_id, batch):
dataset_id = str(dataset_id)
batch = str(batch)
dataset = DatasetService.get_dataset(dataset_id)
if dataset is None:
raise NotFound("Dataset not found.")
documents = self.get_batch_documents(dataset_id, batch)
response = {
"tokens": 0,
"total_price": 0,
"currency": "USD",
"total_segments": 0,
"preview": []
}
if not documents:
return response
data_process_rule = documents[0].dataset_process_rule
data_process_rule_dict = data_process_rule.to_dict()
info_list = []
for document in documents:
if document.indexing_status in ['completed', 'error']:
raise DocumentAlreadyFinishedError()
data_source_info = document.data_source_info_dict
# format document files info
if data_source_info and 'upload_file_id' in data_source_info:
file_id = data_source_info['upload_file_id']
info_list.append(file_id)
# format document notion info
elif data_source_info and 'notion_workspace_id' in data_source_info and 'notion_page_id' in data_source_info:
pages = []
page = {
'page_id': data_source_info['notion_page_id'],
'type': data_source_info['type']
}
pages.append(page)
notion_info = {
'workspace_id': data_source_info['notion_workspace_id'],
'pages': pages
}
info_list.append(notion_info)
if dataset.data_source_type == 'upload_file':
file_details = db.session.query(UploadFile).filter(
UploadFile.tenant_id == current_user.current_tenant_id,
UploadFile.id.in_(info_list)
).all()
if not file_details:
raise NotFound("File not found.")
indexing_runner = IndexingRunner()
response = indexing_runner.file_indexing_estimate(file_details, data_process_rule_dict)
elif dataset.data_source_type == 'notion_import':
indexing_runner = IndexingRunner()
response = indexing_runner.notion_indexing_estimate(info_list,
data_process_rule_dict)
else:
raise ValueError('Data source type not supported.')
return response
class DocumentBatchIndexingStatusApi(DocumentResource):
document_status_fields = {
'id': fields.String,
'indexing_status': fields.String,
'processing_started_at': TimestampField,
'parsing_completed_at': TimestampField,
'cleaning_completed_at': TimestampField,
'splitting_completed_at': TimestampField,
'completed_at': TimestampField,
'paused_at': TimestampField,
'error': fields.String,
'stopped_at': TimestampField,
'completed_segments': fields.Integer,
'total_segments': fields.Integer,
}
document_status_fields_list = {
'data': fields.List(fields.Nested(document_status_fields))
}
@setup_required
@login_required
@account_initialization_required
def get(self, dataset_id, batch):
dataset_id = str(dataset_id)
batch = str(batch)
documents = self.get_batch_documents(dataset_id, batch)
documents_status = []
for document in documents:
completed_segments = DocumentSegment.query.filter(DocumentSegment.completed_at.isnot(None),
DocumentSegment.document_id == str(document.id),
DocumentSegment.status != 're_segment').count()
total_segments = DocumentSegment.query.filter(DocumentSegment.document_id == str(document.id),
DocumentSegment.status != 're_segment').count()
document.completed_segments = completed_segments
document.total_segments = total_segments
documents_status.append(marshal(document, self.document_status_fields))
data = {
'data': documents_status
}
return data
class DocumentIndexingStatusApi(DocumentResource):
document_status_fields = {
'id': fields.String,
@@ -347,10 +521,12 @@ class DocumentIndexingStatusApi(DocumentResource):
completed_segments = DocumentSegment.query \
.filter(DocumentSegment.completed_at.isnot(None),
DocumentSegment.document_id == str(document_id)) \
DocumentSegment.document_id == str(document_id),
DocumentSegment.status != 're_segment') \
.count()
total_segments = DocumentSegment.query \
.filter_by(document_id=str(document_id)) \
.filter(DocumentSegment.document_id == str(document_id),
DocumentSegment.status != 're_segment') \
.count()
document.completed_segments = completed_segments
@@ -405,7 +581,7 @@ class DocumentDetailApi(DocumentResource):
'disabled_by': document.disabled_by,
'archived': document.archived,
'segment_count': document.segment_count,
'average_segment_length': document.average_segment_length,
'average_segment_length': document.average_segment_length,
'hit_count': document.hit_count,
'display_status': document.display_status
}
@@ -425,7 +601,7 @@ class DocumentDetailApi(DocumentResource):
'created_at': document.created_at.timestamp(),
'tokens': document.tokens,
'indexing_status': document.indexing_status,
'completed_at': int(document.completed_at.timestamp())if document.completed_at else None,
'completed_at': int(document.completed_at.timestamp()) if document.completed_at else None,
'updated_at': int(document.updated_at.timestamp()) if document.updated_at else None,
'indexing_latency': document.indexing_latency,
'error': document.error,
@@ -576,6 +752,8 @@ class DocumentStatusApi(DocumentResource):
return {'result': 'success'}, 200
elif action == "disable":
if not document.completed_at or document.indexing_status != 'completed':
raise InvalidActionError('Document is not completed.')
if not document.enabled:
raise InvalidActionError('Document already disabled.')
@@ -675,6 +853,10 @@ api.add_resource(DatasetInitApi,
'/datasets/init')
api.add_resource(DocumentIndexingEstimateApi,
'/datasets/<uuid:dataset_id>/documents/<uuid:document_id>/indexing-estimate')
api.add_resource(DocumentBatchIndexingEstimateApi,
'/datasets/<uuid:dataset_id>/batch/<string:batch>/indexing-estimate')
api.add_resource(DocumentBatchIndexingStatusApi,
'/datasets/<uuid:dataset_id>/batch/<string:batch>/indexing-status')
api.add_resource(DocumentIndexingStatusApi,
'/datasets/<uuid:dataset_id>/documents/<uuid:document_id>/indexing-status')
api.add_resource(DocumentDetailApi,

View File

@@ -78,12 +78,14 @@ class DatasetDocumentSegmentListApi(Resource):
parser.add_argument('hit_count_gte', type=int,
default=None, location='args')
parser.add_argument('enabled', type=str, default='all', location='args')
parser.add_argument('keyword', type=str, default=None, location='args')
args = parser.parse_args()
last_id = args['last_id']
limit = min(args['limit'], 100)
status_list = args['status']
hit_count_gte = args['hit_count_gte']
keyword = args['keyword']
query = DocumentSegment.query.filter(
DocumentSegment.document_id == str(document_id),
@@ -104,6 +106,9 @@ class DatasetDocumentSegmentListApi(Resource):
if hit_count_gte is not None:
query = query.filter(DocumentSegment.hit_count >= hit_count_gte)
if keyword:
query = query.where(DocumentSegment.content.ilike(f'%{keyword}%'))
if args['enabled'].lower() != 'all':
if args['enabled'].lower() == 'true':
query = query.filter(DocumentSegment.enabled == True)
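
The new keyword argument becomes a case-insensitive substring match on segment content. The same ilike pattern in a self-contained sketch; the Segment model and in-memory database are stand-ins, not project code:

from sqlalchemy import Column, String, Text, create_engine
from sqlalchemy.orm import Session, declarative_base

Base = declarative_base()

class Segment(Base):  # stand-in for DocumentSegment
    __tablename__ = 'segments'
    id = Column(String, primary_key=True)
    content = Column(Text)

engine = create_engine('sqlite://')
Base.metadata.create_all(engine)

with Session(engine) as s:
    s.add(Segment(id='1', content='Tuning the Weaviate batch size'))
    s.commit()
    keyword = 'batch'
    hits = s.query(Segment).filter(Segment.content.ilike(f'%{keyword}%')).all()
    print([h.id for h in hits])  # ['1']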

View File

@@ -1,6 +1,7 @@
import datetime
import hashlib
import tempfile
import chardet
import time
import uuid
from pathlib import Path
@@ -18,6 +19,7 @@ from controllers.console.setup import setup_required
from controllers.console.wraps import account_initialization_required
from core.index.readers.html_parser import HTMLParser
from core.index.readers.pdf_parser import PDFParser
from core.index.readers.xlsx_parser import XLSXParser
from extensions.ext_storage import storage
from libs.helper import TimestampField
from extensions.ext_database import db
@@ -26,7 +28,7 @@ from models.model import UploadFile
cache = TTLCache(maxsize=None, ttl=30)
FILE_SIZE_LIMIT = 15 * 1024 * 1024 # 15MB
ALLOWED_EXTENSIONS = ['txt', 'markdown', 'md', 'pdf', 'html', 'htm']
ALLOWED_EXTENSIONS = ['txt', 'markdown', 'md', 'pdf', 'html', 'htm', 'xlsx']
PREVIEW_WORDS_LIMIT = 3000
@@ -133,11 +135,18 @@ class FilePreviewApi(Resource):
# Use BeautifulSoup to extract text
parser = HTMLParser()
text = parser.parse_file(Path(filepath))
elif extension == 'xlsx':
parser = XLSXParser()
text = parser.parse_file(filepath)
else:
# ['txt', 'markdown', 'md']
with open(filepath, "rb") as fp:
data = fp.read()
text = data.decode(encoding='utf-8').strip() if data else ''
encoding = chardet.detect(data)['encoding']
if encoding:
text = data.decode(encoding=encoding).strip() if data else ''
else:
text = data.decode(encoding='utf-8').strip() if data else ''
text = text[0:PREVIEW_WORDS_LIMIT] if text else ''
return {'content': text}
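
The detect-then-decode fallback shown above generalizes to a small helper; a sketch assuming chardet is installed:

import chardet

def decode_bytes(data: bytes) -> str:
    """Decode with the detected encoding, falling back to UTF-8."""
    if not data:
        return ''
    encoding = chardet.detect(data)['encoding']
    try:
        return data.decode(encoding or 'utf-8').strip()
    except (LookupError, UnicodeDecodeError):
        return data.decode('utf-8', errors='ignore').strip()

print(decode_bytes('编码测试'.encode('gb2312')))  # decoded via the detected GB encoding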

View File

@@ -69,12 +69,16 @@ class DocumentListApi(DatasetApiResource):
document_data = {
'data_source': {
'type': 'upload_file',
'info': upload_file.id
'info': [
{
'upload_file_id': upload_file.id
}
]
}
}
try:
document = DocumentService.save_document_with_dataset_id(
documents, batch = DocumentService.save_document_with_dataset_id(
dataset=dataset,
document_data=document_data,
account=dataset.created_by_account,
@@ -83,7 +87,7 @@ class DocumentListApi(DatasetApiResource):
)
except ProviderTokenNotInitError:
raise ProviderNotInitializeError()
document = documents[0]
if doc_type and doc_metadata:
metadata_schema = DocumentService.DOCUMENT_METADATA_SCHEMA[doc_type]

View File

@@ -16,7 +16,7 @@ def validate_token(view=None):
def decorated(*args, **kwargs):
site = validate_and_get_site()
app_model = db.session.query(App).get(site.app_id)
app_model = db.session.query(App).filter(App.id == site.app_id).first()
if not app_model:
raise NotFound()

View File

@@ -75,7 +75,12 @@ class LLMCallbackHandler(BaseCallbackHandler):
self.conversation_message_task.save_message(self.llm_message)
def on_llm_new_token(self, token: str, **kwargs: Any) -> None:
self.conversation_message_task.append_message_text(token)
try:
self.conversation_message_task.append_message_text(token)
except ConversationTaskStoppedException as ex:
self.on_llm_error(error=ex)
raise ex
self.llm_message.completion += token
def on_llm_error(

View File

@@ -11,6 +11,8 @@ from langchain.chains import LLMChain
from langchain.prompts import BasePromptTemplate
from langchain.schema import BaseOutputParser, OutputParserException, BaseLanguageModel
from libs.json_in_md_parser import parse_and_check_json_markdown
class Route(NamedTuple):
destination: Optional[str]
@@ -82,38 +84,10 @@ class RouterOutputParser(BaseOutputParser[Dict[str, str]]):
next_inputs_type: Type = str
next_inputs_inner_key: str = "input"
def parse_json_markdown(self, json_string: str) -> dict:
# Remove the triple backticks if present
start_index = json_string.find("```json")
end_index = json_string.find("```", start_index + len("```json"))
if start_index != -1 and end_index != -1:
extracted_content = json_string[start_index + len("```json"):end_index].strip()
# Parse the JSON string into a Python dictionary
parsed = json.loads(extracted_content)
else:
raise Exception("Could not find JSON block in the output.")
return parsed
def parse_and_check_json_markdown(self, text: str, expected_keys: List[str]) -> dict:
try:
json_obj = self.parse_json_markdown(text)
except json.JSONDecodeError as e:
raise OutputParserException(f"Got invalid JSON object. Error: {e}")
for key in expected_keys:
if key not in json_obj:
raise OutputParserException(
f"Got invalid return object. Expected key `{key}` "
f"to be present, but got {json_obj}"
)
return json_obj
def parse(self, text: str) -> Dict[str, Any]:
try:
expected_keys = ["destination", "next_inputs"]
parsed = self.parse_and_check_json_markdown(text, expected_keys)
parsed = parse_and_check_json_markdown(text, expected_keys)
if not isinstance(parsed["destination"], str):
raise ValueError("Expected 'destination' to be a string.")
if not isinstance(parsed["next_inputs"], self.next_inputs_type):
@@ -131,5 +105,5 @@ class RouterOutputParser(BaseOutputParser[Dict[str, str]]):
return parsed
except Exception as e:
raise OutputParserException(
f"Parsing text\n{text}\n raised following error:\n{e}"
f"Parsing text\n{text}\n of llm router raised following error:\n{e}"
)
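
With the duplicated methods deleted, the router parser consumes the shared helper from libs.json_in_md_parser (the new file shown later in this compare). A usage sketch, runnable inside the api package:

from libs.json_in_md_parser import parse_and_check_json_markdown

text = '''```json
{"destination": "DEFAULT", "next_inputs": "What is Dify?"}
```'''
parsed = parse_and_check_json_markdown(text, ["destination", "next_inputs"])
print(parsed["destination"])  # DEFAULT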

View File

@@ -23,7 +23,8 @@ think that revising it will ultimately lead to a better response from the langua
model.
<< FORMATTING >>
Return a markdown code snippet with a JSON object formatted to look like:
Return a markdown code snippet with a JSON object formatted to look like the following, \
with no other text outside the markdown code snippet:
```json
{{{{
"destination": string \\ name of the prompt to use or "DEFAULT"
@@ -110,7 +111,10 @@ class MultiDatasetRouterChain(Chain):
response_mode='no_synthesizer', # "compact"
callback_handler=DatasetToolCallbackHandler(conversation_message_task)
)
dataset_tools[dataset.id] = dataset_tool
if dataset_tool:
dataset_tools[dataset.id] = dataset_tool
return cls(
router_chain=router_chain,
dataset_tools=dataset_tools,

View File

@@ -4,6 +4,7 @@ models = {
'gpt-4': 'openai', # 8,192 tokens
'gpt-4-32k': 'openai', # 32,768 tokens
'gpt-3.5-turbo': 'openai', # 4,096 tokens
'gpt-3.5-turbo-16k': 'openai', # 16384 tokens
'text-davinci-003': 'openai', # 4,097 tokens
'text-davinci-002': 'openai', # 4,097 tokens
'text-curie-001': 'openai', # 2,049 tokens
@@ -16,6 +17,7 @@ max_context_token_length = {
'gpt-4': 8192,
'gpt-4-32k': 32768,
'gpt-3.5-turbo': 4096,
'gpt-3.5-turbo-16k': 16384,
'text-davinci-003': 4097,
'text-davinci-002': 4097,
'text-curie-001': 2049,
@@ -29,11 +31,13 @@ models_by_mode = {
'gpt-4', # 8,192 tokens
'gpt-4-32k', # 32,768 tokens
'gpt-3.5-turbo', # 4,096 tokens
'gpt-3.5-turbo-16k', # 16,384 tokens
],
'completion': [
'gpt-4', # 8,192 tokens
'gpt-4-32k', # 32,768 tokens
'gpt-3.5-turbo', # 4,096 tokens
'gpt-3.5-turbo-16k', # 16,384 tokens
'text-davinci-003', # 4,097 tokens
'text-davinci-002' # 4,097 tokens
'text-curie-001', # 2,049 tokens
@@ -57,9 +61,13 @@ model_prices = {
'completion': Decimal('0.12')
},
'gpt-3.5-turbo': {
'prompt': Decimal('0.002'),
'prompt': Decimal('0.0015'),
'completion': Decimal('0.002')
},
'gpt-3.5-turbo-16k': {
'prompt': Decimal('0.003'),
'completion': Decimal('0.004')
},
'text-davinci-003': {
'prompt': Decimal('0.02'),
'completion': Decimal('0.02')
@@ -77,7 +85,7 @@ model_prices = {
'completion': Decimal('0.0004')
},
'text-embedding-ada-002': {
'usage': Decimal('0.0004'),
'usage': Decimal('0.0001'),
}
}
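
These prices read as USD per 1,000 tokens, with Decimal keeping the arithmetic exact. A worked example under that assumption, using the gpt-3.5-turbo-16k rates added above:

from decimal import Decimal

def cost(prompt_tokens: int, completion_tokens: int,
         prompt_price: Decimal, completion_price: Decimal) -> Decimal:
    # prices assumed to be USD per 1,000 tokens
    return (prompt_tokens * prompt_price
            + completion_tokens * completion_price) / 1000

print(cost(2000, 500, Decimal('0.003'), Decimal('0.004')))  # 0.008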

View File

@@ -0,0 +1,369 @@
"""Notion reader."""
import json
import logging
import os
from datetime import datetime
from typing import Any, Dict, List, Optional
import requests # type: ignore
from llama_index.readers.base import BaseReader
from llama_index.readers.schema.base import Document
INTEGRATION_TOKEN_NAME = "NOTION_INTEGRATION_TOKEN"
BLOCK_CHILD_URL_TMPL = "https://api.notion.com/v1/blocks/{block_id}/children"
DATABASE_URL_TMPL = "https://api.notion.com/v1/databases/{database_id}/query"
SEARCH_URL = "https://api.notion.com/v1/search"
RETRIEVE_PAGE_URL_TMPL = "https://api.notion.com/v1/pages/{page_id}"
RETRIEVE_DATABASE_URL_TMPL = "https://api.notion.com/v1/databases/{database_id}"
HEADING_TYPE = ['heading_1', 'heading_2', 'heading_3']
logger = logging.getLogger(__name__)
# TODO: Notion DB reader coming soon!
class NotionPageReader(BaseReader):
"""Notion Page reader.
Reads a set of Notion pages.
Args:
integration_token (str): Notion integration token.
"""
def __init__(self, integration_token: Optional[str] = None) -> None:
"""Initialize with parameters."""
if integration_token is None:
integration_token = os.getenv(INTEGRATION_TOKEN_NAME)
if integration_token is None:
raise ValueError(
"Must specify `integration_token` or set environment "
"variable `NOTION_INTEGRATION_TOKEN`."
)
self.token = integration_token
self.headers = {
"Authorization": "Bearer " + self.token,
"Content-Type": "application/json",
"Notion-Version": "2022-06-28",
}
def _read_block(self, block_id: str, num_tabs: int = 0) -> str:
"""Read a block."""
done = False
result_lines_arr = []
cur_block_id = block_id
while not done:
block_url = BLOCK_CHILD_URL_TMPL.format(block_id=cur_block_id)
query_dict: Dict[str, Any] = {}
res = requests.request(
"GET", block_url, headers=self.headers, json=query_dict
)
data = res.json()
if 'results' not in data or data["results"] is None:
done = True
break
heading = ''
for result in data["results"]:
result_type = result["type"]
result_obj = result[result_type]
cur_result_text_arr = []
if result_type == 'table':
result_block_id = result["id"]
text = self._read_table_rows(result_block_id)
result_lines_arr.append(text)
else:
if "rich_text" in result_obj:
for rich_text in result_obj["rich_text"]:
# skip if doesn't have text object
if "text" in rich_text:
text = rich_text["text"]["content"]
prefix = "\t" * num_tabs
cur_result_text_arr.append(prefix + text)
if result_type in HEADING_TYPE:
heading = text
result_block_id = result["id"]
has_children = result["has_children"]
block_type = result["type"]
if has_children and block_type != 'child_page':
children_text = self._read_block(
result_block_id, num_tabs=num_tabs + 1
)
cur_result_text_arr.append(children_text)
cur_result_text = "\n".join(cur_result_text_arr)
if result_type in HEADING_TYPE:
result_lines_arr.append(cur_result_text)
else:
result_lines_arr.append(f'{heading}\n{cur_result_text}')
if data["next_cursor"] is None:
done = True
break
else:
cur_block_id = data["next_cursor"]
result_lines = "\n".join(result_lines_arr)
return result_lines
def _read_table_rows(self, block_id: str) -> str:
"""Read table rows."""
done = False
result_lines_arr = []
cur_block_id = block_id
while not done:
block_url = BLOCK_CHILD_URL_TMPL.format(block_id=cur_block_id)
query_dict: Dict[str, Any] = {}
res = requests.request(
"GET", block_url, headers=self.headers, json=query_dict
)
data = res.json()
# get table headers text
table_header_cell_texts = []
table_header_cells = data["results"][0]['table_row']['cells']
for table_header_cell in table_header_cells:
if table_header_cell:
for table_header_cell_text in table_header_cell:
text = table_header_cell_text["text"]["content"]
table_header_cell_texts.append(text)
# get table columns text and format
results = data["results"]
for i in range(len(results)-1):
column_texts = []
table_column_cells = data["results"][i+1]['table_row']['cells']
for j in range(len(table_column_cells)):
if table_column_cells[j]:
for table_column_cell_text in table_column_cells[j]:
column_text = table_column_cell_text["text"]["content"]
column_texts.append(f'{table_header_cell_texts[j]}:{column_text}')
cur_result_text = "\n".join(column_texts)
result_lines_arr.append(cur_result_text)
if data["next_cursor"] is None:
done = True
break
else:
cur_block_id = data["next_cursor"]
result_lines = "\n".join(result_lines_arr)
return result_lines
def _read_parent_blocks(self, block_id: str, num_tabs: int = 0) -> List[str]:
"""Read a block."""
done = False
result_lines_arr = []
cur_block_id = block_id
while not done:
block_url = BLOCK_CHILD_URL_TMPL.format(block_id=cur_block_id)
query_dict: Dict[str, Any] = {}
res = requests.request(
"GET", block_url, headers=self.headers, json=query_dict
)
data = res.json()
# current block's heading
heading = ''
for result in data["results"]:
result_type = result["type"]
result_obj = result[result_type]
cur_result_text_arr = []
if result_type == 'table':
result_block_id = result["id"]
text = self._read_table_rows(result_block_id)
text += "\n\n"
result_lines_arr.append(text)
else:
if "rich_text" in result_obj:
for rich_text in result_obj["rich_text"]:
# skip if doesn't have text object
if "text" in rich_text:
text = rich_text["text"]["content"]
cur_result_text_arr.append(text)
if result_type in HEADING_TYPE:
heading = text
result_block_id = result["id"]
has_children = result["has_children"]
block_type = result["type"]
if has_children and block_type != 'child_page':
children_text = self._read_block(
result_block_id, num_tabs=num_tabs + 1
)
cur_result_text_arr.append(children_text)
cur_result_text = "\n".join(cur_result_text_arr)
cur_result_text += "\n\n"
if result_type in HEADING_TYPE:
result_lines_arr.append(cur_result_text)
else:
result_lines_arr.append(f'{heading}\n{cur_result_text}')
if data["next_cursor"] is None:
done = True
break
else:
cur_block_id = data["next_cursor"]
return result_lines_arr
def read_page(self, page_id: str) -> str:
"""Read a page."""
return self._read_block(page_id)
def read_page_as_documents(self, page_id: str) -> List[str]:
"""Read a page as documents."""
return self._read_parent_blocks(page_id)
def query_database_data(
self, database_id: str, query_dict: Dict[str, Any] = {}
) -> str:
"""Get all the pages from a Notion database."""
res = requests.post(
DATABASE_URL_TMPL.format(database_id=database_id),
headers=self.headers,
json=query_dict,
)
data = res.json()
database_content_list = []
if 'results' not in data or data["results"] is None:
return ""
for result in data["results"]:
properties = result['properties']
data = {}
for property_name, property_value in properties.items():
type = property_value['type']
if type == 'multi_select':
value = []
multi_select_list = property_value[type]
for multi_select in multi_select_list:
value.append(multi_select['name'])
elif type == 'rich_text' or type == 'title':
if len(property_value[type]) > 0:
value = property_value[type][0]['plain_text']
else:
value = ''
elif type == 'select' or type == 'status':
if property_value[type]:
value = property_value[type]['name']
else:
value = ''
else:
value = property_value[type]
data[property_name] = value
database_content_list.append(json.dumps(data))
return "\n\n".join(database_content_list)
def query_database(
self, database_id: str, query_dict: Dict[str, Any] = {}
) -> List[str]:
"""Get all the pages from a Notion database."""
res = requests.post(
DATABASE_URL_TMPL.format(database_id=database_id),
headers=self.headers,
json=query_dict,
)
data = res.json()
page_ids = []
for result in data["results"]:
page_id = result["id"]
page_ids.append(page_id)
return page_ids
def search(self, query: str) -> List[str]:
"""Search Notion page given a text query."""
done = False
next_cursor: Optional[str] = None
page_ids = []
while not done:
query_dict = {
"query": query,
}
if next_cursor is not None:
query_dict["start_cursor"] = next_cursor
res = requests.post(SEARCH_URL, headers=self.headers, json=query_dict)
data = res.json()
for result in data["results"]:
page_id = result["id"]
page_ids.append(page_id)
if data["next_cursor"] is None:
done = True
break
else:
next_cursor = data["next_cursor"]
return page_ids
def load_data(
self, page_ids: List[str] = [], database_id: Optional[str] = None
) -> List[Document]:
"""Load data from the input directory.
Args:
page_ids (List[str]): List of page ids to load.
Returns:
List[Document]: List of documents.
"""
if not page_ids and not database_id:
raise ValueError("Must specify either `page_ids` or `database_id`.")
docs = []
if database_id is not None:
# get all the pages in the database
page_ids = self.query_database(database_id)
for page_id in page_ids:
page_text = self.read_page(page_id)
docs.append(Document(page_text))
else:
for page_id in page_ids:
page_text = self.read_page(page_id)
docs.append(Document(page_text))
return docs
def load_data_as_documents(
self, page_ids: List[str] = [], database_id: Optional[str] = None
) -> List[Document]:
if not page_ids and not database_id:
raise ValueError("Must specify either `page_ids` or `database_id`.")
docs = []
if database_id is not None:
# get all the pages in the database
page_text = self.query_database_data(database_id)
docs.append(Document(page_text))
else:
for page_id in page_ids:
page_text_list = self.read_page_as_documents(page_id)
for page_text in page_text_list:
docs.append(Document(page_text))
return docs
def get_page_last_edited_time(self, page_id: str) -> str:
retrieve_page_url = RETRIEVE_PAGE_URL_TMPL.format(page_id=page_id)
query_dict: Dict[str, Any] = {}
res = requests.request(
"GET", retrieve_page_url, headers=self.headers, json=query_dict
)
data = res.json()
return data["last_edited_time"]
def get_database_last_edited_time(self, database_id: str) -> str:
retrieve_page_url = RETRIEVE_DATABASE_URL_TMPL.format(database_id=database_id)
query_dict: Dict[str, Any] = {}
res = requests.request(
"GET", retrieve_page_url, headers=self.headers, json=query_dict
)
data = res.json()
return data["last_edited_time"]
if __name__ == "__main__":
reader = NotionPageReader()
logger.info(reader.search("What I"))
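
A usage sketch for the reader; the token comes from the environment and the page ID is a placeholder:

import os
from core.data_source.notion import NotionPageReader  # assumed import path

reader = NotionPageReader(integration_token=os.environ['NOTION_INTEGRATION_TOKEN'])
docs = reader.load_data_as_documents(page_ids=['<page-uuid>'])
print(len(docs), 'document chunks')
print(reader.get_page_last_edited_time('<page-uuid>'))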

View File

@@ -1,12 +1,13 @@
import logging
from langchain.chat_models.base import BaseChatModel
from langchain.schema import HumanMessage
from langchain.schema import HumanMessage, OutputParserException
from core.constant import llm_constant
from core.llm.llm_builder import LLMBuilder
from core.llm.streamable_open_ai import StreamableOpenAI
from core.llm.token_calculator import TokenCalculator
from core.prompt.output_parser.rule_config_generator import RuleConfigGeneratorOutputParser
from core.prompt.output_parser.suggested_questions_after_answer import SuggestedQuestionsAfterAnswerOutputParser
from core.prompt.prompt_template import OutLinePromptTemplate
@@ -118,3 +119,48 @@ class LLMGenerator:
questions = []
return questions
@classmethod
def generate_rule_config(cls, tenant_id: str, audiences: str, hoping_to_solve: str) -> dict:
output_parser = RuleConfigGeneratorOutputParser()
prompt = OutLinePromptTemplate(
template=output_parser.get_format_instructions(),
input_variables=["audiences", "hoping_to_solve"],
partial_variables={
"variable": '{variable}',
"lanA": '{lanA}',
"lanB": '{lanB}',
"topic": '{topic}'
},
validate_template=False
)
_input = prompt.format_prompt(audiences=audiences, hoping_to_solve=hoping_to_solve)
llm: StreamableOpenAI = LLMBuilder.to_llm(
tenant_id=tenant_id,
model_name=generate_base_model,
temperature=0,
max_tokens=512
)
if isinstance(llm, BaseChatModel):
query = [HumanMessage(content=_input.to_string())]
else:
query = _input.to_string()
try:
output = llm(query)
rule_config = output_parser.parse(output)
except OutputParserException:
raise ValueError('Please give a valid input for intended audience or hoping to solve problems.')
except Exception:
logging.exception("Error generating prompt")
rule_config = {
"prompt": "",
"variables": [],
"opening_statement": ""
}
return rule_config
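
A call-site sketch for the new generator, assuming a tenant with a configured OpenAI provider; the import path is an assumption:

from core.generator.llm_generator import LLMGenerator  # assumed path

rule_config = LLMGenerator.generate_rule_config(
    tenant_id='<tenant-uuid>',
    audiences='indie game developers',
    hoping_to_solve='writing patch notes quickly'
)
print(rule_config['prompt'])
print(rule_config['variables'])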

View File

@@ -0,0 +1,111 @@
"""Markdown parser.
Contains parser for md files.
"""
import re
from pathlib import Path
from typing import Any, Dict, List, Optional, Tuple, Union, cast
from llama_index.readers.file.base_parser import BaseParser
class MarkdownParser(BaseParser):
"""Markdown parser.
Extract text from markdown files.
Returns dictionary with keys as headers and values as the text between headers.
"""
def __init__(
self,
*args: Any,
remove_hyperlinks: bool = True,
remove_images: bool = True,
**kwargs: Any,
) -> None:
"""Init params."""
super().__init__(*args, **kwargs)
self._remove_hyperlinks = remove_hyperlinks
self._remove_images = remove_images
def markdown_to_tups(self, markdown_text: str) -> List[Tuple[Optional[str], str]]:
"""Convert a markdown file to a dictionary.
The keys are the headers and the values are the text under each header.
"""
markdown_tups: List[Tuple[Optional[str], str]] = []
lines = markdown_text.split("\n")
current_header = None
current_text = ""
for line in lines:
header_match = re.match(r"^#+\s", line)
if header_match:
if current_header is not None:
markdown_tups.append((current_header, current_text))
current_header = line
current_text = ""
else:
current_text += line + "\n"
markdown_tups.append((current_header, current_text))
if current_header is not None:
# pass linting, assert keys are defined
markdown_tups = [
(re.sub(r"#", "", cast(str, key)).strip(), re.sub(r"<.*?>", "", value))
for key, value in markdown_tups
]
else:
markdown_tups = [
(key, re.sub("\n", "", value)) for key, value in markdown_tups
]
return markdown_tups
def remove_images(self, content: str) -> str:
"""Get a dictionary of a markdown file from its path."""
pattern = r"!{1}\[\[(.*)\]\]"
content = re.sub(pattern, "", content)
return content
def remove_hyperlinks(self, content: str) -> str:
"""Get a dictionary of a markdown file from its path."""
pattern = r"\[(.*?)\]\((.*?)\)"
content = re.sub(pattern, r"\1", content)
return content
def _init_parser(self) -> Dict:
"""Initialize the parser with the config."""
return {}
def parse_tups(
self, filepath: Path, errors: str = "ignore"
) -> List[Tuple[Optional[str], str]]:
"""Parse file into tuples."""
with open(filepath, "r", encoding="utf-8") as f:
content = f.read()
if self._remove_hyperlinks:
content = self.remove_hyperlinks(content)
if self._remove_images:
content = self.remove_images(content)
markdown_tups = self.markdown_to_tups(content)
return markdown_tups
def parse_file(
self, filepath: Path, errors: str = "ignore"
) -> Union[str, List[str]]:
"""Parse file into string."""
tups = self.parse_tups(filepath, errors=errors)
results = []
# TODO: don't include headers right now
for header, value in tups:
if header is None:
results.append(value)
else:
results.append(f"\n\n{header}\n{value}")
return results
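
A quick check of the header/body splitting, writing a temporary file first; the import path is an assumption:

import tempfile
from pathlib import Path
from core.index.readers.markdown_parser import MarkdownParser  # assumed path

md = "# Title\nIntro text.\n## Section\nBody with a [link](https://example.com).\n"
with tempfile.NamedTemporaryFile('w', suffix='.md', delete=False) as f:
    f.write(md)

parser = MarkdownParser()
for header, body in parser.parse_tups(Path(f.name)):
    print(repr(header), '->', repr(body.strip()))
# hyperlinks collapse to their anchor text: 'link'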

View File

@@ -0,0 +1,31 @@
from pathlib import Path
import json
from typing import Dict
from openpyxl import load_workbook
from llama_index.readers.file.base_parser import BaseParser
from flask import current_app
class XLSXParser(BaseParser):
"""XLSX parser."""
def _init_parser(self) -> Dict:
"""Init parser"""
return {}
def parse_file(self, file: Path, errors: str = "ignore") -> str:
data = []
keys = []
# openpyxl opens the workbook itself; no need to open the file separately
wb = load_workbook(filename=file, read_only=True)
# loop over all sheets
for sheet in wb:
for row in sheet.iter_rows(values_only=True):
if all(v is None for v in row):
continue
if keys == []:
keys = list(map(str, row))
else:
data.append(json.dumps(dict(zip(keys, list(map(str, row)))), ensure_ascii=False))
return '\n\n'.join(data)
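
Each non-empty row after the header row becomes one JSON object per line. A sketch that builds a workbook with openpyxl first; the import path is an assumption:

from pathlib import Path
from openpyxl import Workbook
from core.index.readers.xlsx_parser import XLSXParser  # assumed path

wb = Workbook()
ws = wb.active
ws.append(['name', 'price'])
ws.append(['book', 12])
wb.save('demo.xlsx')

print(XLSXParser().parse_file(Path('demo.xlsx')))
# {"name": "book", "price": "12"}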

View File

@@ -5,6 +5,8 @@ import tempfile
import time
from pathlib import Path
from typing import Optional, List
from flask_login import current_user
from langchain.text_splitter import RecursiveCharacterTextSplitter
from llama_index import SimpleDirectoryReader
@@ -14,9 +16,12 @@ from llama_index.node_parser import SimpleNodeParser, NodeParser
from llama_index.readers.file.base import DEFAULT_FILE_EXTRACTOR
from llama_index.readers.file.markdown_parser import MarkdownParser
from core.data_source.notion import NotionPageReader
from core.index.readers.xlsx_parser import XLSXParser
from core.docstore.dataset_docstore import DatesetDocumentStore
from core.index.keyword_table_index import KeywordTableIndex
from core.index.readers.html_parser import HTMLParser
from core.index.readers.markdown_parser import MarkdownParser
from core.index.readers.pdf_parser import PDFParser
from core.index.spiltter.fixed_text_splitter import FixedRecursiveCharacterTextSplitter
from core.index.vector_index import VectorIndex
@@ -26,6 +31,7 @@ from extensions.ext_redis import redis_client
from extensions.ext_storage import storage
from models.dataset import Document, Dataset, DocumentSegment, DatasetProcessRule
from models.model import UploadFile
from models.source import DataSourceBinding
class IndexingRunner:
@@ -34,42 +40,43 @@ class IndexingRunner:
self.storage = storage
self.embedding_model_name = embedding_model_name
def run(self, document: Document):
def run(self, documents: List[Document]):
"""Run the indexing process."""
# get dataset
dataset = Dataset.query.filter_by(
id=document.dataset_id
).first()
for document in documents:
# get dataset
dataset = Dataset.query.filter_by(
id=document.dataset_id
).first()
if not dataset:
raise ValueError("no dataset found")
if not dataset:
raise ValueError("no dataset found")
# load file
text_docs = self._load_data(document)
# load file
text_docs = self._load_data(document)
# get the process rule
processing_rule = db.session.query(DatasetProcessRule). \
filter(DatasetProcessRule.id == document.dataset_process_rule_id). \
first()
# get the process rule
processing_rule = db.session.query(DatasetProcessRule). \
filter(DatasetProcessRule.id == document.dataset_process_rule_id). \
first()
# get node parser for splitting
node_parser = self._get_node_parser(processing_rule)
# get node parser for splitting
node_parser = self._get_node_parser(processing_rule)
# split to nodes
nodes = self._step_split(
text_docs=text_docs,
node_parser=node_parser,
dataset=dataset,
document=document,
processing_rule=processing_rule
)
# split to nodes
nodes = self._step_split(
text_docs=text_docs,
node_parser=node_parser,
dataset=dataset,
document=document,
processing_rule=processing_rule
)
# build index
self._build_index(
dataset=dataset,
document=document,
nodes=nodes
)
# build index
self._build_index(
dataset=dataset,
document=document,
nodes=nodes
)
def run_in_splitting_status(self, document: Document):
"""Run the indexing process when the index_status is splitting."""
@@ -163,38 +170,98 @@ class IndexingRunner:
nodes=nodes
)
def indexing_estimate(self, file_detail: UploadFile, tmp_processing_rule: dict) -> dict:
def file_indexing_estimate(self, file_details: List[UploadFile], tmp_processing_rule: dict) -> dict:
"""
Estimate the indexing for the document.
"""
# load data from file
text_docs = self._load_data_from_file(file_detail)
processing_rule = DatasetProcessRule(
mode=tmp_processing_rule["mode"],
rules=json.dumps(tmp_processing_rule["rules"])
)
# get node parser for splitting
node_parser = self._get_node_parser(processing_rule)
# split to nodes
nodes = self._split_to_nodes(
text_docs=text_docs,
node_parser=node_parser,
processing_rule=processing_rule
)
tokens = 0
preview_texts = []
for node in nodes:
if len(preview_texts) < 5:
preview_texts.append(node.get_text())
total_segments = 0
for file_detail in file_details:
# load data from file
text_docs = self._load_data_from_file(file_detail)
tokens += TokenCalculator.get_num_tokens(self.embedding_model_name, node.get_text())
processing_rule = DatasetProcessRule(
mode=tmp_processing_rule["mode"],
rules=json.dumps(tmp_processing_rule["rules"])
)
# get node parser for splitting
node_parser = self._get_node_parser(processing_rule)
# split to nodes
nodes = self._split_to_nodes(
text_docs=text_docs,
node_parser=node_parser,
processing_rule=processing_rule
)
total_segments += len(nodes)
for node in nodes:
if len(preview_texts) < 5:
preview_texts.append(node.get_text())
tokens += TokenCalculator.get_num_tokens(self.embedding_model_name, node.get_text())
return {
"total_segments": len(nodes),
"total_segments": total_segments,
"tokens": tokens,
"total_price": '{:f}'.format(TokenCalculator.get_token_price(self.embedding_model_name, tokens)),
"currency": TokenCalculator.get_currency(self.embedding_model_name),
"preview": preview_texts
}
def notion_indexing_estimate(self, notion_info_list: list, tmp_processing_rule: dict) -> dict:
"""
Estimate the indexing for the document.
"""
# load data from notion
tokens = 0
preview_texts = []
total_segments = 0
for notion_info in notion_info_list:
workspace_id = notion_info['workspace_id']
data_source_binding = DataSourceBinding.query.filter(
db.and_(
DataSourceBinding.tenant_id == current_user.current_tenant_id,
DataSourceBinding.provider == 'notion',
DataSourceBinding.disabled == False,
DataSourceBinding.source_info['workspace_id'] == f'"{workspace_id}"'
)
).first()
if not data_source_binding:
raise ValueError('Data source binding not found.')
reader = NotionPageReader(integration_token=data_source_binding.access_token)
for page in notion_info['pages']:
if page['type'] == 'page':
page_ids = [page['page_id']]
documents = reader.load_data_as_documents(page_ids=page_ids)
elif page['type'] == 'database':
documents = reader.load_data_as_documents(database_id=page['page_id'])
else:
documents = []
processing_rule = DatasetProcessRule(
mode=tmp_processing_rule["mode"],
rules=json.dumps(tmp_processing_rule["rules"])
)
# get node parser for splitting
node_parser = self._get_node_parser(processing_rule)
# split to nodes
nodes = self._split_to_nodes(
text_docs=documents,
node_parser=node_parser,
processing_rule=processing_rule
)
total_segments += len(nodes)
for node in nodes:
if len(preview_texts) < 5:
preview_texts.append(node.get_text())
tokens += TokenCalculator.get_num_tokens(self.embedding_model_name, node.get_text())
return {
"total_segments": total_segments,
"tokens": tokens,
"total_price": '{:f}'.format(TokenCalculator.get_token_price(self.embedding_model_name, tokens)),
"currency": TokenCalculator.get_currency(self.embedding_model_name),
@@ -203,25 +270,50 @@ class IndexingRunner:
def _load_data(self, document: Document) -> List[Document]:
# load file
if document.data_source_type != "upload_file":
if document.data_source_type not in ["upload_file", "notion_import"]:
return []
data_source_info = document.data_source_info_dict
if not data_source_info or 'upload_file_id' not in data_source_info:
raise ValueError("no upload file found")
text_docs = []
if document.data_source_type == 'upload_file':
if not data_source_info or 'upload_file_id' not in data_source_info:
raise ValueError("no upload file found")
file_detail = db.session.query(UploadFile). \
filter(UploadFile.id == data_source_info['upload_file_id']). \
one_or_none()
text_docs = self._load_data_from_file(file_detail)
file_detail = db.session.query(UploadFile). \
filter(UploadFile.id == data_source_info['upload_file_id']). \
one_or_none()
text_docs = self._load_data_from_file(file_detail)
elif document.data_source_type == 'notion_import':
if not data_source_info or 'notion_page_id' not in data_source_info \
or 'notion_workspace_id' not in data_source_info:
raise ValueError("no notion page found")
workspace_id = data_source_info['notion_workspace_id']
page_id = data_source_info['notion_page_id']
page_type = data_source_info['type']
data_source_binding = DataSourceBinding.query.filter(
db.and_(
DataSourceBinding.tenant_id == document.tenant_id,
DataSourceBinding.provider == 'notion',
DataSourceBinding.disabled == False,
DataSourceBinding.source_info['workspace_id'] == f'"{workspace_id}"'
)
).first()
if not data_source_binding:
raise ValueError('Data source binding not found.')
if page_type == 'page':
# add page last_edited_time to data_source_info
self._get_notion_page_last_edited_time(page_id, data_source_binding.access_token, document)
text_docs = self._load_page_data_from_notion(page_id, data_source_binding.access_token)
elif page_type == 'database':
# add page last_edited_time to data_source_info
self._get_notion_database_last_edited_time(page_id, data_source_binding.access_token, document)
text_docs = self._load_database_data_from_notion(page_id, data_source_binding.access_token)
# update document status to splitting
self._update_document_index_status(
document_id=document.id,
after_indexing_status="splitting",
extra_update_params={
Document.file_id: file_detail.id,
Document.word_count: sum([len(text_doc.text) for text_doc in text_docs]),
Document.parsing_completed_at: datetime.datetime.utcnow()
}
@@ -247,15 +339,52 @@ class IndexingRunner:
file_extractor = DEFAULT_FILE_EXTRACTOR.copy()
file_extractor[".markdown"] = MarkdownParser()
file_extractor[".md"] = MarkdownParser()
file_extractor[".html"] = HTMLParser()
file_extractor[".htm"] = HTMLParser()
file_extractor[".pdf"] = PDFParser({'upload_file': upload_file})
file_extractor[".xlsx"] = XLSXParser()
loader = SimpleDirectoryReader(input_files=[filepath], file_extractor=file_extractor)
text_docs = loader.load_data()
return text_docs
def _load_page_data_from_notion(self, page_id: str, access_token: str) -> List[Document]:
page_ids = [page_id]
reader = NotionPageReader(integration_token=access_token)
text_docs = reader.load_data_as_documents(page_ids=page_ids)
return text_docs
def _load_database_data_from_notion(self, database_id: str, access_token: str) -> List[Document]:
reader = NotionPageReader(integration_token=access_token)
text_docs = reader.load_data_as_documents(database_id=database_id)
return text_docs
def _get_notion_page_last_edited_time(self, page_id: str, access_token: str, document: Document):
reader = NotionPageReader(integration_token=access_token)
last_edited_time = reader.get_page_last_edited_time(page_id)
data_source_info = document.data_source_info_dict
data_source_info['last_edited_time'] = last_edited_time
update_params = {
Document.data_source_info: json.dumps(data_source_info)
}
Document.query.filter_by(id=document.id).update(update_params)
db.session.commit()
def _get_notion_database_last_edited_time(self, page_id: str, access_token: str, document: Document):
reader = NotionPageReader(integration_token=access_token)
last_edited_time = reader.get_database_last_edited_time(page_id)
data_source_info = document.data_source_info_dict
data_source_info['last_edited_time'] = last_edited_time
update_params = {
Document.data_source_info: json.dumps(data_source_info)
}
Document.query.filter_by(id=document.id).update(update_params)
db.session.commit()
def _get_node_parser(self, processing_rule: DatasetProcessRule) -> NodeParser:
"""
Get the NodeParser object according to the processing rule.
@@ -305,7 +434,7 @@ class IndexingRunner:
embedding_model_name=self.embedding_model_name,
document_id=document.id
)
# add document segments
doc_store.add_documents(nodes)
# update document status to indexing

View File

@@ -95,7 +95,8 @@ class AzureProvider(BaseProvider):
if not models:
raise ValidateFailedError("Please add deployments for 'text-davinci-003', "
"'gpt-3.5-turbo', 'text-embedding-ada-002'.")
"'gpt-3.5-turbo', 'text-embedding-ada-002' (required) "
"and 'gpt-4', 'gpt-35-turbo-16k' (optional).")
fixed_model_ids = [
'text-davinci-003',
@@ -110,6 +111,8 @@ class AzureProvider(BaseProvider):
if missing_model_ids:
raise ValidateFailedError("Please add deployments for '{}'.".format(", ".join(missing_model_ids)))
except ValidateFailedError as e:
raise e
except AzureAuthenticationError:
raise ValidateFailedError('Validation failed, please check your API Key.')
except (requests.ConnectionError, requests.RequestException):

View File

@@ -0,0 +1,32 @@
from typing import Any
from langchain.schema import BaseOutputParser, OutputParserException
from core.prompt.prompts import RULE_CONFIG_GENERATE_TEMPLATE
from libs.json_in_md_parser import parse_and_check_json_markdown
class RuleConfigGeneratorOutputParser(BaseOutputParser):
def get_format_instructions(self) -> str:
return RULE_CONFIG_GENERATE_TEMPLATE
def parse(self, text: str) -> Any:
try:
expected_keys = ["prompt", "variables", "opening_statement"]
parsed = parse_and_check_json_markdown(text, expected_keys)
if not isinstance(parsed["prompt"], str):
raise ValueError("Expected 'prompt' to be a string.")
if not isinstance(parsed["variables"], list):
raise ValueError(
f"Expected 'variables' to be a list."
)
if not isinstance(parsed["opening_statement"], str):
raise ValueError(
f"Expected 'opening_statement' to be a str."
)
return parsed
except Exception as e:
raise OutputParserException(
f"Parsing text\n{text}\n of rule config generator raised following error:\n{e}"
)
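
A parse sketch against a well-formed model reply; a malformed reply raises OutputParserException instead. The import path is an assumption:

from core.prompt.output_parser.rule_config_generator import RuleConfigGeneratorOutputParser  # assumed path

reply = '''```json
{"prompt": "Translate from {{lanA}} to {{lanB}}",
 "variables": ["lanA", "lanB"],
 "opening_statement": "Welcome to the translate app"}
```'''
parsed = RuleConfigGeneratorOutputParser().parse(reply)
print(parsed['variables'])  # ['lanA', 'lanB']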

View File

@@ -61,3 +61,60 @@ QUERY_KEYWORD_EXTRACT_TEMPLATE_TMPL = (
QUERY_KEYWORD_EXTRACT_TEMPLATE = QueryKeywordExtractPrompt(
QUERY_KEYWORD_EXTRACT_TEMPLATE_TMPL
)
RULE_CONFIG_GENERATE_TEMPLATE = """Given MY INTENDED AUDIENCES and HOPING TO SOLVE using a language model, please select \
the model prompt that best suits the input.
You will be provided with the prompt, variables, and an opening statement.
Only content enclosed in double curly braces in the prompt, such as {{variable}}, counts as a variable; \
nothing else may appear in the variables list.
If you believe revising the original input will result in a better response from the language model, you may \
suggest revisions.
<< FORMATTING >>
Return a markdown code snippet with a JSON object formatted as follows, \
with no other text outside the markdown code snippet:
```json
{{{{
"prompt": string \\ generated prompt
"variables": list of string \\ variables
"opening_statement": string \\ an opening statement to guide users on how to ask questions with generated prompt \
and fill in variables, with a welcome sentence, and keep TLDR.
}}}}
```
<< EXAMPLES >>
[EXAMPLE A]
```json
{
"prompt": "Write a letter about love",
"variables": [],
"opening_statement": "Hi! I'm your love letter writer AI."
}
```
[EXAMPLE B]
```json
{
"prompt": "Translate from {{lanA}} to {{lanB}}",
"variables": ["lanA", "lanB"],
"opening_statement": "Welcome to use translate app"
}
```
[EXAMPLE C]
```json
{
"prompt": "Write a story about {{topic}}",
"variables": ["topic"],
"opening_statement": "I'm your story writer"
}
```
<< MY INTENDED AUDIENCES >>
{audiences}
<< HOPING TO SOLVE >>
{hoping_to_solve}
<< OUTPUT >>
"""

View File

@@ -27,7 +27,8 @@ class VectorStore:
self._client = WeaviateVectorStoreClient(
endpoint=app.config['WEAVIATE_ENDPOINT'],
api_key=app.config['WEAVIATE_API_KEY'],
grpc_enabled=app.config['WEAVIATE_GRPC_ENABLED']
grpc_enabled=app.config['WEAVIATE_GRPC_ENABLED'],
batch_size=app.config['WEAVIATE_BATCH_SIZE']
)
elif self._vector_store == 'qdrant':
self._client = QdrantVectorStoreClient(

View File

@@ -18,21 +18,33 @@ from llama_index.readers.weaviate.utils import (
class WeaviateVectorStoreClient(BaseVectorStoreClient):
def __init__(self, endpoint: str, api_key: str, grpc_enabled: bool):
self._client = self.init_from_config(endpoint, api_key, grpc_enabled)
def __init__(self, endpoint: str, api_key: str, grpc_enabled: bool, batch_size: int):
self._client = self.init_from_config(endpoint, api_key, grpc_enabled, batch_size)
def init_from_config(self, endpoint: str, api_key: str, grpc_enabled: bool):
def init_from_config(self, endpoint: str, api_key: str, grpc_enabled: bool, batch_size: int):
auth_config = weaviate.auth.AuthApiKey(api_key=api_key)
weaviate.connect.connection.has_grpc = grpc_enabled
return weaviate.Client(
client = weaviate.Client(
url=endpoint,
auth_client_secret=auth_config,
timeout_config=(5, 60),
startup_period=None
)
client.batch.configure(
# `batch_size` takes an `int` value to enable auto-batching
# (`None` is used for manual batching)
batch_size=batch_size,
# dynamically update the `batch_size` based on import speed
dynamic=True,
# `timeout_retries` takes an `int` value to retry on timeouts
timeout_retries=3,
)
return client
def get_index(self, service_context: ServiceContext, config: dict) -> GPTVectorStoreIndex:
index_struct = WeaviateIndexDict()
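Since the batching setup above is the heart of this change, a standalone sketch may help; the endpoint and key are placeholders, and 100 stands in for the new WEAVIATE_BATCH_SIZE setting:

```python
import weaviate

# Sketch, assuming a reachable Weaviate instance and placeholder credentials.
client = weaviate.Client(
    url="http://localhost:8080",
    auth_client_secret=weaviate.auth.AuthApiKey(api_key="YOUR-WEAVIATE-KEY"),
    timeout_config=(5, 60),
)
client.batch.configure(
    batch_size=100,     # an int enables auto-batching; None means manual batching
    dynamic=True,       # adapt batch_size to the observed import speed
    timeout_retries=3,  # retry batches that time out
)
```

With `dynamic=True` the configured size is only a starting point, which is why a single integer setting is enough.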

View File

@@ -0,0 +1,44 @@
import json
from typing import List
from langchain.schema import OutputParserException
def parse_json_markdown(json_string: str) -> dict:
# Remove the triple backticks if present
json_string = json_string.strip()
start_index = json_string.find("```json")
end_index = json_string.find("```", start_index + len("```json"))
if start_index != -1 and end_index != -1:
extracted_content = json_string[start_index + len("```json"):end_index].strip()
# Parse the JSON string into a Python dictionary
parsed = json.loads(extracted_content)
elif start_index != -1 and end_index == -1 and json_string.endswith("``"):
# Handle a response whose closing fence was truncated to "``"
end_index = json_string.find("``", start_index + len("```json"))
extracted_content = json_string[start_index + len("```json"):end_index].strip()
# Parse the JSON string into a Python dictionary
parsed = json.loads(extracted_content)
elif json_string.startswith("{"):
# Parse the JSON string into a Python dictionary
parsed = json.loads(json_string)
else:
raise Exception("Could not find JSON block in the output.")
return parsed
def parse_and_check_json_markdown(text: str, expected_keys: List[str]) -> dict:
try:
json_obj = parse_json_markdown(text)
except json.JSONDecodeError as e:
raise OutputParserException(f"Got invalid JSON object. Error: {e}")
for key in expected_keys:
if key not in json_obj:
raise OutputParserException(
f"Got invalid return object. Expected key `{key}` "
f"to be present, but got {json_obj}"
)
return json_obj
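An illustrative round trip through the two helpers above (the input text is made up):

```python
# Sketch: a markdown-wrapped JSON object passes both helpers.
text = '```json\n{"prompt": "p", "variables": [], "opening_statement": "hi"}\n```'
obj = parse_and_check_json_markdown(text, ["prompt", "variables", "opening_statement"])
assert obj["variables"] == []
# Malformed JSON or a missing expected key raises OutputParserException instead.
```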

View File

@@ -1,7 +1,12 @@
import json
import urllib.parse
from dataclasses import dataclass
import requests
from flask_login import current_user
from extensions.ext_database import db
from models.source import DataSourceBinding
@dataclass
@@ -134,3 +139,5 @@ class GoogleOAuth(OAuth):
name=None,
email=raw_info['email']
)

View File

@@ -0,0 +1,312 @@
import json
import urllib.parse
import requests
from flask_login import current_user
from extensions.ext_database import db
from models.source import DataSourceBinding
class OAuthDataSource:
def __init__(self, client_id: str, client_secret: str, redirect_uri: str):
self.client_id = client_id
self.client_secret = client_secret
self.redirect_uri = redirect_uri
def get_authorization_url(self):
raise NotImplementedError()
def get_access_token(self, code: str):
raise NotImplementedError()
class NotionOAuth(OAuthDataSource):
_AUTH_URL = 'https://api.notion.com/v1/oauth/authorize'
_TOKEN_URL = 'https://api.notion.com/v1/oauth/token'
_NOTION_PAGE_SEARCH = "https://api.notion.com/v1/search"
_NOTION_BLOCK_SEARCH = "https://api.notion.com/v1/blocks"
_NOTION_BOT_USER = "https://api.notion.com/v1/users/me"
def get_authorization_url(self):
params = {
'client_id': self.client_id,
'response_type': 'code',
'redirect_uri': self.redirect_uri,
'owner': 'user'
}
return f"{self._AUTH_URL}?{urllib.parse.urlencode(params)}"
def get_access_token(self, code: str):
data = {
'code': code,
'grant_type': 'authorization_code',
'redirect_uri': self.redirect_uri
}
headers = {'Accept': 'application/json'}
auth = (self.client_id, self.client_secret)
response = requests.post(self._TOKEN_URL, data=data, auth=auth, headers=headers)
response_json = response.json()
access_token = response_json.get('access_token')
if not access_token:
raise ValueError(f"Error in Notion OAuth: {response_json}")
workspace_name = response_json.get('workspace_name')
workspace_icon = response_json.get('workspace_icon')
workspace_id = response_json.get('workspace_id')
# get all authorized pages
pages = self.get_authorized_pages(access_token)
source_info = {
'workspace_name': workspace_name,
'workspace_icon': workspace_icon,
'workspace_id': workspace_id,
'pages': pages,
'total': len(pages)
}
# save data source binding
data_source_binding = DataSourceBinding.query.filter(
db.and_(
DataSourceBinding.tenant_id == current_user.current_tenant_id,
DataSourceBinding.provider == 'notion',
DataSourceBinding.access_token == access_token
)
).first()
if data_source_binding:
data_source_binding.source_info = source_info
data_source_binding.disabled = False
db.session.commit()
else:
new_data_source_binding = DataSourceBinding(
tenant_id=current_user.current_tenant_id,
access_token=access_token,
source_info=source_info,
provider='notion'
)
db.session.add(new_data_source_binding)
db.session.commit()
def save_internal_access_token(self, access_token: str):
workspace_name = self.notion_workspace_name(access_token)
workspace_icon = None
workspace_id = current_user.current_tenant_id
# get all authorized pages
pages = self.get_authorized_pages(access_token)
source_info = {
'workspace_name': workspace_name,
'workspace_icon': workspace_icon,
'workspace_id': workspace_id,
'pages': pages,
'total': len(pages)
}
# save data source binding
data_source_binding = DataSourceBinding.query.filter(
db.and_(
DataSourceBinding.tenant_id == current_user.current_tenant_id,
DataSourceBinding.provider == 'notion',
DataSourceBinding.access_token == access_token
)
).first()
if data_source_binding:
data_source_binding.source_info = source_info
data_source_binding.disabled = False
db.session.commit()
else:
new_data_source_binding = DataSourceBinding(
tenant_id=current_user.current_tenant_id,
access_token=access_token,
source_info=source_info,
provider='notion'
)
db.session.add(new_data_source_binding)
db.session.commit()
def sync_data_source(self, binding_id: str):
# save data source binding
data_source_binding = DataSourceBinding.query.filter(
db.and_(
DataSourceBinding.tenant_id == current_user.current_tenant_id,
DataSourceBinding.provider == 'notion',
DataSourceBinding.id == binding_id,
DataSourceBinding.disabled == False
)
).first()
if data_source_binding:
# get all authorized pages
pages = self.get_authorized_pages(data_source_binding.access_token)
source_info = data_source_binding.source_info
new_source_info = {
'workspace_name': source_info['workspace_name'],
'workspace_icon': source_info['workspace_icon'],
'workspace_id': source_info['workspace_id'],
'pages': pages,
'total': len(pages)
}
data_source_binding.source_info = new_source_info
data_source_binding.disabled = False
db.session.commit()
else:
raise ValueError('Data source binding not found')
def get_authorized_pages(self, access_token: str):
pages = []
page_results = self.notion_page_search(access_token)
database_results = self.notion_database_search(access_token)
# get page detail
for page_result in page_results:
page_id = page_result['id']
if 'Name' in page_result['properties']:
if len(page_result['properties']['Name']['title']) > 0:
page_name = page_result['properties']['Name']['title'][0]['plain_text']
else:
page_name = 'Untitled'
elif 'title' in page_result['properties']:
if len(page_result['properties']['title']['title']) > 0:
page_name = page_result['properties']['title']['title'][0]['plain_text']
else:
page_name = 'Untitled'
elif 'Title' in page_result['properties']:
if len(page_result['properties']['Title']['title']) > 0:
page_name = page_result['properties']['Title']['title'][0]['plain_text']
else:
page_name = 'Untitled'
else:
page_name = 'Untitled'
page_icon = page_result['icon']
if page_icon:
icon_type = page_icon['type']
if icon_type == 'external' or icon_type == 'file':
url = page_icon[icon_type]['url']
icon = {
'type': 'url',
'url': url if url.startswith('http') else f'https://www.notion.so{url}'
}
else:
icon = {
'type': 'emoji',
'emoji': page_icon[icon_type]
}
else:
icon = None
parent = page_result['parent']
parent_type = parent['type']
if parent_type == 'block_id':
parent_id = self.notion_block_parent_page_id(access_token, parent[parent_type])
elif parent_type == 'workspace':
parent_id = 'root'
else:
parent_id = parent[parent_type]
page = {
'page_id': page_id,
'page_name': page_name,
'page_icon': icon,
'parent_id': parent_id,
'type': 'page'
}
pages.append(page)
# get database detail
for database_result in database_results:
page_id = database_result['id']
if len(database_result['title']) > 0:
page_name = database_result['title'][0]['plain_text']
else:
page_name = 'Untitled'
page_icon = database_result['icon']
if page_icon:
icon_type = page_icon['type']
if icon_type == 'external' or icon_type == 'file':
url = page_icon[icon_type]['url']
icon = {
'type': 'url',
'url': url if url.startswith('http') else f'https://www.notion.so{url}'
}
else:
icon = {
'type': icon_type,
icon_type: page_icon[icon_type]
}
else:
icon = None
parent = database_result['parent']
parent_type = parent['type']
if parent_type == 'block_id':
parent_id = self.notion_block_parent_page_id(access_token, parent[parent_type])
elif parent_type == 'workspace':
parent_id = 'root'
else:
parent_id = parent[parent_type]
page = {
'page_id': page_id,
'page_name': page_name,
'page_icon': icon,
'parent_id': parent_id,
'type': 'database'
}
pages.append(page)
return pages
def notion_page_search(self, access_token: str):
data = {
'filter': {
"value": "page",
"property": "object"
}
}
headers = {
'Content-Type': 'application/json',
'Authorization': f"Bearer {access_token}",
'Notion-Version': '2022-06-28',
}
response = requests.post(url=self._NOTION_PAGE_SEARCH, json=data, headers=headers)
response_json = response.json()
if 'results' in response_json:
results = response_json['results']
else:
results = []
return results
def notion_block_parent_page_id(self, access_token: str, block_id: str):
headers = {
'Authorization': f"Bearer {access_token}",
'Notion-Version': '2022-06-28',
}
response = requests.get(url=f'{self._NOTION_BLOCK_SEARCH}/{block_id}', headers=headers)
response_json = response.json()
parent = response_json['parent']
parent_type = parent['type']
if parent_type == 'block_id':
return self.notion_block_parent_page_id(access_token, parent[parent_type])
return parent[parent_type]
def notion_workspace_name(self, access_token: str):
headers = {
'Authorization': f"Bearer {access_token}",
'Notion-Version': '2022-06-28',
}
response = requests.get(url=self._NOTION_BOT_USER, headers=headers)
response_json = response.json()
if 'object' in response_json and response_json['object'] == 'user':
user_type = response_json['type']
user_info = response_json[user_type]
if 'workspace_name' in user_info:
return user_info['workspace_name']
return 'workspace'
def notion_database_search(self, access_token: str):
data = {
'filter': {
"value": "database",
"property": "object"
}
}
headers = {
'Content-Type': 'application/json',
'Authorization': f"Bearer {access_token}",
'Notion-Version': '2022-06-28',
}
response = requests.post(url=self._NOTION_PAGE_SEARCH, json=data, headers=headers)
response_json = response.json()
if 'results' in response_json:
results = response_json['results']
else:
results = []
return results
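End to end, the class above is wired roughly as follows; the credentials and redirect URI are placeholders, not values from this change set:

```python
# Hypothetical wiring of the Notion data source; all three arguments are placeholders.
notion = NotionOAuth(
    client_id="NOTION_CLIENT_ID",
    client_secret="NOTION_CLIENT_SECRET",
    redirect_uri="https://example.com/oauth/data-source/callback/notion",
)
url = notion.get_authorization_url()  # send the user here to authorize pages
# ...after Notion redirects back with ?code=...:
# notion.get_access_token(code)  # exchanges the code and saves a DataSourceBinding
```

Note that `get_access_token` and `save_internal_access_token` read `current_user`, so they must run inside an authenticated Flask request.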

View File

@@ -0,0 +1,32 @@
"""add last active at
Revision ID: 614f77cecc48
Revises: a45f4dfde53b
Create Date: 2023-06-15 13:33:00.357467
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = '614f77cecc48'
down_revision = 'a45f4dfde53b'
branch_labels = None
depends_on = None
def upgrade():
# ### commands auto generated by Alembic - please adjust! ###
with op.batch_alter_table('accounts', schema=None) as batch_op:
batch_op.add_column(sa.Column('last_active_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP(0)'), nullable=False))
# ### end Alembic commands ###
def downgrade():
# ### commands auto generated by Alembic - please adjust! ###
with op.batch_alter_table('accounts', schema=None) as batch_op:
batch_op.drop_column('last_active_at')
# ### end Alembic commands ###
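This migration only adds the column; keeping it fresh is a separate concern. As an assumption (not code from this change set), the bookkeeping would look something like:

```python
import datetime

from extensions.ext_database import db

# Assumed helper: run on authenticated requests so accounts.last_active_at stays current.
def touch_last_active(account) -> None:
    account.last_active_at = datetime.datetime.utcnow()
    db.session.commit()
```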

View File

@@ -0,0 +1,46 @@
"""e08af0a69ccefbb59fa80c778efee300bb780980
Revision ID: e32f6ccb87c6
Revises: a45f4dfde53b
Create Date: 2023-06-06 19:58:33.103819
"""
from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects import postgresql
# revision identifiers, used by Alembic.
revision = 'e32f6ccb87c6'
down_revision = '614f77cecc48'
branch_labels = None
depends_on = None
def upgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.create_table('data_source_bindings',
sa.Column('id', postgresql.UUID(), server_default=sa.text('uuid_generate_v4()'), nullable=False),
sa.Column('tenant_id', postgresql.UUID(), nullable=False),
sa.Column('access_token', sa.String(length=255), nullable=False),
sa.Column('provider', sa.String(length=255), nullable=False),
sa.Column('source_info', postgresql.JSONB(astext_type=sa.Text()), nullable=False),
sa.Column('created_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP(0)'), nullable=False),
sa.Column('updated_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP(0)'), nullable=False),
sa.Column('disabled', sa.Boolean(), server_default=sa.text('false'), nullable=True),
sa.PrimaryKeyConstraint('id', name='source_binding_pkey')
)
with op.batch_alter_table('data_source_bindings', schema=None) as batch_op:
batch_op.create_index('source_binding_tenant_id_idx', ['tenant_id'], unique=False)
batch_op.create_index('source_info_idx', ['source_info'], unique=False, postgresql_using='gin')
# ### end Alembic commands ###
def downgrade():
# ### commands auto generated by Alembic - please adjust! ###
with op.batch_alter_table('data_source_bindings', schema=None) as batch_op:
batch_op.drop_index('source_info_idx', postgresql_using='gin')
batch_op.drop_index('source_binding_tenant_id_idx')
op.drop_table('data_source_bindings')
# ### end Alembic commands ###

View File

@@ -32,6 +32,7 @@ class Account(UserMixin, db.Model):
timezone = db.Column(db.String(255))
last_login_at = db.Column(db.DateTime)
last_login_ip = db.Column(db.String(255))
last_active_at = db.Column(db.DateTime, nullable=False, server_default=db.text('CURRENT_TIMESTAMP(0)'))
status = db.Column(db.String(16), nullable=False, server_default=db.text("'active'::character varying"))
initialized_at = db.Column(db.DateTime)
created_at = db.Column(db.DateTime, nullable=False, server_default=db.text('CURRENT_TIMESTAMP(0)'))

View File

@@ -190,7 +190,7 @@ class Document(db.Model):
doc_type = db.Column(db.String(40), nullable=True)
doc_metadata = db.Column(db.JSON, nullable=True)
DATA_SOURCES = ['upload_file']
DATA_SOURCES = ['upload_file', 'notion_import']
@property
def display_status(self):
@@ -242,6 +242,8 @@ class Document(db.Model):
'created_at': file_detail.created_at.timestamp()
}
}
elif self.data_source_type == 'notion_import':
return json.loads(self.data_source_info)
return {}
@property

View File

@@ -304,6 +304,10 @@ class Conversation(db.Model):
def app(self):
return db.session.query(App).filter(App.id == self.app_id).first()
@property
def in_debug_mode(self):
return self.override_model_configs is not None
class Message(db.Model):
__tablename__ = 'messages'
@@ -370,6 +374,10 @@ class Message(db.Model):
return None
@property
def in_debug_mode(self):
return self.override_model_configs is not None
class MessageFeedback(db.Model):
__tablename__ = 'message_feedbacks'

api/models/source.py Normal file
View File

@@ -0,0 +1,21 @@
from sqlalchemy.dialects.postgresql import UUID
from extensions.ext_database import db
from sqlalchemy.dialects.postgresql import JSONB
class DataSourceBinding(db.Model):
__tablename__ = 'data_source_bindings'
__table_args__ = (
db.PrimaryKeyConstraint('id', name='source_binding_pkey'),
db.Index('source_binding_tenant_id_idx', 'tenant_id'),
db.Index('source_info_idx', "source_info", postgresql_using='gin')
)
id = db.Column(UUID, server_default=db.text('uuid_generate_v4()'))
tenant_id = db.Column(UUID, nullable=False)
access_token = db.Column(db.String(255), nullable=False)
provider = db.Column(db.String(255), nullable=False)
source_info = db.Column(JSONB, nullable=False)
created_at = db.Column(db.DateTime, nullable=False, server_default=db.text('CURRENT_TIMESTAMP(0)'))
updated_at = db.Column(db.DateTime, nullable=False, server_default=db.text('CURRENT_TIMESTAMP(0)'))
disabled = db.Column(db.Boolean, nullable=True, server_default=db.text('false'))
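One subtlety worth flagging: several services in this change set filter on `source_info`, and because the column is JSONB the compared value must itself be JSON-encoded, hence the quoted f-string. A sketch, with `tenant_id` and `workspace_id` assumed in scope:

```python
# Sketch of the JSONB lookup pattern used throughout this change set. The
# right-hand side is the JSON-encoded string value, so it keeps its quotes.
binding = DataSourceBinding.query.filter(
    db.and_(
        DataSourceBinding.tenant_id == tenant_id,
        DataSourceBinding.provider == 'notion',
        DataSourceBinding.disabled == False,
        DataSourceBinding.source_info['workspace_id'] == f'"{workspace_id}"',
    )
).first()
```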

View File

@@ -29,4 +29,6 @@ sentry-sdk[flask]~=1.21.1
jieba==0.42.1
celery==5.2.7
redis~=4.5.4
pypdf==3.8.1
pypdf==3.8.1
openpyxl==3.1.2
chardet~=5.1.0

View File

@@ -267,9 +267,10 @@ class TenantService:
}
if action not in ['add', 'remove', 'update']:
raise InvalidActionError("Invalid action.")
if operator.id == member.id:
raise CannotOperateSelfError("Cannot operate self.")
if member:
if operator.id == member.id:
raise CannotOperateSelfError("Cannot operate self.")
ta_operator = TenantAccountJoin.query.filter_by(
tenant_id=tenant.id,
@@ -365,6 +366,7 @@ class RegisterService:
account = Account.query.filter_by(email=email).first()
if not account:
TenantService.check_member_permission(tenant, inviter, None, 'add')
name = email.split('@')[0]
account = AccountService.create_account(email, name)
account.status = AccountStatus.PENDING.value

View File

@@ -33,6 +33,10 @@ class CompletionService:
# is streaming mode
inputs = args['inputs']
query = args['query']
if not query:
raise ValueError('query is required')
conversation_id = args['conversation_id'] if 'conversation_id' in args else None
conversation = None

View File

@@ -3,7 +3,7 @@ import logging
import datetime
import time
import random
from typing import Optional
from typing import Optional, List
from extensions.ext_redis import redis_client
from flask_login import current_user
@@ -12,14 +12,17 @@ from events.dataset_event import dataset_was_deleted
from events.document_event import document_was_deleted
from extensions.ext_database import db
from models.account import Account
from models.dataset import Dataset, Document, DatasetQuery, DatasetProcessRule, AppDatasetJoin
from models.dataset import Dataset, Document, DatasetQuery, DatasetProcessRule, AppDatasetJoin, DocumentSegment
from models.model import UploadFile
from models.source import DataSourceBinding
from services.errors.account import NoPermissionError
from services.errors.dataset import DatasetNameDuplicateError
from services.errors.document import DocumentIndexingError
from services.errors.file import FileNotExistsError
from tasks.clean_notion_document_task import clean_notion_document_task
from tasks.deal_dataset_vector_index_task import deal_dataset_vector_index_task
from tasks.document_indexing_task import document_indexing_task
from tasks.document_indexing_update_task import document_indexing_update_task
class DatasetService:
@@ -75,7 +78,7 @@ class DatasetService:
raise DatasetNameDuplicateError(
f'Dataset with name {name} already exists.')
dataset = Dataset(name=name, indexing_technique=indexing_technique, data_source_type='upload_file')
dataset = Dataset(name=name, indexing_technique=indexing_technique)
# dataset = Dataset(name=name, provider=provider, config=config)
dataset.created_by = account.id
dataset.updated_by = account.id
@@ -277,6 +280,32 @@ class DocumentService:
return document
@staticmethod
def get_document_by_id(document_id: str) -> Optional[Document]:
document = db.session.query(Document).filter(
Document.id == document_id
).first()
return document
@staticmethod
def get_document_by_dataset_id(dataset_id: str) -> List[Document]:
documents = db.session.query(Document).filter(
Document.dataset_id == dataset_id,
Document.enabled == True
).all()
return documents
@staticmethod
def get_batch_documents(dataset_id: str, batch: str) -> List[Document]:
documents = db.session.query(Document).filter(
Document.batch == batch,
Document.dataset_id == dataset_id,
Document.tenant_id == current_user.current_tenant_id
).all()
return documents
@staticmethod
def get_document_file_detail(file_id: str):
file_detail = db.session.query(UploadFile). \
filter(UploadFile.id == file_id). \
@@ -335,9 +364,9 @@ class DocumentService:
@staticmethod
def get_documents_position(dataset_id):
documents = Document.query.filter_by(dataset_id=dataset_id).all()
if documents:
return len(documents) + 1
document = Document.query.filter_by(dataset_id=dataset_id).order_by(Document.position.desc()).first()
if document:
return document.position + 1
else:
return 1
@@ -345,6 +374,11 @@ class DocumentService:
def save_document_with_dataset_id(dataset: Dataset, document_data: dict,
account: Account, dataset_process_rule: Optional[DatasetProcessRule] = None,
created_from: str = 'web'):
# if dataset is empty, update dataset data_source_type
if not dataset.data_source_type:
dataset.data_source_type = document_data["data_source"]["type"]
db.session.commit()
if not dataset.indexing_technique:
if 'indexing_technique' not in document_data \
or document_data['indexing_technique'] not in Dataset.INDEXING_TECHNIQUE_LIST:
@@ -354,9 +388,150 @@ class DocumentService:
if dataset.indexing_technique == 'high_quality':
IndexBuilder.get_default_service_context(dataset.tenant_id)
documents = []
batch = time.strftime('%Y%m%d%H%M%S') + str(random.randint(100000, 999999))
if 'original_document_id' in document_data and document_data["original_document_id"]:
document = DocumentService.update_document_with_dataset_id(dataset, document_data, account)
documents.append(document)
else:
# save process rule
if not dataset_process_rule:
process_rule = document_data["process_rule"]
if process_rule["mode"] == "custom":
dataset_process_rule = DatasetProcessRule(
dataset_id=dataset.id,
mode=process_rule["mode"],
rules=json.dumps(process_rule["rules"]),
created_by=account.id
)
elif process_rule["mode"] == "automatic":
dataset_process_rule = DatasetProcessRule(
dataset_id=dataset.id,
mode=process_rule["mode"],
rules=json.dumps(DatasetProcessRule.AUTOMATIC_RULES),
created_by=account.id
)
db.session.add(dataset_process_rule)
db.session.commit()
position = DocumentService.get_documents_position(dataset.id)
document_ids = []
if document_data["data_source"]["type"] == "upload_file":
upload_file_list = document_data["data_source"]["info_list"]['file_info_list']['file_ids']
for file_id in upload_file_list:
file = db.session.query(UploadFile).filter(
UploadFile.tenant_id == dataset.tenant_id,
UploadFile.id == file_id
).first()
# raise error if file not found
if not file:
raise FileNotExistsError()
file_name = file.name
data_source_info = {
"upload_file_id": file_id,
}
document = DocumentService.save_document(dataset, dataset_process_rule.id,
document_data["data_source"]["type"],
data_source_info, created_from, position,
account, file_name, batch)
db.session.add(document)
db.session.flush()
document_ids.append(document.id)
documents.append(document)
position += 1
elif document_data["data_source"]["type"] == "notion_import":
notion_info_list = document_data["data_source"]['info_list']['notion_info_list']
exist_page_ids = []
exist_document = dict()
documents = Document.query.filter_by(
dataset_id=dataset.id,
tenant_id=current_user.current_tenant_id,
data_source_type='notion_import',
enabled=True
).all()
if documents:
for document in documents:
data_source_info = json.loads(document.data_source_info)
exist_page_ids.append(data_source_info['notion_page_id'])
exist_document[data_source_info['notion_page_id']] = document.id
for notion_info in notion_info_list:
workspace_id = notion_info['workspace_id']
data_source_binding = DataSourceBinding.query.filter(
db.and_(
DataSourceBinding.tenant_id == current_user.current_tenant_id,
DataSourceBinding.provider == 'notion',
DataSourceBinding.disabled == False,
DataSourceBinding.source_info['workspace_id'] == f'"{workspace_id}"'
)
).first()
if not data_source_binding:
raise ValueError('Data source binding not found.')
for page in notion_info['pages']:
if page['page_id'] not in exist_page_ids:
data_source_info = {
"notion_workspace_id": workspace_id,
"notion_page_id": page['page_id'],
"notion_page_icon": page['page_icon'],
"type": page['type']
}
document = DocumentService.save_document(dataset, dataset_process_rule.id,
document_data["data_source"]["type"],
data_source_info, created_from, position,
account, page['page_name'], batch)
# if page['type'] == 'database':
# document.splitting_completed_at = datetime.datetime.utcnow()
# document.cleaning_completed_at = datetime.datetime.utcnow()
# document.parsing_completed_at = datetime.datetime.utcnow()
# document.completed_at = datetime.datetime.utcnow()
# document.indexing_status = 'completed'
# document.word_count = 0
# document.tokens = 0
# document.indexing_latency = 0
db.session.add(document)
db.session.flush()
# if page['type'] != 'database':
document_ids.append(document.id)
documents.append(document)
position += 1
else:
exist_document.pop(page['page_id'])
# delete documents that are no longer selected
if len(exist_document) > 0:
clean_notion_document_task.delay(list(exist_document.values()), dataset.id)
db.session.commit()
# trigger async task
document_indexing_task.delay(dataset.id, document_ids)
return documents, batch
@staticmethod
def save_document(dataset: Dataset, process_rule_id: str, data_source_type: str, data_source_info: dict,
created_from: str, position: int, account: Account, name: str, batch: str):
document = Document(
tenant_id=dataset.tenant_id,
dataset_id=dataset.id,
position=position,
data_source_type=data_source_type,
data_source_info=json.dumps(data_source_info),
dataset_process_rule_id=process_rule_id,
batch=batch,
name=name,
created_from=created_from,
created_by=account.id,
)
return document
@staticmethod
def update_document_with_dataset_id(dataset: Dataset, document_data: dict,
account: Account, dataset_process_rule: Optional[DatasetProcessRule] = None,
created_from: str = 'web'):
document = DocumentService.get_document(dataset.id, document_data["original_document_id"])
if document.display_status != 'available':
raise ValueError("Document is not available")
# save process rule
if not dataset_process_rule:
if 'process_rule' in document_data and document_data['process_rule']:
process_rule = document_data["process_rule"]
if process_rule["mode"] == "custom":
dataset_process_rule = DatasetProcessRule(
@@ -374,46 +549,70 @@ class DocumentService:
)
db.session.add(dataset_process_rule)
db.session.commit()
document.dataset_process_rule_id = dataset_process_rule.id
# update document data source
if 'data_source' in document_data and document_data['data_source']:
file_name = ''
data_source_info = {}
if document_data["data_source"]["type"] == "upload_file":
upload_file_list = document_data["data_source"]["info_list"]['file_info_list']['file_ids']
for file_id in upload_file_list:
file = db.session.query(UploadFile).filter(
UploadFile.tenant_id == dataset.tenant_id,
UploadFile.id == file_id
).first()
file_name = ''
data_source_info = {}
if document_data["data_source"]["type"] == "upload_file":
file_id = document_data["data_source"]["info"]
file = db.session.query(UploadFile).filter(
UploadFile.tenant_id == dataset.tenant_id,
UploadFile.id == file_id
).first()
# raise error if file not found
if not file:
raise FileNotExistsError()
file_name = file.name
data_source_info = {
"upload_file_id": file_id,
}
# save document
position = DocumentService.get_documents_position(dataset.id)
document = Document(
tenant_id=dataset.tenant_id,
dataset_id=dataset.id,
position=position,
data_source_type=document_data["data_source"]["type"],
data_source_info=json.dumps(data_source_info),
dataset_process_rule_id=dataset_process_rule.id,
batch=time.strftime('%Y%m%d%H%M%S') + str(random.randint(100000, 999999)),
name=file_name,
created_from=created_from,
created_by=account.id,
# created_api_request_id = db.Column(UUID, nullable=True)
)
# raise error if file not found
if not file:
raise FileNotExistsError()
file_name = file.name
data_source_info = {
"upload_file_id": file_id,
}
elif document_data["data_source"]["type"] == "notion_import":
notion_info_list = document_data["data_source"]['info_list']['notion_info_list']
for notion_info in notion_info_list:
workspace_id = notion_info['workspace_id']
data_source_binding = DataSourceBinding.query.filter(
db.and_(
DataSourceBinding.tenant_id == current_user.current_tenant_id,
DataSourceBinding.provider == 'notion',
DataSourceBinding.disabled == False,
DataSourceBinding.source_info['workspace_id'] == f'"{workspace_id}"'
)
).first()
if not data_source_binding:
raise ValueError('Data source binding not found.')
for page in notion_info['pages']:
data_source_info = {
"notion_workspace_id": workspace_id,
"notion_page_id": page['page_id'],
"notion_page_icon": page['page_icon'],
"type": page['type']
}
document.data_source_type = document_data["data_source"]["type"]
document.data_source_info = json.dumps(data_source_info)
document.name = file_name
# update document to be waiting
document.indexing_status = 'waiting'
document.completed_at = None
document.processing_started_at = None
document.parsing_completed_at = None
document.cleaning_completed_at = None
document.splitting_completed_at = None
document.updated_at = datetime.datetime.utcnow()
document.created_from = created_from
db.session.add(document)
db.session.commit()
# update document segment
update_params = {
DocumentSegment.status: 're_segment'
}
DocumentSegment.query.filter_by(document_id=document.id).update(update_params)
db.session.commit()
# trigger async task
document_indexing_task.delay(document.dataset_id, document.id)
document_indexing_update_task.delay(document.dataset_id, document.id)
return document
@@ -431,18 +630,33 @@ class DocumentService:
db.session.add(dataset)
db.session.flush()
document = DocumentService.save_document_with_dataset_id(dataset, document_data, account)
documents, batch = DocumentService.save_document_with_dataset_id(dataset, document_data, account)
cut_length = 18
cut_name = document.name[:cut_length]
dataset.name = cut_name + '...' if len(document.name) > cut_length else cut_name
dataset.description = 'useful for when you want to answer queries about the ' + document.name
cut_name = documents[0].name[:cut_length]
dataset.name = cut_name + '...'
dataset.description = 'useful for when you want to answer queries about the ' + documents[0].name
db.session.commit()
return dataset, document
return dataset, documents, batch
@classmethod
def document_create_args_validate(cls, args: dict):
if 'original_document_id' not in args or not args['original_document_id']:
DocumentService.data_source_args_validate(args)
DocumentService.process_rule_args_validate(args)
else:
if ('data_source' not in args or not args['data_source']) \
and ('process_rule' not in args or not args['process_rule']):
raise ValueError("Data source or Process rule is required")
else:
if 'data_source' in args and args['data_source']:
DocumentService.data_source_args_validate(args)
if 'process_rule' in args and args['process_rule']:
DocumentService.process_rule_args_validate(args)
@classmethod
def data_source_args_validate(cls, args: dict):
if 'data_source' not in args or not args['data_source']:
raise ValueError("Data source is required")
@@ -455,9 +669,92 @@ class DocumentService:
if args['data_source']['type'] not in Document.DATA_SOURCES:
raise ValueError("Data source type is invalid")
if 'info_list' not in args['data_source'] or not args['data_source']['info_list']:
raise ValueError("Data source info is required")
if args['data_source']['type'] == 'upload_file':
if 'info' not in args['data_source'] or not args['data_source']['info']:
raise ValueError("Data source info is required")
if 'file_info_list' not in args['data_source']['info_list'] or not args['data_source']['info_list']['file_info_list']:
raise ValueError("File source info is required")
if args['data_source']['type'] == 'notion_import':
if 'notion_info_list' not in args['data_source']['info_list'] or not args['data_source']['info_list']['notion_info_list']:
raise ValueError("Notion source info is required")
@classmethod
def process_rule_args_validate(cls, args: dict):
if 'process_rule' not in args or not args['process_rule']:
raise ValueError("Process rule is required")
if not isinstance(args['process_rule'], dict):
raise ValueError("Process rule is invalid")
if 'mode' not in args['process_rule'] or not args['process_rule']['mode']:
raise ValueError("Process rule mode is required")
if args['process_rule']['mode'] not in DatasetProcessRule.MODES:
raise ValueError("Process rule mode is invalid")
if args['process_rule']['mode'] == 'automatic':
args['process_rule']['rules'] = {}
else:
if 'rules' not in args['process_rule'] or not args['process_rule']['rules']:
raise ValueError("Process rule rules is required")
if not isinstance(args['process_rule']['rules'], dict):
raise ValueError("Process rule rules is invalid")
if 'pre_processing_rules' not in args['process_rule']['rules'] \
or args['process_rule']['rules']['pre_processing_rules'] is None:
raise ValueError("Process rule pre_processing_rules is required")
if not isinstance(args['process_rule']['rules']['pre_processing_rules'], list):
raise ValueError("Process rule pre_processing_rules is invalid")
unique_pre_processing_rule_dicts = {}
for pre_processing_rule in args['process_rule']['rules']['pre_processing_rules']:
if 'id' not in pre_processing_rule or not pre_processing_rule['id']:
raise ValueError("Process rule pre_processing_rules id is required")
if pre_processing_rule['id'] not in DatasetProcessRule.PRE_PROCESSING_RULES:
raise ValueError("Process rule pre_processing_rules id is invalid")
if 'enabled' not in pre_processing_rule or pre_processing_rule['enabled'] is None:
raise ValueError("Process rule pre_processing_rules enabled is required")
if not isinstance(pre_processing_rule['enabled'], bool):
raise ValueError("Process rule pre_processing_rules enabled is invalid")
unique_pre_processing_rule_dicts[pre_processing_rule['id']] = pre_processing_rule
args['process_rule']['rules']['pre_processing_rules'] = list(unique_pre_processing_rule_dicts.values())
if 'segmentation' not in args['process_rule']['rules'] \
or args['process_rule']['rules']['segmentation'] is None:
raise ValueError("Process rule segmentation is required")
if not isinstance(args['process_rule']['rules']['segmentation'], dict):
raise ValueError("Process rule segmentation is invalid")
if 'separator' not in args['process_rule']['rules']['segmentation'] \
or not args['process_rule']['rules']['segmentation']['separator']:
raise ValueError("Process rule segmentation separator is required")
if not isinstance(args['process_rule']['rules']['segmentation']['separator'], str):
raise ValueError("Process rule segmentation separator is invalid")
if 'max_tokens' not in args['process_rule']['rules']['segmentation'] \
or not args['process_rule']['rules']['segmentation']['max_tokens']:
raise ValueError("Process rule segmentation max_tokens is required")
if not isinstance(args['process_rule']['rules']['segmentation']['max_tokens'], int):
raise ValueError("Process rule segmentation max_tokens is invalid")
@classmethod
def estimate_args_validate(cls, args: dict):
if 'info_list' not in args or not args['info_list']:
raise ValueError("Data source info is required")
if not isinstance(args['info_list'], dict):
raise ValueError("Data info is invalid")
if 'process_rule' not in args or not args['process_rule']:
raise ValueError("Process rule is required")

View File

@@ -35,8 +35,7 @@ def clean_document_task(document_id: str, dataset_id: str):
index_node_ids = [segment.index_node_id for segment in segments]
# delete from vector index
if dataset.indexing_technique == "high_quality":
vector_index.del_nodes(index_node_ids)
vector_index.del_nodes(index_node_ids)
# delete from keyword index
if index_node_ids:
@@ -44,7 +43,7 @@ def clean_document_task(document_id: str, dataset_id: str):
for segment in segments:
db.session.delete(segment)
db.session.commit()
end_at = time.perf_counter()
logging.info(
click.style('Cleaned document when document deleted: {} latency: {}'.format(document_id, end_at - start_at), fg='green'))

View File

@@ -0,0 +1,58 @@
import logging
import time
from typing import List
import click
from celery import shared_task
from core.index.keyword_table_index import KeywordTableIndex
from core.index.vector_index import VectorIndex
from extensions.ext_database import db
from models.dataset import DocumentSegment, Dataset, Document
@shared_task
def clean_notion_document_task(document_ids: List[str], dataset_id: str):
"""
Clean documents when their Notion pages are deselected from an import.
:param document_ids: document ids
:param dataset_id: dataset id
Usage: clean_notion_document_task.delay(document_ids, dataset_id)
"""
logging.info(click.style('Start clean document when import from notion document deleted: {}'.format(dataset_id), fg='green'))
start_at = time.perf_counter()
try:
dataset = db.session.query(Dataset).filter(Dataset.id == dataset_id).first()
if not dataset:
raise Exception('Document has no dataset')
vector_index = VectorIndex(dataset=dataset)
keyword_table_index = KeywordTableIndex(dataset=dataset)
for document_id in document_ids:
document = db.session.query(Document).filter(
Document.id == document_id
).first()
db.session.delete(document)
segments = db.session.query(DocumentSegment).filter(DocumentSegment.document_id == document_id).all()
index_node_ids = [segment.index_node_id for segment in segments]
# delete from vector index
vector_index.del_nodes(index_node_ids)
# delete from keyword index
if index_node_ids:
keyword_table_index.del_nodes(index_node_ids)
for segment in segments:
db.session.delete(segment)
db.session.commit()
end_at = time.perf_counter()
logging.info(
click.style('Cleaned document when import from notion document deleted: {} latency: {}'.format(
dataset_id, end_at - start_at),
fg='green'))
except Exception:
logging.exception("Cleaned document when import form notion document deleted failed")

View File

@@ -0,0 +1,109 @@
import datetime
import logging
import time
import click
from celery import shared_task
from werkzeug.exceptions import NotFound
from core.data_source.notion import NotionPageReader
from core.index.keyword_table_index import KeywordTableIndex
from core.index.vector_index import VectorIndex
from core.indexing_runner import IndexingRunner, DocumentIsPausedException
from core.llm.error import ProviderTokenNotInitError
from extensions.ext_database import db
from models.dataset import Document, Dataset, DocumentSegment
from models.source import DataSourceBinding
@shared_task
def document_indexing_sync_task(dataset_id: str, document_id: str):
"""
Async sync document
:param dataset_id:
:param document_id:
Usage: document_indexing_sync_task.delay(dataset_id, document_id)
"""
logging.info(click.style('Start sync document: {}'.format(document_id), fg='green'))
start_at = time.perf_counter()
document = db.session.query(Document).filter(
Document.id == document_id,
Document.dataset_id == dataset_id
).first()
if not document:
raise NotFound('Document not found')
data_source_info = document.data_source_info_dict
if document.data_source_type == 'notion_import':
if not data_source_info or 'notion_page_id' not in data_source_info \
or 'notion_workspace_id' not in data_source_info:
raise ValueError("no notion page found")
workspace_id = data_source_info['notion_workspace_id']
page_id = data_source_info['notion_page_id']
page_edited_time = data_source_info['last_edited_time']
data_source_binding = DataSourceBinding.query.filter(
db.and_(
DataSourceBinding.tenant_id == document.tenant_id,
DataSourceBinding.provider == 'notion',
DataSourceBinding.disabled == False,
DataSourceBinding.source_info['workspace_id'] == f'"{workspace_id}"'
)
).first()
if not data_source_binding:
raise ValueError('Data source binding not found.')
reader = NotionPageReader(integration_token=data_source_binding.access_token)
last_edited_time = reader.get_page_last_edited_time(page_id)
# check the page is updated
if last_edited_time != page_edited_time:
document.indexing_status = 'parsing'
document.processing_started_at = datetime.datetime.utcnow()
db.session.commit()
# delete all document segment and index
try:
dataset = db.session.query(Dataset).filter(Dataset.id == dataset_id).first()
if not dataset:
raise Exception('Dataset not found')
vector_index = VectorIndex(dataset=dataset)
keyword_table_index = KeywordTableIndex(dataset=dataset)
segments = db.session.query(DocumentSegment).filter(DocumentSegment.document_id == document_id).all()
index_node_ids = [segment.index_node_id for segment in segments]
# delete from vector index
vector_index.del_nodes(index_node_ids)
# delete from keyword index
if index_node_ids:
keyword_table_index.del_nodes(index_node_ids)
for segment in segments:
db.session.delete(segment)
end_at = time.perf_counter()
logging.info(
click.style('Cleaned document when document update data source or process rule: {} latency: {}'.format(document_id, end_at - start_at), fg='green'))
except Exception:
logging.exception("Cleaned document when document update data source or process rule failed")
try:
indexing_runner = IndexingRunner()
indexing_runner.run([document])
end_at = time.perf_counter()
logging.info(click.style('update document: {} latency: {}'.format(document.id, end_at - start_at), fg='green'))
except DocumentIsPausedException:
logging.info(click.style('Document update paused, document id: {}'.format(document.id), fg='yellow'))
except ProviderTokenNotInitError as e:
document.indexing_status = 'error'
document.error = str(e.description)
document.stopped_at = datetime.datetime.utcnow()
db.session.commit()
except Exception as e:
logging.exception("consume update document failed")
document.indexing_status = 'error'
document.error = str(e)
document.stopped_at = datetime.datetime.utcnow()
db.session.commit()
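Scheduling follows the same one-liner pattern as the other tasks (ids illustrative):

```python
# Sketch: queue a sync check; the task re-indexes only when Notion reports a
# newer last_edited_time than the one stored in data_source_info.
document_indexing_sync_task.delay("dataset-id", "document-id")
```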

View File

@@ -13,32 +13,36 @@ from models.dataset import Document
@shared_task
def document_indexing_task(dataset_id: str, document_id: str):
def document_indexing_task(dataset_id: str, document_ids: list):
"""
Async process document
:param dataset_id:
:param document_id:
:param document_ids:
Usage: document_indexing_task.delay(dataset_id, document_ids)
"""
logging.info(click.style('Start process document: {}'.format(document_id), fg='green'))
start_at = time.perf_counter()
documents = []
for document_id in document_ids:
logging.info(click.style('Start process document: {}'.format(document_id), fg='green'))
start_at = time.perf_counter()
document = db.session.query(Document).filter(
Document.id == document_id,
Document.dataset_id == dataset_id
).first()
document = db.session.query(Document).filter(
Document.id == document_id,
Document.dataset_id == dataset_id
).first()
if not document:
raise NotFound('Document not found')
if not document:
raise NotFound('Document not found')
document.indexing_status = 'parsing'
document.processing_started_at = datetime.datetime.utcnow()
document.indexing_status = 'parsing'
document.processing_started_at = datetime.datetime.utcnow()
documents.append(document)
db.session.add(document)
db.session.commit()
try:
indexing_runner = IndexingRunner()
indexing_runner.run(document)
indexing_runner.run(documents)
end_at = time.perf_counter()
logging.info(click.style('Processed document: {} latency: {}'.format(document.id, end_at - start_at), fg='green'))
except DocumentIsPausedException:

View File

@@ -0,0 +1,85 @@
import datetime
import logging
import time
import click
from celery import shared_task
from werkzeug.exceptions import NotFound
from core.index.keyword_table_index import KeywordTableIndex
from core.index.vector_index import VectorIndex
from core.indexing_runner import IndexingRunner, DocumentIsPausedException
from core.llm.error import ProviderTokenNotInitError
from extensions.ext_database import db
from models.dataset import Document, Dataset, DocumentSegment
@shared_task
def document_indexing_update_task(dataset_id: str, document_id: str):
"""
Async update document
:param dataset_id:
:param document_id:
Usage: document_indexing_update_task.delay(dataset_id, document_id)
"""
logging.info(click.style('Start update document: {}'.format(document_id), fg='green'))
start_at = time.perf_counter()
document = db.session.query(Document).filter(
Document.id == document_id,
Document.dataset_id == dataset_id
).first()
if not document:
raise NotFound('Document not found')
document.indexing_status = 'parsing'
document.processing_started_at = datetime.datetime.utcnow()
db.session.commit()
# delete all document segment and index
try:
dataset = db.session.query(Dataset).filter(Dataset.id == dataset_id).first()
if not dataset:
raise Exception('Dataset not found')
vector_index = VectorIndex(dataset=dataset)
keyword_table_index = KeywordTableIndex(dataset=dataset)
segments = db.session.query(DocumentSegment).filter(DocumentSegment.document_id == document_id).all()
index_node_ids = [segment.index_node_id for segment in segments]
# delete from vector index
vector_index.del_nodes(index_node_ids)
# delete from keyword index
if index_node_ids:
keyword_table_index.del_nodes(index_node_ids)
for segment in segments:
db.session.delete(segment)
db.session.commit()
end_at = time.perf_counter()
logging.info(
click.style('Cleaned document when document update data source or process rule: {} latency: {}'.format(document_id, end_at - start_at), fg='green'))
except Exception:
logging.exception("Cleaned document when document update data source or process rule failed")
try:
indexing_runner = IndexingRunner()
indexing_runner.run([document])
end_at = time.perf_counter()
logging.info(click.style('update document: {} latency: {}'.format(document.id, end_at - start_at), fg='green'))
except DocumentIsPausedException:
logging.info(click.style('Document update paused, document id: {}'.format(document.id), fg='yellow'))
except ProviderTokenNotInitError as e:
document.indexing_status = 'error'
document.error = str(e.description)
document.stopped_at = datetime.datetime.utcnow()
db.session.commit()
except Exception as e:
logging.exception("consume update document failed")
document.indexing_status = 'error'
document.error = str(e)
document.stopped_at = datetime.datetime.utcnow()
db.session.commit()

View File

@@ -34,7 +34,7 @@ def recover_document_indexing_task(dataset_id: str, document_id: str):
try:
indexing_runner = IndexingRunner()
if document.indexing_status in ["waiting", "parsing", "cleaning"]:
indexing_runner.run(document)
indexing_runner.run([document])
elif document.indexing_status == "splitting":
indexing_runner.run_in_splitting_status(document)
elif document.indexing_status == "indexing":

View File

@@ -42,8 +42,7 @@ def remove_document_from_index_task(document_id: str):
keyword_table_index = KeywordTableIndex(dataset=dataset)
# delete from vector index
if dataset.indexing_technique == "high_quality":
vector_index.del_doc(document.id)
vector_index.del_doc(document.id)
# delete from keyword index
segments = db.session.query(DocumentSegment).filter(DocumentSegment.document_id == document.id).all()

View File

@@ -2,7 +2,7 @@ version: '3.1'
services:
# API service
api:
image: langgenius/dify-api:0.3.1
image: langgenius/dify-api:0.3.4
restart: always
environment:
# Startup mode, 'api' starts the API server.
@@ -110,7 +110,7 @@ services:
# worker service
# The Celery worker for processing the queue.
worker:
image: langgenius/dify-api:0.3.1
image: langgenius/dify-api:0.3.4
restart: always
environment:
# Startup mode, 'worker' starts the Celery worker for processing the queue.
@@ -156,7 +156,7 @@ services:
# Frontend web application.
web:
image: langgenius/dify-web:0.3.1
image: langgenius/dify-web:0.3.4
restart: always
environment:
EDITION: SELF_HOSTED
@@ -168,6 +168,8 @@ services:
# console or api domain.
# example: http://udify.app
APP_URL: ''
# The DSN for Sentry error reporting. If not set, Sentry error reporting will be disabled.
SENTRY_DSN: ''
# The postgres database.
db:

View File

@@ -12,8 +12,12 @@ After installing the SDK, you can use it in your project like this:
```js
import { DifyClient, ChatClient, CompletionClient } from 'dify-client'
const API_KEY = 'your-api-key-here';
const user = `random-user-id`;
const API_KEY = 'your-api-key-here'
const user = `random-user-id`
const inputs = {
name: 'test name a'
}
const query = "Please tell me a short story in 10 words or less."
// Create a completion client
const completionClient = new CompletionClient(API_KEY)
@@ -22,8 +26,15 @@ completionClient.createCompletionMessage(inputs, query, responseMode, user)
// Create a chat client
const chatClient = new ChatClient(API_KEY)
// Create a chat message
chatClient.createChatMessage(inputs, query, responseMode, user, conversationId)
// Create a chat message in stream mode
const response = await chatClient.createChatMessage(inputs, query, user, true, null)
const stream = response.data;
stream.on('data', data => {
console.log(data);
});
stream.on('end', () => {
console.log("stream done");
});
// Fetch conversations
chatClient.getConversations(user)
// Fetch conversation messages

View File

@@ -1,140 +1,188 @@
import axios from 'axios'
export const BASE_URL = 'https://api.dify.ai/v1'
import axios from "axios";
export const BASE_URL = "https://api.dify.ai/v1";
export const routes = {
application: {
method: 'GET',
url: () => `/parameters`
method: "GET",
url: () => `/parameters`,
},
feedback: {
method: 'POST',
url: (messageId) => `/messages/${messageId}/feedbacks`,
method: "POST",
url: (message_id) => `/messages/${message_id}/feedbacks`,
},
createCompletionMessage: {
method: 'POST',
method: "POST",
url: () => `/completion-messages`,
},
createChatMessage: {
method: 'POST',
method: "POST",
url: () => `/chat-messages`,
},
getConversationMessages: {
method: 'GET',
url: () => '/messages',
method: "GET",
url: () => "/messages",
},
getConversations: {
method: 'GET',
url: () => '/conversations',
method: "GET",
url: () => "/conversations",
},
renameConversation: {
method: 'PATCH',
url: (conversationId) => `/conversations/${conversationId}`,
}
}
method: "PATCH",
url: (conversation_id) => `/conversations/${conversation_id}`,
},
};
export class DifyClient {
constructor(apiKey, baseUrl = BASE_URL) {
this.apiKey = apiKey
this.baseUrl = baseUrl
this.apiKey = apiKey;
this.baseUrl = baseUrl;
}
updateApiKey(apiKey) {
this.apiKey = apiKey
this.apiKey = apiKey;
}
async sendRequest(method, endpoint, data = null, params = null, stream = false) {
async sendRequest(
method,
endpoint,
data = null,
params = null,
stream = false
) {
const headers = {
'Authorization': `Bearer ${this.apiKey}`,
'Content-Type': 'application/json',
}
Authorization: `Bearer ${this.apiKey}`,
"Content-Type": "application/json",
};
const url = `${this.baseUrl}${endpoint}`
let response
if (!stream) {
const url = `${this.baseUrl}${endpoint}`;
let response;
if (stream) {
response = await axios({
method,
url,
data,
params,
headers,
responseType: stream ? 'stream' : 'json',
})
responseType: "stream",
});
} else {
response = await fetch(url, {
headers,
response = await axios({
method,
body: JSON.stringify(data),
})
url,
data,
params,
headers,
responseType: "json",
});
}
return response
return response;
}
messageFeedback(messageId, rating, user) {
messageFeedback(message_id, rating, user) {
const data = {
rating,
user,
}
return this.sendRequest(routes.feedback.method, routes.feedback.url(messageId), data)
};
return this.sendRequest(
routes.feedback.method,
routes.feedback.url(message_id),
data
);
}
getApplicationParameters(user) {
const params = { user }
return this.sendRequest(routes.application.method, routes.application.url(), null, params)
const params = { user };
return this.sendRequest(
routes.application.method,
routes.application.url(),
null,
params
);
}
}
export class CompletionClient extends DifyClient {
createCompletionMessage(inputs, query, user, responseMode) {
createCompletionMessage(inputs, query, user, stream = false) {
const data = {
inputs,
query,
responseMode,
user,
}
return this.sendRequest(routes.createCompletionMessage.method, routes.createCompletionMessage.url(), data, null, responseMode === 'streaming')
response_mode: stream ? "streaming" : "blocking",
};
return this.sendRequest(
routes.createCompletionMessage.method,
routes.createCompletionMessage.url(),
data,
null,
stream
);
}
}
export class ChatClient extends DifyClient {
createChatMessage(inputs, query, user, responseMode = 'blocking', conversationId = null) {
createChatMessage(
inputs,
query,
user,
stream = false,
conversation_id = null
) {
const data = {
inputs,
query,
user,
responseMode,
}
if (conversationId)
data.conversation_id = conversationId
response_mode: stream ? "streaming" : "blocking",
};
if (conversation_id) data.conversation_id = conversation_id;
return this.sendRequest(routes.createChatMessage.method, routes.createChatMessage.url(), data, null, responseMode === 'streaming')
return this.sendRequest(
routes.createChatMessage.method,
routes.createChatMessage.url(),
data,
null,
stream
);
}
getConversationMessages(user, conversationId = '', firstId = null, limit = null) {
const params = { user }
getConversationMessages(
user,
conversation_id = "",
first_id = null,
limit = null
) {
const params = { user };
if (conversationId)
params.conversation_id = conversationId
if (conversation_id) params.conversation_id = conversation_id;
if (firstId)
params.first_id = firstId
if (first_id) params.first_id = first_id;
if (limit)
params.limit = limit
if (limit) params.limit = limit;
return this.sendRequest(routes.getConversationMessages.method, routes.getConversationMessages.url(), null, params)
return this.sendRequest(
routes.getConversationMessages.method,
routes.getConversationMessages.url(),
null,
params
);
}
getConversations(user, firstId = null, limit = null, pinned = null) {
const params = { user, first_id: firstId, limit, pinned }
return this.sendRequest(routes.getConversations.method, routes.getConversations.url(), null, params)
getConversations(user, first_id = null, limit = null, pinned = null) {
const params = { user, first_id, limit, pinned };
return this.sendRequest(
routes.getConversations.method,
routes.getConversations.url(),
null,
params
);
}
renameConversation(conversationId, name, user) {
const data = { name, user }
return this.sendRequest(routes.renameConversation.method, routes.renameConversation.url(conversationId), data)
renameConversation(conversation_id, name, user) {
const data = { name, user };
return this.sendRequest(
routes.renameConversation.method,
routes.renameConversation.url(conversation_id),
data
);
}
}
}

View File

@@ -1,6 +1,6 @@
{
"name": "dify-client",
"version": "1.0.3",
"version": "2.0.0",
"description": "This is the Node.js SDK for the Dify.AI API, which allows you to easily integrate Dify.AI into your Node.js applications.",
"main": "index.js",
"type": "module",

View File

@@ -9,4 +9,7 @@ NEXT_PUBLIC_API_PREFIX=http://localhost:5001/console/api
# The URL for Web APP, refers to the Web App base URL of WEB service if web app domain is different from
# console or api domain.
# example: http://udify.app/api
-NEXT_PUBLIC_PUBLIC_API_PREFIX=http://localhost:5001/api
+NEXT_PUBLIC_PUBLIC_API_PREFIX=http://localhost:5001/api
+
+# SENTRY
+NEXT_PUBLIC_SENTRY_DSN=

View File

@@ -13,7 +13,7 @@ WORKDIR /app/web
COPY package.json /app/web/package.json
-RUN npm install
+RUN npm install --only=prod
COPY . /app/web/

View File

@@ -23,6 +23,9 @@ The `pages/api` directory is mapped to `/api/*`. Files in this directory are tre
This project uses [`next/font`](https://nextjs.org/docs/basic-features/font-optimization) to automatically optimize and load Inter, a custom Google Font.
+## Lint Code
+If your IDE is VSCode, rename `web/.vscode/settings.example.json` to `web/.vscode/settings.json` to pick up the project's lint settings.
## Learn More
To learn more about Next.js, take a look at the following resources:

View File

@@ -1,8 +1,9 @@
'use client'
-import { FC, useRef } from 'react'
-import React, { useEffect, useState } from 'react'
+import type { FC } from 'react'
+import React, { useEffect, useRef, useState } from 'react'
import { usePathname, useRouter, useSelectedLayoutSegments } from 'next/navigation'
import useSWR, { SWRConfig } from 'swr'
+import * as Sentry from '@sentry/react'
import Header from '../components/header'
import { fetchAppList } from '@/service/apps'
import { fetchDatasets } from '@/service/datasets'
@@ -12,11 +13,29 @@ import { AppContextProvider } from '@/context/app-context'
import DatasetsContext from '@/context/datasets-context'
import type { LangGeniusVersionResponse, UserProfileResponse } from '@/models/common'
+const isDevelopment = process.env.NODE_ENV === 'development'
export type ICommonLayoutProps = {
children: React.ReactNode
}
const CommonLayout: FC<ICommonLayoutProps> = ({ children }) => {
+  useEffect(() => {
+    const SENTRY_DSN = document?.body?.getAttribute('data-public-sentry-dsn')
+    if (!isDevelopment && SENTRY_DSN) {
+      Sentry.init({
+        dsn: SENTRY_DSN,
+        integrations: [
+          new Sentry.BrowserTracing({
+          }),
+          new Sentry.Replay(),
+        ],
+        tracesSampleRate: 0.1,
+        replaysSessionSampleRate: 0.1,
+        replaysOnErrorSampleRate: 1.0,
+      })
+    }
+  }, [])
const router = useRouter()
const pathname = usePathname()
const segments = useSelectedLayoutSegments()
@@ -50,7 +69,7 @@ const CommonLayout: FC<ICommonLayoutProps> = ({ children }) => {
if (!appList || !userProfile || !langeniusVersionInfo)
return <Loading type='app' />
-  const curApp = appList?.data.find(opt => opt.id === appId)
+  const curAppId = segments[0] === 'app' && segments[2]
const currentDatasetId = segments[0] === 'datasets' && segments[2]
const currentDataset = datasetList?.data?.find(opt => opt.id === currentDatasetId)
@@ -70,12 +89,18 @@ const CommonLayout: FC<ICommonLayoutProps> = ({ children }) => {
return (
<SWRConfig value={{
-      shouldRetryOnError: false
+      shouldRetryOnError: false,
}}>
<AppContextProvider value={{ apps: appList.data, mutateApps, userProfile, mutateUserProfile, pageContainerRef }}>
<DatasetsContext.Provider value={{ datasets: datasetList?.data || [], mutateDatasets, currentDataset }}>
<div ref={pageContainerRef} className='relative flex flex-col h-full overflow-auto bg-gray-100'>
-          <Header isBordered={['/apps', '/datasets'].includes(pathname)} curApp={curApp as any} appItems={appList.data} userProfile={userProfile} onLogout={onLogout} langeniusVersionInfo={langeniusVersionInfo} />
+          <Header
+            isBordered={['/apps', '/datasets'].includes(pathname)}
+            curAppId={curAppId || ''}
+            userProfile={userProfile}
+            onLogout={onLogout}
+            langeniusVersionInfo={langeniusVersionInfo}
+          />
{children}
</div>
</DatasetsContext.Provider>
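
Worth noting in this hunk: the DSN is resolved at runtime, not at build time. The effect above reads it from a data attribute on <body>, so one built image can report to different Sentry projects. The server-side half that stamps the attribute is not part of this diff; a minimal sketch of what it presumably looks like in the root layout (the attribute name matches the getAttribute call above, everything else is an assumption):

    import React from 'react'

    const RootLayout = ({ children }: { children: React.ReactNode }) => (
      <html lang="en">
        {/* Stamp the env var onto the DOM so client code can read it after hydration. */}
        <body data-public-sentry-dsn={process.env.NEXT_PUBLIC_SENTRY_DSN ?? ''}>
          {children}
        </body>
      </html>
    )

    export default RootLayout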

View File

@@ -71,6 +71,7 @@ const CardView: FC<ICardViewProps> = ({ appId }) => {
<AppCard
className='mr-3 flex-1'
appInfo={response}
+  cardType='webapp'
onChangeStatus={onChangeSiteStatus}
onGenerateCode={onGenerateCode}
onSaveSiteConfig={onSaveSiteConfig} />

View File

@@ -3,8 +3,10 @@ import React, { useState } from 'react'
import dayjs from 'dayjs'
import quarterOfYear from 'dayjs/plugin/quarterOfYear'
import { useTranslation } from 'react-i18next'
+import useSWR from 'swr'
+import { fetchAppDetail } from '@/service/apps'
import type { PeriodParams } from '@/app/components/app/overview/appChart'
-import { ConversationsChart, CostChart, EndUsersChart } from '@/app/components/app/overview/appChart'
+import { AvgResponseTime, AvgSessionInteractions, ConversationsChart, CostChart, EndUsersChart, UserSatisfactionRate } from '@/app/components/app/overview/appChart'
import type { Item } from '@/app/components/base/select'
import { SimpleSelect } from '@/app/components/base/select'
import { TIME_PERIOD_LIST } from '@/app/components/app/log/filter'
@@ -20,13 +22,19 @@ export type IChartViewProps = {
}
export default function ChartView({ appId }: IChartViewProps) {
+  const detailParams = { url: '/apps', id: appId }
+  const { data: response } = useSWR(detailParams, fetchAppDetail)
+  const isChatApp = response?.mode === 'chat'
const { t } = useTranslation()
const [period, setPeriod] = useState<PeriodParams>({ name: t('appLog.filter.period.last7days'), query: { start: today.subtract(7, 'day').format(queryDateFormat), end: today.format(queryDateFormat) } })
const onSelect = (item: Item) => {
-    setPeriod({ name: item.name, query: { start: today.subtract(item.value as number, 'day').format(queryDateFormat), end: today.format(queryDateFormat) } })
+    setPeriod({ name: item.name, query: item.value === 'all' ? undefined : { start: today.subtract(item.value as number, 'day').format(queryDateFormat), end: today.format(queryDateFormat) } })
}
+  if (!response)
+    return null
return (
<div>
<div className='flex flex-row items-center mt-8 mb-4 text-gray-900 text-base'>
@@ -46,6 +54,20 @@ export default function ChartView({ appId }: IChartViewProps) {
<EndUsersChart period={period} id={appId} />
</div>
</div>
+        <div className='flex flex-row w-full mb-6'>
+          <div className='flex-1 mr-3'>
+            {isChatApp
+              ? (
+                <AvgSessionInteractions period={period} id={appId} />
+              )
+              : (
+                <AvgResponseTime period={period} id={appId} />
+              )}
+          </div>
+          <div className='flex-1 ml-3'>
+            <UserSatisfactionRate period={period} id={appId} />
+          </div>
+        </div>
<CostChart period={period} id={appId} />
</div>
)
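
The interesting edge in this hunk is the 'all' period: instead of computing a start date, onSelect now sets query to undefined, which the chart components treat as "no date filter". A small sketch of the shape involved, assuming PeriodParams permits an optional query and that queryDateFormat is a day-level format string (the real constant lives elsewhere in the repo):

    import dayjs from 'dayjs'

    type PeriodParams = {
      name: string
      query?: { start: string; end: string } // undefined = all time
    }

    const queryDateFormat = 'YYYY-MM-DD' // assumption for this sketch

    const toPeriod = (name: string, days: number | 'all'): PeriodParams => {
      const today = dayjs()
      return {
        name,
        query: days === 'all'
          ? undefined // "all time": skip the start/end range entirely
          : { start: today.subtract(days, 'day').format(queryDateFormat), end: today.format(queryDateFormat) },
      }
    }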

View File

@@ -19,16 +19,16 @@ import I18n from '@/context/i18n'
type IStatusType = 'normal' | 'verified' | 'error' | 'error-api-key-exceed-bill'
const STATUS_COLOR_MAP = {
-  normal: { color: '', bgColor: 'bg-primary-50', borderColor: 'border-primary-100' },
-  error: { color: 'text-red-600', bgColor: 'bg-red-50', borderColor: 'border-red-100' },
-  verified: { color: '', bgColor: 'bg-green-50', borderColor: 'border-green-100' },
+  'normal': { color: '', bgColor: 'bg-primary-50', borderColor: 'border-primary-100' },
+  'error': { color: 'text-red-600', bgColor: 'bg-red-50', borderColor: 'border-red-100' },
+  'verified': { color: '', bgColor: 'bg-green-50', borderColor: 'border-green-100' },
'error-api-key-exceed-bill': { color: 'text-red-600', bgColor: 'bg-red-50', borderColor: 'border-red-100' },
}
const CheckCircleIcon: FC<{ className?: string }> = ({ className }) => {
return <svg width="20" height="20" viewBox="0 0 20 20" fill="none" xmlns="http://www.w3.org/2000/svg" className={className ?? ''}>
<rect width="20" height="20" rx="10" fill="#DEF7EC" />
<path fill-rule="evenodd" clip-rule="evenodd" d="M14.6947 6.70495C14.8259 6.83622 14.8996 7.01424 14.8996 7.19985C14.8996 7.38547 14.8259 7.56348 14.6947 7.69475L9.0947 13.2948C8.96343 13.426 8.78541 13.4997 8.5998 13.4997C8.41418 13.4997 8.23617 13.426 8.1049 13.2948L5.3049 10.4948C5.17739 10.3627 5.10683 10.1859 5.10842 10.0024C5.11002 9.81883 5.18364 9.64326 5.31342 9.51348C5.44321 9.38369 5.61878 9.31007 5.80232 9.30848C5.98585 9.30688 6.16268 9.37744 6.2947 9.50495L8.5998 11.8101L13.7049 6.70495C13.8362 6.57372 14.0142 6.5 14.1998 6.5C14.3854 6.5 14.5634 6.57372 14.6947 6.70495Z" fill="#046C4E" />
<path fillRule="evenodd" clipRule="evenodd" d="M14.6947 6.70495C14.8259 6.83622 14.8996 7.01424 14.8996 7.19985C14.8996 7.38547 14.8259 7.56348 14.6947 7.69475L9.0947 13.2948C8.96343 13.426 8.78541 13.4997 8.5998 13.4997C8.41418 13.4997 8.23617 13.426 8.1049 13.2948L5.3049 10.4948C5.17739 10.3627 5.10683 10.1859 5.10842 10.0024C5.11002 9.81883 5.18364 9.64326 5.31342 9.51348C5.44321 9.38369 5.61878 9.31007 5.80232 9.30848C5.98585 9.30688 6.16268 9.37744 6.2947 9.50495L8.5998 11.8101L13.7049 6.70495C13.8362 6.57372 14.0142 6.5 14.1998 6.5C14.3854 6.5 14.5634 6.57372 14.6947 6.70495Z" fill="#046C4E" />
</svg>
}
@@ -81,11 +81,11 @@ const EditKeyDiv: FC<IEditKeyDiv> = ({ className = '', showInPopover = false, on
catch (err: any) {
if (err.status === 400) {
err.json().then(({ code }: any) => {
-        if (code === 'provider_request_failed') {
+        if (code === 'provider_request_failed')
          setEditStatus('error-api-key-exceed-bill')
-        }
      })
-    } else {
+    }
+    else {
setEditStatus('error')
}
}
@@ -96,19 +96,19 @@ const EditKeyDiv: FC<IEditKeyDiv> = ({ className = '', showInPopover = false, on
const renderErrorMessage = () => {
if (validating) {
return (
-      <div className={`text-primary-600 mt-2 text-xs`}>
+      <div className={'text-primary-600 mt-2 text-xs'}>
{t('common.provider.validating')}
</div>
)
}
if (editStatus === 'error-api-key-exceed-bill') {
return (
-      <div className={`text-[#D92D20] mt-2 text-xs`}>
+      <div className={'text-[#D92D20] mt-2 text-xs'}>
{t('common.provider.apiKeyExceedBill')}
{locale === 'en' ? ' ' : ''}
-        <Link 
+        <Link
          className='underline'
-          href="https://platform.openai.com/account/api-keys" 
+          href="https://platform.openai.com/account/api-keys"
target={'_blank'}>
{locale === 'en' ? 'this link' : '这篇文档'}
</Link>
@@ -117,7 +117,7 @@ const EditKeyDiv: FC<IEditKeyDiv> = ({ className = '', showInPopover = false, on
}
if (editStatus === 'error') {
return (
-      <div className={`text-[#D92D20] mt-2 text-xs`}>
+      <div className={'text-[#D92D20] mt-2 text-xs'}>
{t('common.provider.invalidKey')}
</div>
)

View File

@@ -21,7 +21,7 @@ export type AppCardProps = {
const AppCard = ({
app,
-  onDelete
+  onDelete,
}: AppCardProps) => {
const { t } = useTranslation()
const { notify } = useContext(ToastContext)

View File

@@ -3,13 +3,13 @@
import { useEffect, useRef } from 'react'
import useSWRInfinite from 'swr/infinite'
import { debounce } from 'lodash-es'
+import { useTranslation } from 'react-i18next'
import AppCard from './AppCard'
import NewAppCard from './NewAppCard'
-import { AppListResponse } from '@/models/app'
+import type { AppListResponse } from '@/models/app'
import { fetchAppList } from '@/service/apps'
import { useSelector } from '@/context/app-context'
import { NEED_REFRESH_APP_LIST_KEY } from '@/config'
-import { useTranslation } from 'react-i18next'
const getKey = (pageIndex: number, previousPageData: AppListResponse) => {
if (!pageIndex || previousPageData.has_more)
@@ -25,8 +25,8 @@ const Apps = () => {
const anchorRef = useRef<HTMLAnchorElement>(null)
useEffect(() => {
-    document.title = `${t('app.title')} - Dify`;
-    if(localStorage.getItem(NEED_REFRESH_APP_LIST_KEY) === '1') {
+    document.title = `${t('app.title')} - Dify`
+    if (localStorage.getItem(NEED_REFRESH_APP_LIST_KEY) === '1') {
localStorage.removeItem(NEED_REFRESH_APP_LIST_KEY)
mutate()
}
@@ -41,9 +41,8 @@ const Apps = () => {
if (!loadingStateRef.current) {
const { scrollTop, clientHeight } = pageContainerRef.current!
const anchorOffset = anchorRef.current!.offsetTop
-      if (anchorOffset - scrollTop - clientHeight < 100) {
+      if (anchorOffset - scrollTop - clientHeight < 100)
        setSize(size => size + 1)
-      }
}
}, 50)
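
Both the apps and datasets grids rely on the same useSWRInfinite pattern: getKey returns a key for the first page or whenever the previous page reports has_more, and returns null to stop fetching; the debounced scroll handler above then bumps the page count once the anchor card comes within 100px of the viewport bottom. A condensed sketch of the getKey half (the key's url and params shape are assumptions based on the fetchAppList service):

    type AppListResponse = { has_more: boolean }

    const getKey = (pageIndex: number, previousPageData: AppListResponse) => {
      // First page, or the server says more pages exist: produce a key.
      if (!pageIndex || previousPageData.has_more)
        return { url: 'apps', params: { page: pageIndex + 1, limit: 30 } } // shape assumed
      // Returning null tells useSWRInfinite to stop paginating.
      return null
    }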

View File

@@ -52,7 +52,7 @@ const NewAppDialog = ({ show, onSuccess, onClose }: NewAppDialogProps) => {
mutateTemplates()
setIsWithTemplate(false)
}
-  }, [show])
+  }, [mutateTemplates, show])
const isCreatingRef = useRef(false)
const onCreate: MouseEventHandler = useCallback(async () => {
@@ -97,7 +97,6 @@ const NewAppDialog = ({ show, onSuccess, onClose }: NewAppDialogProps) => {
return <>
{showEmojiPicker && <EmojiPicker
onSelect={(icon, icon_background) => {
-      console.log(icon, icon_background)
setEmoji({ icon, icon_background })
setShowEmojiPicker(false)
}}

View File

@@ -0,0 +1,16 @@
+import React from 'react'
+import Settings from '@/app/components/datasets/documents/detail/settings'
+
+export type IProps = {
+  params: { datasetId: string; documentId: string }
+}
+
+const DocumentSettings = async ({
+  params: { datasetId, documentId },
+}: IProps) => {
+  return (
+    <Settings datasetId={datasetId} documentId={documentId} />
+  )
+}
+
+export default DocumentSettings

View File

@@ -1,14 +1,14 @@
'use client'
import type { FC } from 'react'
import React, { useEffect } from 'react'
-import { usePathname, useSelectedLayoutSegments } from 'next/navigation'
+import { usePathname } from 'next/navigation'
import useSWR from 'swr'
import { useTranslation } from 'react-i18next'
-import { getLocaleOnClient } from '@/i18n/client'
import {
Cog8ToothIcon,
// CommandLineIcon,
Squares2X2Icon,
+  // eslint-disable-next-line sort-imports
+  PuzzlePieceIcon,
DocumentTextIcon,
} from '@heroicons/react/24/outline'
@@ -18,15 +18,17 @@ import {
DocumentTextIcon as DocumentTextSolidIcon,
} from '@heroicons/react/24/solid'
import Link from 'next/link'
-import s from './style.module.css'
import { fetchDataDetail, fetchDatasetRelatedApps } from '@/service/datasets'
import type { RelatedApp } from '@/models/datasets'
+import s from './style.module.css'
+import { getLocaleOnClient } from '@/i18n/client'
import AppSideBar from '@/app/components/app-sidebar'
import Divider from '@/app/components/base/divider'
import Indicator from '@/app/components/header/indicator'
import AppIcon from '@/app/components/base/app-icon'
import Loading from '@/app/components/base/loading'
import DatasetDetailContext from '@/context/dataset-detail'
+import { DataSourceType } from '@/models/datasets'
// import { fetchDatasetDetail } from '@/service/datasets'
@@ -38,10 +40,10 @@ export type IAppDetailLayoutProps = {
const LikedItem: FC<{ type?: 'plugin' | 'app'; appStatus?: boolean; detail: RelatedApp }> = ({
type = 'app',
appStatus = true,
-  detail
+  detail,
}) => {
return (
-    <Link prefetch className={s.itemWrapper} href={`/app/${detail?.id}/overview`}>
+    <Link className={s.itemWrapper} href={`/app/${detail?.id}/overview`}>
<div className={s.iconWrapper}>
<AppIcon size='tiny' />
{type === 'app' && (
@@ -58,7 +60,7 @@ const LikedItem: FC<{ type?: 'plugin' | 'app'; appStatus?: boolean; detail: Rela
const TargetIcon: FC<{ className?: string }> = ({ className }) => {
return <svg width="16" height="16" viewBox="0 0 16 16" fill="none" xmlns="http://www.w3.org/2000/svg" className={className ?? ''}>
<g clip-path="url(#clip0_4610_6951)">
<path d="M10.6666 5.33325V3.33325L12.6666 1.33325L13.3332 2.66659L14.6666 3.33325L12.6666 5.33325H10.6666ZM10.6666 5.33325L7.9999 7.99988M14.6666 7.99992C14.6666 11.6818 11.6818 14.6666 7.99992 14.6666C4.31802 14.6666 1.33325 11.6818 1.33325 7.99992C1.33325 4.31802 4.31802 1.33325 7.99992 1.33325M11.3333 7.99992C11.3333 9.84087 9.84087 11.3333 7.99992 11.3333C6.15897 11.3333 4.66659 9.84087 4.66659 7.99992C4.66659 6.15897 6.15897 4.66659 7.99992 4.66659" stroke="#344054" stroke-width="1.25" stroke-linecap="round" stroke-linejoin="round" />
<path d="M10.6666 5.33325V3.33325L12.6666 1.33325L13.3332 2.66659L14.6666 3.33325L12.6666 5.33325H10.6666ZM10.6666 5.33325L7.9999 7.99988M14.6666 7.99992C14.6666 11.6818 11.6818 14.6666 7.99992 14.6666C4.31802 14.6666 1.33325 11.6818 1.33325 7.99992C1.33325 4.31802 4.31802 1.33325 7.99992 1.33325M11.3333 7.99992C11.3333 9.84087 9.84087 11.3333 7.99992 11.3333C6.15897 11.3333 4.66659 9.84087 4.66659 7.99992C4.66659 6.15897 6.15897 4.66659 7.99992 4.66659" stroke="#344054" strokeWidth="1.25" strokeLinecap="round" strokeLinejoin="round" />
</g>
<defs>
<clipPath id="clip0_4610_6951">
@@ -70,7 +72,7 @@ const TargetIcon: FC<{ className?: string }> = ({ className }) => {
const TargetSolidIcon: FC<{ className?: string }> = ({ className }) => {
return <svg width="16" height="16" viewBox="0 0 16 16" fill="none" xmlns="http://www.w3.org/2000/svg" className={className ?? ''}>
<path fill-rule="evenodd" clip-rule="evenodd" d="M12.7733 0.67512C12.9848 0.709447 13.1669 0.843364 13.2627 1.03504L13.83 2.16961L14.9646 2.73689C15.1563 2.83273 15.2902 3.01486 15.3245 3.22639C15.3588 3.43792 15.2894 3.65305 15.1379 3.80458L13.1379 5.80458C13.0128 5.92961 12.8433 5.99985 12.6665 5.99985H10.9426L8.47124 8.47124C8.21089 8.73159 7.78878 8.73159 7.52843 8.47124C7.26808 8.21089 7.26808 7.78878 7.52843 7.52843L9.9998 5.05707V3.33318C9.9998 3.15637 10.07 2.9868 10.1951 2.86177L12.1951 0.861774C12.3466 0.710244 12.5617 0.640794 12.7733 0.67512Z" fill="#155EEF" />
<path fillRule="evenodd" clipRule="evenodd" d="M12.7733 0.67512C12.9848 0.709447 13.1669 0.843364 13.2627 1.03504L13.83 2.16961L14.9646 2.73689C15.1563 2.83273 15.2902 3.01486 15.3245 3.22639C15.3588 3.43792 15.2894 3.65305 15.1379 3.80458L13.1379 5.80458C13.0128 5.92961 12.8433 5.99985 12.6665 5.99985H10.9426L8.47124 8.47124C8.21089 8.73159 7.78878 8.73159 7.52843 8.47124C7.26808 8.21089 7.26808 7.78878 7.52843 7.52843L9.9998 5.05707V3.33318C9.9998 3.15637 10.07 2.9868 10.1951 2.86177L12.1951 0.861774C12.3466 0.710244 12.5617 0.640794 12.7733 0.67512Z" fill="#155EEF" />
<path d="M1.99984 7.99984C1.99984 4.68613 4.68613 1.99984 7.99984 1.99984C8.36803 1.99984 8.6665 1.70136 8.6665 1.33317C8.6665 0.964981 8.36803 0.666504 7.99984 0.666504C3.94975 0.666504 0.666504 3.94975 0.666504 7.99984C0.666504 12.0499 3.94975 15.3332 7.99984 15.3332C12.0499 15.3332 15.3332 12.0499 15.3332 7.99984C15.3332 7.63165 15.0347 7.33317 14.6665 7.33317C14.2983 7.33317 13.9998 7.63165 13.9998 7.99984C13.9998 11.3135 11.3135 13.9998 7.99984 13.9998C4.68613 13.9998 1.99984 11.3135 1.99984 7.99984Z" fill="#155EEF" />
<path d="M5.33317 7.99984C5.33317 6.52708 6.52708 5.33317 7.99984 5.33317C8.36803 5.33317 8.6665 5.03469 8.6665 4.6665C8.6665 4.29831 8.36803 3.99984 7.99984 3.99984C5.7907 3.99984 3.99984 5.7907 3.99984 7.99984C3.99984 10.209 5.7907 11.9998 7.99984 11.9998C10.209 11.9998 11.9998 10.209 11.9998 7.99984C11.9998 7.63165 11.7014 7.33317 11.3332 7.33317C10.965 7.33317 10.6665 7.63165 10.6665 7.99984C10.6665 9.4726 9.4726 10.6665 7.99984 10.6665C6.52708 10.6665 5.33317 9.4726 5.33317 7.99984Z" fill="#155EEF" />
</svg>
@@ -79,7 +81,7 @@ const TargetSolidIcon: FC<{ className?: string }> = ({ className }) => {
const BookOpenIcon: FC<{ className?: string }> = ({ className }) => {
return <svg width="12" height="12" viewBox="0 0 12 12" fill="none" xmlns="http://www.w3.org/2000/svg" className={className ?? ''}>
<path opacity="0.12" d="M1 3.1C1 2.53995 1 2.25992 1.10899 2.04601C1.20487 1.85785 1.35785 1.70487 1.54601 1.60899C1.75992 1.5 2.03995 1.5 2.6 1.5H2.8C3.9201 1.5 4.48016 1.5 4.90798 1.71799C5.28431 1.90973 5.59027 2.21569 5.78201 2.59202C6 3.01984 6 3.5799 6 4.7V10.5L5.94997 10.425C5.60265 9.90398 5.42899 9.64349 5.19955 9.45491C4.99643 9.28796 4.76238 9.1627 4.5108 9.0863C4.22663 9 3.91355 9 3.28741 9H2.6C2.03995 9 1.75992 9 1.54601 8.89101C1.35785 8.79513 1.20487 8.64215 1.10899 8.45399C1 8.24008 1 7.96005 1 7.4V3.1Z" fill="#155EEF" />
<path d="M6 10.5L5.94997 10.425C5.60265 9.90398 5.42899 9.64349 5.19955 9.45491C4.99643 9.28796 4.76238 9.1627 4.5108 9.0863C4.22663 9 3.91355 9 3.28741 9H2.6C2.03995 9 1.75992 9 1.54601 8.89101C1.35785 8.79513 1.20487 8.64215 1.10899 8.45399C1 8.24008 1 7.96005 1 7.4V3.1C1 2.53995 1 2.25992 1.10899 2.04601C1.20487 1.85785 1.35785 1.70487 1.54601 1.60899C1.75992 1.5 2.03995 1.5 2.6 1.5H2.8C3.9201 1.5 4.48016 1.5 4.90798 1.71799C5.28431 1.90973 5.59027 2.21569 5.78201 2.59202C6 3.01984 6 3.5799 6 4.7M6 10.5V4.7M6 10.5L6.05003 10.425C6.39735 9.90398 6.57101 9.64349 6.80045 9.45491C7.00357 9.28796 7.23762 9.1627 7.4892 9.0863C7.77337 9 8.08645 9 8.71259 9H9.4C9.96005 9 10.2401 9 10.454 8.89101C10.6422 8.79513 10.7951 8.64215 10.891 8.45399C11 8.24008 11 7.96005 11 7.4V3.1C11 2.53995 11 2.25992 10.891 2.04601C10.7951 1.85785 10.6422 1.70487 10.454 1.60899C10.2401 1.5 9.96005 1.5 9.4 1.5H9.2C8.07989 1.5 7.51984 1.5 7.09202 1.71799C6.71569 1.90973 6.40973 2.21569 6.21799 2.59202C6 3.01984 6 3.5799 6 4.7" stroke="#155EEF" stroke-linecap="round" stroke-linejoin="round" />
<path d="M6 10.5L5.94997 10.425C5.60265 9.90398 5.42899 9.64349 5.19955 9.45491C4.99643 9.28796 4.76238 9.1627 4.5108 9.0863C4.22663 9 3.91355 9 3.28741 9H2.6C2.03995 9 1.75992 9 1.54601 8.89101C1.35785 8.79513 1.20487 8.64215 1.10899 8.45399C1 8.24008 1 7.96005 1 7.4V3.1C1 2.53995 1 2.25992 1.10899 2.04601C1.20487 1.85785 1.35785 1.70487 1.54601 1.60899C1.75992 1.5 2.03995 1.5 2.6 1.5H2.8C3.9201 1.5 4.48016 1.5 4.90798 1.71799C5.28431 1.90973 5.59027 2.21569 5.78201 2.59202C6 3.01984 6 3.5799 6 4.7M6 10.5V4.7M6 10.5L6.05003 10.425C6.39735 9.90398 6.57101 9.64349 6.80045 9.45491C7.00357 9.28796 7.23762 9.1627 7.4892 9.0863C7.77337 9 8.08645 9 8.71259 9H9.4C9.96005 9 10.2401 9 10.454 8.89101C10.6422 8.79513 10.7951 8.64215 10.891 8.45399C11 8.24008 11 7.96005 11 7.4V3.1C11 2.53995 11 2.25992 10.891 2.04601C10.7951 1.85785 10.6422 1.70487 10.454 1.60899C10.2401 1.5 9.96005 1.5 9.4 1.5H9.2C8.07989 1.5 7.51984 1.5 7.09202 1.71799C6.71569 1.90973 6.40973 2.21569 6.21799 2.59202C6 3.01984 6 3.5799 6 4.7" stroke="#155EEF" strokeLinecap="round" strokeLinejoin="round" />
</svg>
}
@@ -91,7 +93,7 @@ const DatasetDetailLayout: FC<IAppDetailLayoutProps> = (props) => {
const pathname = usePathname()
const hideSideBar = /documents\/create$/.test(pathname)
const { t } = useTranslation()
-  const { data: datasetRes, error } = useSWR({
+  const { data: datasetRes, error, mutate: mutateDatasetRes } = useSWR({
action: 'fetchDataDetail',
datasetId,
}, apiParams => fetchDataDetail(apiParams.datasetId))
@@ -109,9 +111,8 @@ const DatasetDetailLayout: FC<IAppDetailLayoutProps> = (props) => {
]
useEffect(() => {
-    if (datasetRes) {
+    if (datasetRes)
      document.title = `${datasetRes.name || 'Dataset'} - Dify`
-    }
}, [datasetRes])
const ExtraInfo: FC = () => {
@@ -119,32 +120,34 @@ const DatasetDetailLayout: FC<IAppDetailLayoutProps> = (props) => {
return <div className='w-full'>
<Divider className='mt-5' />
-    {relatedApps?.data?.length ? (
-      <>
-        <div className={s.subTitle}>{relatedApps?.total || '--'} {t('common.datasetMenus.relatedApp')}</div>
-        {relatedApps?.data?.map((item) => (<LikedItem detail={item} />))}
-      </>
-    ) : (
-      <div className='mt-5 p-3'>
-        <div className='flex items-center justify-start gap-2'>
-          <div className={s.emptyIconDiv}>
-            <Squares2X2Icon className='w-3 h-3 text-gray-500' />
-          </div>
-          <div className={s.emptyIconDiv}>
-            <PuzzlePieceIcon className='w-3 h-3 text-gray-500' />
-          </div>
-        </div>
-        <div className='text-xs text-gray-500 mt-2'>{t('common.datasetMenus.emptyTip')}</div>
-        <a
-          className='inline-flex items-center text-xs text-primary-600 mt-2 cursor-pointer'
-          href={`https://docs.dify.ai/${locale === 'en' ? '' : 'v/zh-hans'}/application/prompt-engineering`}
-          target='_blank'
-        >
-          <BookOpenIcon className='mr-1' />
-          {t('common.datasetMenus.viewDoc')}
-        </a>
-      </div>
-    )}
+    {relatedApps?.data?.length
+      ? (
+        <>
+          <div className={s.subTitle}>{relatedApps?.total || '--'} {t('common.datasetMenus.relatedApp')}</div>
+          {relatedApps?.data?.map(item => (<LikedItem detail={item} />))}
+        </>
+      )
+      : (
+        <div className='mt-5 p-3'>
+          <div className='flex items-center justify-start gap-2'>
+            <div className={s.emptyIconDiv}>
+              <Squares2X2Icon className='w-3 h-3 text-gray-500' />
+            </div>
+            <div className={s.emptyIconDiv}>
+              <PuzzlePieceIcon className='w-3 h-3 text-gray-500' />
+            </div>
+          </div>
+          <div className='text-xs text-gray-500 mt-2'>{t('common.datasetMenus.emptyTip')}</div>
+          <a
+            className='inline-flex items-center text-xs text-primary-600 mt-2 cursor-pointer'
+            href={`https://docs.dify.ai/${locale === 'zh-Hans' ? 'v/zh-hans' : ''}/application/prompt-engineering`}
+            target='_blank'
+          >
+            <BookOpenIcon className='mr-1' />
+            {t('common.datasetMenus.viewDoc')}
+          </a>
+        </div>
+      )}
</div>
}
@@ -160,9 +163,13 @@ const DatasetDetailLayout: FC<IAppDetailLayoutProps> = (props) => {
desc={datasetRes?.description || '--'}
navigation={navigation}
extraInfo={<ExtraInfo />}
-      iconType='dataset'
+      iconType={datasetRes?.data_source_type === DataSourceType.NOTION ? 'notion' : 'dataset'}
/>}
-    <DatasetDetailContext.Provider value={{ indexingTechnique: datasetRes?.indexing_technique }}>
+    <DatasetDetailContext.Provider value={{
+      indexingTechnique: datasetRes?.indexing_technique,
+      dataset: datasetRes,
+      mutateDatasetRes: () => mutateDatasetRes(),
+    }}>
<div className="bg-white grow">{children}</div>
</DatasetDetailContext.Provider>
</div>

View File

@@ -1,20 +1,17 @@
'use client'
-import { useContext, useContextSelector } from 'use-context-selector'
+import { useContext } from 'use-context-selector'
import Link from 'next/link'
-import useSWR from 'swr'
import type { MouseEventHandler } from 'react'
import { useCallback, useState } from 'react'
import { useTranslation } from 'react-i18next'
+import classNames from 'classnames'
import style from '../list.module.css'
import type { App } from '@/types/app'
import Confirm from '@/app/components/base/confirm'
import { ToastContext } from '@/app/components/base/toast'
-import { deleteDataset, fetchDatasets } from '@/service/datasets'
+import { deleteDataset } from '@/service/datasets'
import AppIcon from '@/app/components/base/app-icon'
-import AppsContext from '@/context/app-context'
-import { DataSet } from '@/models/datasets'
-import classNames from 'classnames'
+import type { DataSet } from '@/models/datasets'
export type DatasetCardProps = {
dataset: DataSet
@@ -23,7 +20,7 @@ export type DatasetCardProps = {
const DatasetCard = ({
dataset,
-  onDelete
+  onDelete,
}: DatasetCardProps) => {
const { t } = useTranslation()
const { notify } = useContext(ToastContext)

View File

@@ -2,12 +2,12 @@
import { useEffect, useRef } from 'react'
import useSWRInfinite from 'swr/infinite'
-import { debounce } from 'lodash-es';
-import { DataSetListResponse } from '@/models/datasets';
+import { debounce } from 'lodash-es'
import NewDatasetCard from './NewDatasetCard'
-import DatasetCard from './DatasetCard';
-import { fetchDatasets } from '@/service/datasets';
-import { useSelector } from '@/context/app-context';
+import DatasetCard from './DatasetCard'
+import type { DataSetListResponse } from '@/models/datasets'
+import { fetchDatasets } from '@/service/datasets'
+import { useSelector } from '@/context/app-context'
const getKey = (pageIndex: number, previousPageData: DataSetListResponse) => {
if (!pageIndex || previousPageData.has_more)
@@ -30,9 +30,8 @@ const Datasets = () => {
if (!loadingStateRef.current) {
const { scrollTop, clientHeight } = pageContainerRef.current!
const anchorOffset = anchorRef.current!.offsetTop
-      if (anchorOffset - scrollTop - clientHeight < 100) {
+      if (anchorOffset - scrollTop - clientHeight < 100)
        setSize(size => size + 1)
-      }
}
}, 50)
@@ -43,7 +42,7 @@ const Datasets = () => {
return (
<nav className='grid content-start grid-cols-1 gap-4 px-12 pt-8 sm:grid-cols-2 lg:grid-cols-4 grow shrink-0'>
{data?.map(({ data: datasets }) => datasets.map(dataset => (
-      <DatasetCard key={dataset.id} dataset={dataset} onDelete={mutate} />)
+      <DatasetCard key={dataset.id} dataset={dataset} onDelete={mutate} />),
))}
<NewDatasetCard ref={anchorRef} />
</nav>
@@ -51,4 +50,3 @@ const Datasets = () => {
}
export default Datasets

View File

@@ -1,7 +1,7 @@
import AppList from "@/app/components/explore/app-list"
import React from 'react'
import AppList from '@/app/components/explore/app-list'
const Apps = ({ }) => {
const Apps = () => {
return <AppList />
}

View File

@@ -1,13 +1,14 @@
-import React, { FC } from 'react'
+import type { FC } from 'react'
+import React from 'react'
import Main from '@/app/components/explore/installed-app'
-export interface IInstalledAppProps {
+export type IInstalledAppProps = {
  params: {
    appId: string
  }
}
-const InstalledApp: FC<IInstalledAppProps> = ({ params: {appId} }) => {
+const InstalledApp: FC<IInstalledAppProps> = ({ params: { appId } }) => {
return (
<Main id={appId} />
)

View File

@@ -5,7 +5,6 @@ import type { IMainProps } from '@/app/components/share/chat'
import Main from '@/app/components/share/chat'
const Chat: FC<IMainProps> = () => {
return (
<Main />
)

View File

@@ -14,32 +14,51 @@ export function randomString(length: number) {
}
export type IAppBasicProps = {
-  iconType?: 'app' | 'api' | 'dataset'
-  icon?: string,
-  icon_background?: string,
+  iconType?: 'app' | 'api' | 'dataset' | 'webapp' | 'notion'
+  icon?: string
+  icon_background?: string
name: string
type: string | React.ReactNode
hoverTip?: string
textStyle?: { main?: string; extra?: string }
}
const AlgorithmSvg = <svg width="18" height="18" viewBox="0 0 18 18" fill="none" xmlns="http://www.w3.org/2000/svg">
<path d="M8.5 3.5C8.5 4.60457 9.39543 5.5 10.5 5.5C11.6046 5.5 12.5 4.60457 12.5 3.5C12.5 2.39543 11.6046 1.5 10.5 1.5C9.39543 1.5 8.5 2.39543 8.5 3.5Z" stroke="#5850EC" stroke-width="1.5" stroke-linecap="round" stroke-linejoin="round" />
<path d="M12.5 9C12.5 10.1046 13.3954 11 14.5 11C15.6046 11 16.5 10.1046 16.5 9C16.5 7.89543 15.6046 7 14.5 7C13.3954 7 12.5 7.89543 12.5 9Z" stroke="#5850EC" stroke-width="1.5" stroke-linecap="round" stroke-linejoin="round" />
<path d="M8.5 3.5H5.5L3.5 6.5" stroke="#5850EC" stroke-width="1.5" stroke-linecap="round" stroke-linejoin="round" />
<path d="M8.5 14.5C8.5 15.6046 9.39543 16.5 10.5 16.5C11.6046 16.5 12.5 15.6046 12.5 14.5C12.5 13.3954 11.6046 12.5 10.5 12.5C9.39543 12.5 8.5 13.3954 8.5 14.5Z" stroke="#5850EC" stroke-width="1.5" stroke-linecap="round" stroke-linejoin="round" />
<path d="M8.5 14.5H5.5L3.5 11.5" stroke="#5850EC" stroke-width="1.5" stroke-linecap="round" stroke-linejoin="round" />
<path d="M12.5 9H1.5" stroke="#5850EC" stroke-width="1.5" stroke-linecap="round" stroke-linejoin="round" />
const ApiSvg = <svg width="18" height="18" viewBox="0 0 18 18" fill="none" xmlns="http://www.w3.org/2000/svg">
<path d="M8.5 3.5C8.5 4.60457 9.39543 5.5 10.5 5.5C11.6046 5.5 12.5 4.60457 12.5 3.5C12.5 2.39543 11.6046 1.5 10.5 1.5C9.39543 1.5 8.5 2.39543 8.5 3.5Z" stroke="#5850EC" strokeWidth="1.5" strokeLinecap="round" strokeLinejoin="round" />
<path d="M12.5 9C12.5 10.1046 13.3954 11 14.5 11C15.6046 11 16.5 10.1046 16.5 9C16.5 7.89543 15.6046 7 14.5 7C13.3954 7 12.5 7.89543 12.5 9Z" stroke="#5850EC" strokeWidth="1.5" strokeLinecap="round" strokeLinejoin="round" />
<path d="M8.5 3.5H5.5L3.5 6.5" stroke="#5850EC" strokeWidth="1.5" strokeLinecap="round" strokeLinejoin="round" />
<path d="M8.5 14.5C8.5 15.6046 9.39543 16.5 10.5 16.5C11.6046 16.5 12.5 15.6046 12.5 14.5C12.5 13.3954 11.6046 12.5 10.5 12.5C9.39543 12.5 8.5 13.3954 8.5 14.5Z" stroke="#5850EC" strokeWidth="1.5" strokeLinecap="round" strokeLinejoin="round" />
<path d="M8.5 14.5H5.5L3.5 11.5" stroke="#5850EC" strokeWidth="1.5" strokeLinecap="round" strokeLinejoin="round" />
<path d="M12.5 9H1.5" stroke="#5850EC" strokeWidth="1.5" strokeLinecap="round" strokeLinejoin="round" />
</svg>
const DatasetSvg = <svg width="20" height="20" viewBox="0 0 20 20" fill="none" xmlns="http://www.w3.org/2000/svg">
<path fill-rule="evenodd" clip-rule="evenodd" d="M0.833497 5.13481C0.833483 4.69553 0.83347 4.31654 0.858973 4.0044C0.88589 3.67495 0.94532 3.34727 1.10598 3.03195C1.34567 2.56155 1.72812 2.17909 2.19852 1.93941C2.51384 1.77875 2.84152 1.71932 3.17097 1.6924C3.48312 1.6669 3.86209 1.66691 4.30137 1.66693L7.62238 1.66684C8.11701 1.66618 8.55199 1.66561 8.95195 1.80356C9.30227 1.92439 9.62134 2.12159 9.88607 2.38088C10.1883 2.67692 10.3823 3.06624 10.603 3.50894L11.3484 5.00008H14.3679C15.0387 5.00007 15.5924 5.00006 16.0434 5.03691C16.5118 5.07518 16.9424 5.15732 17.3468 5.36339C17.974 5.68297 18.4839 6.19291 18.8035 6.82011C19.0096 7.22456 19.0917 7.65515 19.13 8.12356C19.1668 8.57455 19.1668 9.12818 19.1668 9.79898V13.5345C19.1668 14.2053 19.1668 14.7589 19.13 15.2099C19.0917 15.6784 19.0096 16.1089 18.8035 16.5134C18.4839 17.1406 17.974 17.6505 17.3468 17.9701C16.9424 18.1762 16.5118 18.2583 16.0434 18.2966C15.5924 18.3334 15.0387 18.3334 14.3679 18.3334H5.63243C4.96163 18.3334 4.40797 18.3334 3.95698 18.2966C3.48856 18.2583 3.05798 18.1762 2.65353 17.9701C2.02632 17.6505 1.51639 17.1406 1.19681 16.5134C0.990734 16.1089 0.908597 15.6784 0.870326 15.2099C0.833478 14.7589 0.833487 14.2053 0.833497 13.5345V5.13481ZM7.51874 3.33359C8.17742 3.33359 8.30798 3.34447 8.4085 3.37914C8.52527 3.41942 8.63163 3.48515 8.71987 3.57158C8.79584 3.64598 8.86396 3.7579 9.15852 4.34704L9.48505 5.00008L2.50023 5.00008C2.50059 4.61259 2.50314 4.34771 2.5201 4.14012C2.5386 3.91374 2.57 3.82981 2.59099 3.7886C2.67089 3.6318 2.79837 3.50432 2.95517 3.42442C2.99638 3.40343 3.08031 3.37203 3.30669 3.35353C3.54281 3.33424 3.85304 3.33359 4.3335 3.33359H7.51874Z" fill="#444CE7" />
<path fillRule="evenodd" clipRule="evenodd" d="M0.833497 5.13481C0.833483 4.69553 0.83347 4.31654 0.858973 4.0044C0.88589 3.67495 0.94532 3.34727 1.10598 3.03195C1.34567 2.56155 1.72812 2.17909 2.19852 1.93941C2.51384 1.77875 2.84152 1.71932 3.17097 1.6924C3.48312 1.6669 3.86209 1.66691 4.30137 1.66693L7.62238 1.66684C8.11701 1.66618 8.55199 1.66561 8.95195 1.80356C9.30227 1.92439 9.62134 2.12159 9.88607 2.38088C10.1883 2.67692 10.3823 3.06624 10.603 3.50894L11.3484 5.00008H14.3679C15.0387 5.00007 15.5924 5.00006 16.0434 5.03691C16.5118 5.07518 16.9424 5.15732 17.3468 5.36339C17.974 5.68297 18.4839 6.19291 18.8035 6.82011C19.0096 7.22456 19.0917 7.65515 19.13 8.12356C19.1668 8.57455 19.1668 9.12818 19.1668 9.79898V13.5345C19.1668 14.2053 19.1668 14.7589 19.13 15.2099C19.0917 15.6784 19.0096 16.1089 18.8035 16.5134C18.4839 17.1406 17.974 17.6505 17.3468 17.9701C16.9424 18.1762 16.5118 18.2583 16.0434 18.2966C15.5924 18.3334 15.0387 18.3334 14.3679 18.3334H5.63243C4.96163 18.3334 4.40797 18.3334 3.95698 18.2966C3.48856 18.2583 3.05798 18.1762 2.65353 17.9701C2.02632 17.6505 1.51639 17.1406 1.19681 16.5134C0.990734 16.1089 0.908597 15.6784 0.870326 15.2099C0.833478 14.7589 0.833487 14.2053 0.833497 13.5345V5.13481ZM7.51874 3.33359C8.17742 3.33359 8.30798 3.34447 8.4085 3.37914C8.52527 3.41942 8.63163 3.48515 8.71987 3.57158C8.79584 3.64598 8.86396 3.7579 9.15852 4.34704L9.48505 5.00008L2.50023 5.00008C2.50059 4.61259 2.50314 4.34771 2.5201 4.14012C2.5386 3.91374 2.57 3.82981 2.59099 3.7886C2.67089 3.6318 2.79837 3.50432 2.95517 3.42442C2.99638 3.40343 3.08031 3.37203 3.30669 3.35353C3.54281 3.33424 3.85304 3.33359 4.3335 3.33359H7.51874Z" fill="#444CE7" />
</svg>
const WebappSvg = <svg width="16" height="18" viewBox="0 0 16 18" fill="none" xmlns="http://www.w3.org/2000/svg">
<path d="M14.375 5.45825L7.99998 8.99992M7.99998 8.99992L1.62498 5.45825M7.99998 8.99992L8 16.1249M14.75 12.0439V5.95603C14.75 5.69904 14.75 5.57055 14.7121 5.45595C14.6786 5.35457 14.6239 5.26151 14.5515 5.18299C14.4697 5.09424 14.3574 5.03184 14.1328 4.90704L8.58277 1.8237C8.37007 1.70553 8.26372 1.64645 8.15109 1.62329C8.05141 1.60278 7.9486 1.60278 7.84891 1.62329C7.73628 1.64645 7.62993 1.70553 7.41723 1.8237L1.86723 4.90704C1.64259 5.03184 1.53026 5.09424 1.44847 5.18299C1.37612 5.26151 1.32136 5.35457 1.28786 5.45595C1.25 5.57055 1.25 5.69904 1.25 5.95603V12.0439C1.25 12.3008 1.25 12.4293 1.28786 12.5439C1.32136 12.6453 1.37612 12.7384 1.44847 12.8169C1.53026 12.9056 1.64259 12.968 1.86723 13.0928L7.41723 16.1762C7.62993 16.2943 7.73628 16.3534 7.84891 16.3766C7.9486 16.3971 8.05141 16.3971 8.15109 16.3766C8.26372 16.3534 8.37007 16.2943 8.58277 16.1762L14.1328 13.0928C14.3574 12.968 14.4697 12.9056 14.5515 12.8169C14.6239 12.7384 14.6786 12.6453 14.7121 12.5439C14.75 12.4293 14.75 12.3008 14.75 12.0439Z" stroke="#155EEF" strokeWidth="1.5" strokeLinecap="round" strokeLinejoin="round"/>
</svg>
const NotionSvg = <svg width="24" height="24" viewBox="0 0 24 24" fill="none" xmlns="http://www.w3.org/2000/svg">
<g clip-path="url(#clip0_6294_13848)">
<path fill-rule="evenodd" clip-rule="evenodd" d="M4.287 21.9133L1.70748 18.6999C1.08685 17.9267 0.75 16.976 0.75 15.9974V4.36124C0.75 2.89548 1.92269 1.67923 3.43553 1.57594L15.3991 0.759137C16.2682 0.699797 17.1321 0.930818 17.8461 1.41353L22.0494 4.25543C22.8018 4.76414 23.25 5.59574 23.25 6.48319V19.7124C23.25 21.1468 22.0969 22.3345 20.6157 22.4256L7.3375 23.243C6.1555 23.3158 5.01299 22.8178 4.287 21.9133Z" fill="white"/>
<path d="M8.43607 10.1842V10.0318C8.43607 9.64564 8.74535 9.32537 9.14397 9.29876L12.0475 9.10491L16.0628 15.0178V9.82823L15.0293 9.69046V9.6181C15.0293 9.22739 15.3456 8.90501 15.7493 8.88433L18.3912 8.74899V9.12918C18.3912 9.30765 18.2585 9.46031 18.0766 9.49108L17.4408 9.59861V18.0029L16.6429 18.2773C15.9764 18.5065 15.2343 18.2611 14.8527 17.6853L10.9545 11.803V17.4173L12.1544 17.647L12.1377 17.7583C12.0853 18.1069 11.7843 18.3705 11.4202 18.3867L8.43607 18.5195C8.39662 18.1447 8.67758 17.8093 9.06518 17.7686L9.45771 17.7273V10.2416L8.43607 10.1842Z" fill="black"/>
<path fill-rule="evenodd" clip-rule="evenodd" d="M15.5062 2.22521L3.5426 3.04201C2.82599 3.09094 2.27051 3.66706 2.27051 4.36136V15.9975C2.27051 16.6499 2.49507 17.2837 2.90883 17.7992L5.48835 21.0126C5.90541 21.5322 6.56174 21.8183 7.24076 21.7765L20.519 20.9591C21.1995 20.9172 21.7293 20.3716 21.7293 19.7125V6.48332C21.7293 6.07557 21.5234 5.69348 21.1777 5.45975L16.9743 2.61784C16.546 2.32822 16.0277 2.1896 15.5062 2.22521ZM4.13585 4.54287C3.96946 4.41968 4.04865 4.16303 4.25768 4.14804L15.5866 3.33545C15.9476 3.30956 16.3063 3.40896 16.5982 3.61578L18.8713 5.22622C18.9576 5.28736 18.9171 5.41935 18.8102 5.42516L6.8129 6.07764C6.44983 6.09739 6.09144 5.99073 5.80276 5.77699L4.13585 4.54287ZM6.25018 8.12315C6.25018 7.7334 6.56506 7.41145 6.9677 7.38952L19.6523 6.69871C20.0447 6.67734 20.375 6.97912 20.375 7.35898V18.8141C20.375 19.2031 20.0613 19.5247 19.6594 19.5476L7.05516 20.2648C6.61845 20.2896 6.25018 19.954 6.25018 19.5312V8.12315Z" fill="black"/>
</g>
<defs>
<clipPath id="clip0_6294_13848">
<rect width="24" height="24" fill="white"/>
</clipPath>
</defs>
</svg>
const ICON_MAP = {
-  'app': <AppIcon className='border !border-[rgba(0,0,0,0.05)]' />,
-  'api': <AppIcon innerIcon={AlgorithmSvg} className='border !bg-purple-50 !border-purple-200' />,
-  'dataset': <AppIcon innerIcon={DatasetSvg} className='!border-[0.5px] !border-indigo-100 !bg-indigo-25' />
+  app: <AppIcon className='border !border-[rgba(0,0,0,0.05)]' />,
+  api: <AppIcon innerIcon={ApiSvg} className='border !bg-purple-50 !border-purple-200' />,
+  dataset: <AppIcon innerIcon={DatasetSvg} className='!border-[0.5px] !border-indigo-100 !bg-indigo-25' />,
+  webapp: <AppIcon innerIcon={WebappSvg} className='border !bg-primary-100 !border-primary-200' />,
+  notion: <AppIcon innerIcon={NotionSvg} className='!border-[0.5px] !border-indigo-100 !bg-white' />,
}
export default function AppBasic({ icon, icon_background, name, type, hoverTip, textStyle, iconType = 'app' }: IAppBasicProps) {
@@ -50,8 +69,8 @@ export default function AppBasic({ icon, icon_background, name, type, hoverTip,
<AppIcon icon={icon} background={icon_background} />
</div>
)}
-      {iconType !== 'app' &&
-        <div className='flex-shrink-0 mr-3'>
+      {iconType !== 'app'
+        && <div className='flex-shrink-0 mr-3'>
{ICON_MAP[iconType]}
</div>

View File

@@ -4,7 +4,7 @@ import NavLink from './navLink'
import AppBasic from './basic'
export type IAppDetailNavProps = {
-  iconType?: 'app' | 'dataset'
+  iconType?: 'app' | 'dataset' | 'notion'
title: string
desc: string
icon: string
@@ -18,7 +18,6 @@ export type IAppDetailNavProps = {
extraInfo?: React.ReactNode
}
const AppDetailNav: FC<IAppDetailNavProps> = ({ title, desc, icon, icon_background, navigation, extraInfo, iconType = 'app' }) => {
return (
<div className="flex flex-col w-56 overflow-y-auto bg-white border-r border-gray-200 shrink-0">

View File

@@ -18,7 +18,6 @@ export default function NavLink({
return (
<Link
-      prefetch
key={name}
href={href}
className={classNames(

View File

@@ -1,6 +1,6 @@
'use client'
import type { FC } from 'react'
-import React, { useEffect, useRef, useState } from 'react'
+import React, { useEffect, useLayoutEffect, useRef, useState } from 'react'
import { useContext } from 'use-context-selector'
import cn from 'classnames'
import { HandThumbDownIcon, HandThumbUpIcon } from '@heroicons/react/24/outline'
@@ -22,7 +22,7 @@ import useBreakpoints, { MediaType } from '@/hooks/use-breakpoints'
const stopIcon = (
<svg width="14" height="14" viewBox="0 0 14 14" fill="none" xmlns="http://www.w3.org/2000/svg">
<path fill-rule="evenodd" clip-rule="evenodd" d="M7.00004 0.583313C3.45621 0.583313 0.583374 3.45615 0.583374 6.99998C0.583374 10.5438 3.45621 13.4166 7.00004 13.4166C10.5439 13.4166 13.4167 10.5438 13.4167 6.99998C13.4167 3.45615 10.5439 0.583313 7.00004 0.583313ZM4.73029 4.98515C4.66671 5.10993 4.66671 5.27328 4.66671 5.59998V8.39998C4.66671 8.72668 4.66671 8.89003 4.73029 9.01481C4.78621 9.12457 4.87545 9.21381 4.98521 9.26973C5.10999 9.33331 5.27334 9.33331 5.60004 9.33331H8.40004C8.72674 9.33331 8.89009 9.33331 9.01487 9.26973C9.12463 9.21381 9.21387 9.12457 9.2698 9.01481C9.33337 8.89003 9.33337 8.72668 9.33337 8.39998V5.59998C9.33337 5.27328 9.33337 5.10993 9.2698 4.98515C9.21387 4.87539 9.12463 4.78615 9.01487 4.73023C8.89009 4.66665 8.72674 4.66665 8.40004 4.66665H5.60004C5.27334 4.66665 5.10999 4.66665 4.98521 4.73023C4.87545 4.78615 4.78621 4.87539 4.73029 4.98515Z" fill="#667085" />
<path fillRule="evenodd" clipRule="evenodd" d="M7.00004 0.583313C3.45621 0.583313 0.583374 3.45615 0.583374 6.99998C0.583374 10.5438 3.45621 13.4166 7.00004 13.4166C10.5439 13.4166 13.4167 10.5438 13.4167 6.99998C13.4167 3.45615 10.5439 0.583313 7.00004 0.583313ZM4.73029 4.98515C4.66671 5.10993 4.66671 5.27328 4.66671 5.59998V8.39998C4.66671 8.72668 4.66671 8.89003 4.73029 9.01481C4.78621 9.12457 4.87545 9.21381 4.98521 9.26973C5.10999 9.33331 5.27334 9.33331 5.60004 9.33331H8.40004C8.72674 9.33331 8.89009 9.33331 9.01487 9.26973C9.12463 9.21381 9.21387 9.12457 9.2698 9.01481C9.33337 8.89003 9.33337 8.72668 9.33337 8.39998V5.59998C9.33337 5.27328 9.33337 5.10993 9.2698 4.98515C9.21387 4.87539 9.12463 4.78615 9.01487 4.73023C8.89009 4.66665 8.72674 4.66665 8.40004 4.66665H5.60004C5.27334 4.66665 5.10999 4.66665 4.98521 4.73023C4.87545 4.78615 4.78621 4.87539 4.73029 4.98515Z" fill="#667085" />
</svg>
)
export type Feedbacktype = {
@@ -53,6 +53,7 @@ export type IChatProps = {
displayScene?: DisplayScene
useCurrentUserAvatar?: boolean
isResponsing?: boolean
+  canStopResponsing?: boolean
abortResponsing?: () => void
controlClearQuery?: number
controlFocus?: number
@@ -132,8 +133,8 @@ const EditIcon: FC<{ className?: string }> = ({ className }) => {
export const EditIconSolid: FC<{ className?: string }> = ({ className }) => {
return <svg width="12" height="12" viewBox="0 0 12 12" fill="none" xmlns="http://www.w3.org/2000/svg" className={className}>
<path fill-rule="evenodd" clipRule="evenodd" d="M10.8374 8.63108C11.0412 8.81739 11.0554 9.13366 10.8691 9.33747L10.369 9.88449C10.0142 10.2725 9.52293 10.5001 9.00011 10.5001C8.47746 10.5001 7.98634 10.2727 7.63157 9.8849C7.45561 9.69325 7.22747 9.59515 7.00014 9.59515C6.77271 9.59515 6.54446 9.69335 6.36846 9.88517C6.18177 10.0886 5.86548 10.1023 5.66201 9.91556C5.45853 9.72888 5.44493 9.41259 5.63161 9.20911C5.98678 8.82201 6.47777 8.59515 7.00014 8.59515C7.52251 8.59515 8.0135 8.82201 8.36867 9.20911L8.36924 9.20974C8.54486 9.4018 8.77291 9.50012 9.00011 9.50012C9.2273 9.50012 9.45533 9.40182 9.63095 9.20979L10.131 8.66276C10.3173 8.45895 10.6336 8.44476 10.8374 8.63108Z" fill="#6B7280" />
<path fill-rule="evenodd" clipRule="evenodd" d="M7.89651 1.39656C8.50599 0.787085 9.49414 0.787084 10.1036 1.39656C10.7131 2.00604 10.7131 2.99419 10.1036 3.60367L3.82225 9.88504C3.81235 9.89494 3.80254 9.90476 3.79281 9.91451C3.64909 10.0585 3.52237 10.1855 3.3696 10.2791C3.23539 10.3613 3.08907 10.4219 2.93602 10.4587C2.7618 10.5005 2.58242 10.5003 2.37897 10.5001C2.3652 10.5001 2.35132 10.5001 2.33732 10.5001H1.50005C1.22391 10.5001 1.00005 10.2763 1.00005 10.0001V9.16286C1.00005 9.14886 1.00004 9.13497 1.00003 9.1212C0.999836 8.91776 0.999669 8.73838 1.0415 8.56416C1.07824 8.4111 1.13885 8.26479 1.22109 8.13058C1.31471 7.97781 1.44166 7.85109 1.58566 7.70736C1.5954 7.69764 1.60523 7.68783 1.61513 7.67793L7.89651 1.39656Z" fill="#6B7280" />
<path fillRule="evenodd" clipRule="evenodd" d="M10.8374 8.63108C11.0412 8.81739 11.0554 9.13366 10.8691 9.33747L10.369 9.88449C10.0142 10.2725 9.52293 10.5001 9.00011 10.5001C8.47746 10.5001 7.98634 10.2727 7.63157 9.8849C7.45561 9.69325 7.22747 9.59515 7.00014 9.59515C6.77271 9.59515 6.54446 9.69335 6.36846 9.88517C6.18177 10.0886 5.86548 10.1023 5.66201 9.91556C5.45853 9.72888 5.44493 9.41259 5.63161 9.20911C5.98678 8.82201 6.47777 8.59515 7.00014 8.59515C7.52251 8.59515 8.0135 8.82201 8.36867 9.20911L8.36924 9.20974C8.54486 9.4018 8.77291 9.50012 9.00011 9.50012C9.2273 9.50012 9.45533 9.40182 9.63095 9.20979L10.131 8.66276C10.3173 8.45895 10.6336 8.44476 10.8374 8.63108Z" fill="#6B7280" />
<path fillRule="evenodd" clipRule="evenodd" d="M7.89651 1.39656C8.50599 0.787085 9.49414 0.787084 10.1036 1.39656C10.7131 2.00604 10.7131 2.99419 10.1036 3.60367L3.82225 9.88504C3.81235 9.89494 3.80254 9.90476 3.79281 9.91451C3.64909 10.0585 3.52237 10.1855 3.3696 10.2791C3.23539 10.3613 3.08907 10.4219 2.93602 10.4587C2.7618 10.5005 2.58242 10.5003 2.37897 10.5001C2.3652 10.5001 2.35132 10.5001 2.33732 10.5001H1.50005C1.22391 10.5001 1.00005 10.2763 1.00005 10.0001V9.16286C1.00005 9.14886 1.00004 9.13497 1.00003 9.1212C0.999836 8.91776 0.999669 8.73838 1.0415 8.56416C1.07824 8.4111 1.13885 8.26479 1.22109 8.13058C1.31471 7.97781 1.44166 7.85109 1.58566 7.70736C1.5954 7.69764 1.60523 7.68783 1.61513 7.67793L7.89651 1.39656Z" fill="#6B7280" />
</svg>
}
@@ -412,6 +413,7 @@ const Chat: FC<IChatProps> = ({
displayScene,
useCurrentUserAvatar,
isResponsing,
+  canStopResponsing,
abortResponsing,
controlClearQuery,
controlFocus,
@@ -474,6 +476,16 @@ const Chat: FC<IChatProps> = ({
const isMobile = media === MediaType.mobile
const sendBtn = <div className={cn(!(!query || query.trim() === '') && s.sendBtnActive, `${s.sendBtn} w-8 h-8 cursor-pointer rounded-md`)} onClick={handleSend}></div>
+  const suggestionListRef = useRef<HTMLDivElement>(null)
+  const [hasScrollbar, setHasScrollbar] = useState(false)
+  useLayoutEffect(() => {
+    if (suggestionListRef.current) {
+      const listDom = suggestionListRef.current
+      const hasScrollbar = listDom.scrollWidth > listDom.clientWidth
+      setHasScrollbar(hasScrollbar)
+    }
+  }, [suggestionList])
return (
<div className={cn(!feedbackDisabled && 'px-3.5', 'h-full')}>
{/* Chat List */}
@@ -498,7 +510,7 @@ const Chat: FC<IChatProps> = ({
{
!isHideSendInput && (
<div className={cn(!feedbackDisabled && '!left-3.5 !right-3.5', 'absolute z-10 bottom-0 left-0 right-0')}>
-            {isResponsing && (
+            {(isResponsing && canStopResponsing) && (
<div className='flex justify-center mb-4'>
<Button className='flex items-center space-x-1 bg-white' onClick={() => abortResponsing?.()}>
{stopIcon}
@@ -523,7 +535,8 @@ const Chat: FC<IChatProps> = ({
background: 'linear-gradient(270deg, rgba(243, 244, 246, 0) 0%, #F3F4F6 100%)',
}}></div>
</div>
-          <div className='flex justify-center overflow-x-scroll pb-2'>
+          {/* has scrollbar would hide part of first item */}
+          <div ref={suggestionListRef} className={cn(!hasScrollbar && 'justify-center', 'flex overflow-x-auto pb-2')}>
{suggestionList?.map((item, index) => (
<div className='shrink-0 flex justify-center mr-2'>
<Button
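
The stop button only renders when both isResponsing and the new canStopResponsing flag are set, and clicking it delegates to abortResponsing. A minimal sketch of how a parent might wire these props, assuming the service layer accepts an AbortSignal; the import paths and sendChatMessage helper below are hypothetical, not taken from this diff:

    import React, { useRef, useState } from 'react'
    import Chat from '@/app/components/app/chat' // path assumed
    import { sendChatMessage } from '@/service/share' // hypothetical helper

    const ChatContainer = () => {
      const [isResponsing, setIsResponsing] = useState(false)
      const controllerRef = useRef<AbortController | null>(null)

      const send = async (query: string) => {
        const controller = new AbortController()
        controllerRef.current = controller
        setIsResponsing(true)
        try {
          // Forward the signal so aborting cancels the underlying fetch.
          await sendChatMessage(query, { signal: controller.signal })
        }
        finally {
          setIsResponsing(false)
        }
      }

      return (
        <Chat
          isResponsing={isResponsing}
          canStopResponsing
          abortResponsing={() => controllerRef.current?.abort()}
          onSend={send}
        />
      )
    }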

View File

@@ -1,10 +1,11 @@
'use client'
-import React, { FC } from 'react'
+import type { FC } from 'react'
+import React from 'react'
const SuggestedQuestionsAfterAnswerIcon: FC = () => {
return (
<svg width="16" height="16" viewBox="0 0 16 16" fill="none" xmlns="http://www.w3.org/2000/svg">
<path fill-rule="evenodd" clip-rule="evenodd" d="M10.8275 1.33325H5.17245C4.63581 1.33324 4.19289 1.33324 3.8321 1.36272C3.45737 1.39333 3.1129 1.45904 2.78934 1.6239C2.28758 1.87956 1.87963 2.28751 1.62397 2.78928C1.45911 3.11284 1.3934 3.4573 1.36278 3.83204C1.3333 4.19283 1.33331 4.63574 1.33332 5.17239L1.33328 9.42497C1.333 9.95523 1.33278 10.349 1.42418 10.6901C1.67076 11.6103 2.38955 12.3291 3.3098 12.5757C3.51478 12.6306 3.73878 12.6525 3.99998 12.6611L3.99998 13.5806C3.99995 13.7374 3.99992 13.8973 4.01182 14.0283C4.0232 14.1536 4.05333 14.3901 4.21844 14.5969C4.40843 14.8349 4.69652 14.9734 5.00106 14.973C5.26572 14.9728 5.46921 14.8486 5.57416 14.7792C5.6839 14.7066 5.80872 14.6067 5.93117 14.5087L7.53992 13.2217C7.88564 12.9451 7.98829 12.8671 8.09494 12.8126C8.20192 12.7579 8.3158 12.718 8.43349 12.6938C8.55081 12.6697 8.67974 12.6666 9.12248 12.6666H10.8275C11.3642 12.6666 11.8071 12.6666 12.1679 12.6371C12.5426 12.6065 12.8871 12.5408 13.2106 12.3759C13.7124 12.1203 14.1203 11.7123 14.376 11.2106C14.5409 10.887 14.6066 10.5425 14.6372 10.1678C14.6667 9.80701 14.6667 9.36411 14.6667 8.82747V5.17237C14.6667 4.63573 14.6667 4.19283 14.6372 3.83204C14.6066 3.4573 14.5409 3.11284 14.376 2.78928C14.1203 2.28751 13.7124 1.87956 13.2106 1.6239C12.8871 1.45904 12.5426 1.39333 12.1679 1.36272C11.8071 1.33324 11.3642 1.33324 10.8275 1.33325ZM8.99504 4.99992C8.99504 4.44763 9.44275 3.99992 9.99504 3.99992C10.5473 3.99992 10.995 4.44763 10.995 4.99992C10.995 5.5522 10.5473 5.99992 9.99504 5.99992C9.44275 5.99992 8.99504 5.5522 8.99504 4.99992ZM4.92837 7.79996C5.222 7.57974 5.63816 7.63837 5.85961 7.93051C5.90071 7.98295 5.94593 8.03229 5.99199 8.08035C6.09019 8.18282 6.23775 8.32184 6.42882 8.4608C6.81353 8.74059 7.3454 8.99996 7.99504 8.99996C8.64469 8.99996 9.17655 8.74059 9.56126 8.4608C9.75233 8.32184 9.89989 8.18282 9.99809 8.08035C10.0441 8.0323 10.0894 7.98294 10.1305 7.93051C10.3519 7.63837 10.7681 7.57974 11.0617 7.79996C11.3563 8.02087 11.416 8.43874 11.195 8.73329C11.1967 8.73112 11.1928 8.7361 11.186 8.74466C11.1697 8.7651 11.1372 8.80597 11.1261 8.81916C11.087 8.86575 11.0317 8.92884 10.9607 9.00289C10.8194 9.15043 10.6128 9.34474 10.3455 9.53912C9.81353 9.92599 9.01206 10.3333 7.99504 10.3333C6.97802 10.3333 6.17655 9.92599 5.64459 9.53912C5.37733 9.34474 5.17072 9.15043 5.02934 9.00289C4.95837 8.92884 4.90305 8.86575 4.86395 8.81916C4.84438 8.79585 4.82881 8.77659 4.81731 8.76207C4.58702 8.46455 4.61798 8.03275 4.92837 7.79996ZM5.99504 3.99992C5.44275 3.99992 4.99504 4.44763 4.99504 4.99992C4.99504 5.5522 5.44275 5.99992 5.99504 5.99992C6.54732 5.99992 6.99504 5.5522 6.99504 4.99992C6.99504 4.44763 6.54732 3.99992 5.99504 3.99992Z" fill="#06AED4" />
<path fillRule="evenodd" clipRule="evenodd" d="M10.8275 1.33325H5.17245C4.63581 1.33324 4.19289 1.33324 3.8321 1.36272C3.45737 1.39333 3.1129 1.45904 2.78934 1.6239C2.28758 1.87956 1.87963 2.28751 1.62397 2.78928C1.45911 3.11284 1.3934 3.4573 1.36278 3.83204C1.3333 4.19283 1.33331 4.63574 1.33332 5.17239L1.33328 9.42497C1.333 9.95523 1.33278 10.349 1.42418 10.6901C1.67076 11.6103 2.38955 12.3291 3.3098 12.5757C3.51478 12.6306 3.73878 12.6525 3.99998 12.6611L3.99998 13.5806C3.99995 13.7374 3.99992 13.8973 4.01182 14.0283C4.0232 14.1536 4.05333 14.3901 4.21844 14.5969C4.40843 14.8349 4.69652 14.9734 5.00106 14.973C5.26572 14.9728 5.46921 14.8486 5.57416 14.7792C5.6839 14.7066 5.80872 14.6067 5.93117 14.5087L7.53992 13.2217C7.88564 12.9451 7.98829 12.8671 8.09494 12.8126C8.20192 12.7579 8.3158 12.718 8.43349 12.6938C8.55081 12.6697 8.67974 12.6666 9.12248 12.6666H10.8275C11.3642 12.6666 11.8071 12.6666 12.1679 12.6371C12.5426 12.6065 12.8871 12.5408 13.2106 12.3759C13.7124 12.1203 14.1203 11.7123 14.376 11.2106C14.5409 10.887 14.6066 10.5425 14.6372 10.1678C14.6667 9.80701 14.6667 9.36411 14.6667 8.82747V5.17237C14.6667 4.63573 14.6667 4.19283 14.6372 3.83204C14.6066 3.4573 14.5409 3.11284 14.376 2.78928C14.1203 2.28751 13.7124 1.87956 13.2106 1.6239C12.8871 1.45904 12.5426 1.39333 12.1679 1.36272C11.8071 1.33324 11.3642 1.33324 10.8275 1.33325ZM8.99504 4.99992C8.99504 4.44763 9.44275 3.99992 9.99504 3.99992C10.5473 3.99992 10.995 4.44763 10.995 4.99992C10.995 5.5522 10.5473 5.99992 9.99504 5.99992C9.44275 5.99992 8.99504 5.5522 8.99504 4.99992ZM4.92837 7.79996C5.222 7.57974 5.63816 7.63837 5.85961 7.93051C5.90071 7.98295 5.94593 8.03229 5.99199 8.08035C6.09019 8.18282 6.23775 8.32184 6.42882 8.4608C6.81353 8.74059 7.3454 8.99996 7.99504 8.99996C8.64469 8.99996 9.17655 8.74059 9.56126 8.4608C9.75233 8.32184 9.89989 8.18282 9.99809 8.08035C10.0441 8.0323 10.0894 7.98294 10.1305 7.93051C10.3519 7.63837 10.7681 7.57974 11.0617 7.79996C11.3563 8.02087 11.416 8.43874 11.195 8.73329C11.1967 8.73112 11.1928 8.7361 11.186 8.74466C11.1697 8.7651 11.1372 8.80597 11.1261 8.81916C11.087 8.86575 11.0317 8.92884 10.9607 9.00289C10.8194 9.15043 10.6128 9.34474 10.3455 9.53912C9.81353 9.92599 9.01206 10.3333 7.99504 10.3333C6.97802 10.3333 6.17655 9.92599 5.64459 9.53912C5.37733 9.34474 5.17072 9.15043 5.02934 9.00289C4.95837 8.92884 4.90305 8.86575 4.86395 8.81916C4.84438 8.79585 4.82881 8.77659 4.81731 8.76207C4.58702 8.46455 4.61798 8.03275 4.92837 7.79996ZM5.99504 3.99992C5.44275 3.99992 4.99504 4.44763 4.99504 4.99992C4.99504 5.5522 5.44275 5.99992 5.99504 5.99992C6.54732 5.99992 6.99504 5.5522 6.99504 4.99992C6.99504 4.44763 6.54732 3.99992 5.99504 3.99992Z" fill="#06AED4" />
</svg>
)
}

View File

@@ -1,14 +1,14 @@
'use client'
import type { FC } from 'react'
-import React, { useEffect, useState } from 'react'
+import React, { useEffect } from 'react'
import cn from 'classnames'
import { useTranslation } from 'react-i18next'
import { useBoolean, useClickAway } from 'ahooks'
+import { ChevronDownIcon, Cog8ToothIcon, InformationCircleIcon } from '@heroicons/react/24/outline'
import ParamItem from './param-item'
import Radio from '@/app/components/base/radio'
import Panel from '@/app/components/base/panel'
import type { CompletionParams } from '@/models/debug'
-import { Cog8ToothIcon, InformationCircleIcon, ChevronDownIcon } from '@heroicons/react/24/outline'
import { AppType } from '@/types/app'
import { TONE_LIST } from '@/config'
import Toast from '@/app/components/base/toast'
@@ -26,8 +26,10 @@ export type IConifgModelProps = {
const options = [
{ id: 'gpt-3.5-turbo', name: 'gpt-3.5-turbo', type: AppType.chat },
+  { id: 'gpt-3.5-turbo-16k', name: 'gpt-3.5-turbo-16k', type: AppType.chat },
{ id: 'gpt-4', name: 'gpt-4', type: AppType.chat }, // 8k version
{ id: 'gpt-3.5-turbo', name: 'gpt-3.5-turbo', type: AppType.completion },
+  { id: 'gpt-3.5-turbo-16k', name: 'gpt-3.5-turbo-16k', type: AppType.completion },
{ id: 'text-davinci-003', name: 'text-davinci-003', type: AppType.completion },
{ id: 'gpt-4', name: 'gpt-4', type: AppType.completion }, // 8k version
]
@@ -51,7 +53,7 @@ const ConifgModel: FC<IConifgModelProps> = ({
}) => {
const { t } = useTranslation()
const isChatApp = mode === AppType.chat
-  const availableModels = options.filter((item) => item.type === mode)
+  const availableModels = options.filter(item => item.type === mode)
const [isShowConfig, { setFalse: hideConfig, toggle: toogleShowConfig }] = useBoolean(false)
const configContentRef = React.useRef(null)
useClickAway(() => {
@@ -95,7 +97,7 @@ const ConifgModel: FC<IConifgModelProps> = ({
key: 'max_tokens',
tip: t('common.model.params.maxTokenTip'),
step: 100,
-      max: modelId === 'gpt-4' ? 8000 : 4000,
+      max: (modelId === 'gpt-4' || modelId === 'gpt-3.5-turbo-16k') ? 8000 : 4000,
},
]
@@ -116,14 +118,14 @@ const ConifgModel: FC<IConifgModelProps> = ({
onShowUseGPT4Confirm()
return
}
-    if(id !== 'gpt-4' && completionParams.max_tokens > 4000) {
+    if (id !== 'gpt-4' && completionParams.max_tokens > 4000) {
Toast.notify({
type: 'warning',
-        message: t('common.model.params.setToCurrentModelMaxTokenTip')
+        message: t('common.model.params.setToCurrentModelMaxTokenTip'),
})
onCompletionParamsChange({
...completionParams,
-        max_tokens: 4000
+        max_tokens: 4000,
})
}
setModelId(id)
@@ -153,7 +155,7 @@ const ConifgModel: FC<IConifgModelProps> = ({
setToneId(id)
onCompletionParamsChange({
...tone.config,
-      max_tokens: completionParams.max_tokens
+      max_tokens: completionParams.max_tokens,
} as CompletionParams)
}
}
@@ -178,7 +180,7 @@ const ConifgModel: FC<IConifgModelProps> = ({
return (
<div className='relative' ref={configContentRef}>
<div
-        className={cn(`flex items-center border h-8 px-2.5 space-x-2 rounded-lg`, disabled ? diabledStyle : ableStyle)}
+        className={cn('flex items-center border h-8 px-2.5 space-x-2 rounded-lg', disabled ? diabledStyle : ableStyle)}
onClick={() => !disabled && toogleShowConfig()}
>
<ModelIcon />
@@ -206,18 +208,18 @@ const ConifgModel: FC<IConifgModelProps> = ({
<div className="flex items-center justify-between my-5 h-9">
<div>{t('appDebug.modelConfig.model')}</div>
{/* model selector */}
<div className="relative" style={{zIndex: 30}}>
<div ref={triggerRef} onClick={() => !selectModelDisabled && toogleOption()} className={cn(selectModelDisabled ? 'cursor-not-allowed' : 'cursor-pointer', "flex items-center h-9 px-3 space-x-2 rounded-lg bg-gray-50 ")}>
<div className="relative" style={{ zIndex: 30 }}>
<div ref={triggerRef} onClick={() => !selectModelDisabled && toogleOption()} className={cn(selectModelDisabled ? 'cursor-not-allowed' : 'cursor-pointer', 'flex items-center h-9 px-3 space-x-2 rounded-lg bg-gray-50 ')}>
<ModelIcon />
<div className="text-sm gray-900">{selectedModel?.name}</div>
{!selectModelDisabled && <ChevronDownIcon className={cn(isShowOption && 'rotate-180', 'w-[14px] h-[14px] text-gray-500')} />}
</div>
{isShowOption && (
<div className={cn(isChatApp ? 'w-[159px]' : 'w-[179px]', "absolute right-0 bg-gray-50 rounded-lg shadow")}>
<div className={cn(isChatApp ? 'min-w-[159px]' : 'w-[179px]', 'absolute right-0 bg-gray-50 rounded-lg shadow')}>
{availableModels.map(item => (
<div key={item.id} onClick={handleSelectModel(item.id)} className="flex items-center h-9 px-3 rounded-lg cursor-pointer hover:bg-gray-100">
<ModelIcon className='mr-2' />
<div className="text-sm gray-900">{item.name}</div>
<ModelIcon className='shrink-0 mr-2' />
<div className="text-sm gray-900 whitespace-nowrap">{item.name}</div>
</div>
))}
</div>
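The two hunks above encode the per-model token ceiling twice: once in the slider max and once in the clamp that fires when switching models. A minimal sketch, assuming the same model ids, of a single lookup that would keep both call sites in sync (MODEL_MAX_TOKENS and the helper names are hypothetical, not part of the diff):

// Per-model ceilings in one place; unknown ids fall back to 4000.
const MODEL_MAX_TOKENS: Record<string, number> = {
  'gpt-4': 8000,
  'gpt-3.5-turbo-16k': 8000,
  'gpt-3.5-turbo': 4000,
  'text-davinci-003': 4000,
}
const maxTokensFor = (modelId: string) => MODEL_MAX_TOKENS[modelId] ?? 4000

// On model switch, clamp the current max_tokens to the new model's ceiling
// instead of special-casing 'gpt-4' in the condition.
const clampMaxTokens = (modelId: string, params: CompletionParams): CompletionParams => ({
  ...params,
  max_tokens: Math.min(params.max_tokens, maxTokensFor(modelId)),
})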


@@ -1,26 +1,30 @@
'use client'
import type { FC } from 'react'
import React from 'react'
import { useTranslation } from 'react-i18next'
import { useBoolean } from 'ahooks'
import cn from 'classnames'
import ConfirmAddVar from './confirm-add-var'
import s from './style.module.css'
import BlockInput from '@/app/components/base/block-input'
import type { PromptVariable } from '@/models/debug'
import Tooltip from '@/app/components/base/tooltip'
import { AppType } from '@/types/app'
import { getNewVar } from '@/utils/var'
import { useTranslation } from 'react-i18next'
import { useBoolean } from 'ahooks'
import ConfirmAddVar from './confirm-add-var'
export type IPromptProps = {
mode: AppType
promptTemplate: string
promptVariables: PromptVariable[]
onChange: (promp: string, promptVariables: PromptVariable[]) => void
readonly?: boolean
onChange?: (promp: string, promptVariables: PromptVariable[]) => void
}
const Prompt: FC<IPromptProps> = ({
mode,
promptTemplate,
promptVariables,
readonly = false,
onChange,
}) => {
const { t } = useTranslation()
@@ -45,35 +49,39 @@ const Prompt: FC<IPromptProps> = ({
showConfirmAddVar()
return
}
onChange(newTemplates, [])
onChange?.(newTemplates, [])
}
const handleAutoAdd = (isAdd: boolean) => {
return () => {
onChange(newTemplates, isAdd ? newPromptVariables : [])
onChange?.(newTemplates, isAdd ? newPromptVariables : [])
hideConfirmAddVar()
}
}
return (
<div className='relative rounded-xl border border-[#2D0DEE] bg-gray-25'>
<div className={cn(!readonly ? `${s.gradientBorder}` : 'bg-gray-50', 'relative rounded-xl')}>
<div className="flex items-center h-11 pl-3 gap-1">
<svg width="14" height="13" viewBox="0 0 14 13" fill="none" xmlns="http://www.w3.org/2000/svg">
<path fillRule="evenodd" clipRule="evenodd" d="M3.00001 0.100098C3.21218 0.100098 3.41566 0.184383 3.56569 0.334412C3.71572 0.484441 3.80001 0.687924 3.80001 0.900098V1.7001H4.60001C4.81218 1.7001 5.01566 1.78438 5.16569 1.93441C5.31572 2.08444 5.40001 2.28792 5.40001 2.5001C5.40001 2.71227 5.31572 2.91575 5.16569 3.06578C5.01566 3.21581 4.81218 3.3001 4.60001 3.3001H3.80001V4.1001C3.80001 4.31227 3.71572 4.51575 3.56569 4.66578C3.41566 4.81581 3.21218 4.9001 3.00001 4.9001C2.78783 4.9001 2.58435 4.81581 2.43432 4.66578C2.28429 4.51575 2.20001 4.31227 2.20001 4.1001V3.3001H1.40001C1.18783 3.3001 0.98435 3.21581 0.834321 3.06578C0.684292 2.91575 0.600006 2.71227 0.600006 2.5001C0.600006 2.28792 0.684292 2.08444 0.834321 1.93441C0.98435 1.78438 1.18783 1.7001 1.40001 1.7001H2.20001V0.900098C2.20001 0.687924 2.28429 0.484441 2.43432 0.334412C2.58435 0.184383 2.78783 0.100098 3.00001 0.100098ZM3.00001 8.1001C3.21218 8.1001 3.41566 8.18438 3.56569 8.33441C3.71572 8.48444 3.80001 8.68792 3.80001 8.9001V9.7001H4.60001C4.81218 9.7001 5.01566 9.78438 5.16569 9.93441C5.31572 10.0844 5.40001 10.2879 5.40001 10.5001C5.40001 10.7123 5.31572 10.9158 5.16569 11.0658C5.01566 11.2158 4.81218 11.3001 4.60001 11.3001H3.80001V12.1001C3.80001 12.3123 3.71572 12.5158 3.56569 12.6658C3.41566 12.8158 3.21218 12.9001 3.00001 12.9001C2.78783 12.9001 2.58435 12.8158 2.43432 12.6658C2.28429 12.5158 2.20001 12.3123 2.20001 12.1001V11.3001H1.40001C1.18783 11.3001 0.98435 11.2158 0.834321 11.0658C0.684292 10.9158 0.600006 10.7123 0.600006 10.5001C0.600006 10.2879 0.684292 10.0844 0.834321 9.93441C0.98435 9.78438 1.18783 9.7001 1.40001 9.7001H2.20001V8.9001C2.20001 8.68792 2.28429 8.48444 2.43432 8.33441C2.58435 8.18438 2.78783 8.1001 3.00001 8.1001ZM8.60001 0.100098C8.77656 0.100041 8.94817 0.158388 9.0881 0.266047C9.22802 0.373706 9.32841 0.52463 9.37361 0.695298L10.3168 4.2601L13 5.8073C13.1216 5.87751 13.2226 5.9785 13.2928 6.10011C13.363 6.22173 13.4 6.35967 13.4 6.5001C13.4 6.64052 13.363 6.77847 13.2928 6.90008C13.2226 7.02169 13.1216 7.12268 13 7.1929L10.3168 8.7409L9.37281 12.3049C9.32753 12.4754 9.22716 12.6262 9.08732 12.7337C8.94748 12.8413 8.77602 12.8996 8.59961 12.8996C8.42319 12.8996 8.25173 12.8413 8.11189 12.7337C7.97205 12.6262 7.87169 12.4754 7.82641 12.3049L6.88321 8.7401L4.20001 7.1929C4.0784 7.12268 3.97742 7.02169 3.90721 6.90008C3.837 6.77847 3.80004 6.64052 3.80004 6.5001C3.80004 6.35967 3.837 6.22173 3.90721 6.10011C3.97742 5.9785 4.0784 5.87751 4.20001 5.8073L6.88321 4.2593L7.82721 0.695298C7.87237 0.524762 7.97263 0.373937 8.1124 0.266291C8.25216 0.158646 8.42359 0.100217 8.60001 0.100098Z" fill="#5850EC" />
</svg>
<div className="h2">{mode === AppType.chat ? t('appDebug.chatSubTitle') : t('appDebug.completionSubTitle')}</div>
<Tooltip
htmlContent={<div className='w-[180px]'>
{t('appDebug.promptTip')}
</div>}
selector='config-prompt-tooltip'>
<svg width="16" height="17" viewBox="0 0 16 17" fill="none" xmlns="http://www.w3.org/2000/svg">
<path d="M8.66667 11.1667H8V8.5H7.33333M8 5.83333H8.00667M14 8.5C14 9.28793 13.8448 10.0681 13.5433 10.7961C13.2417 11.5241 12.7998 12.1855 12.2426 12.7426C11.6855 13.2998 11.0241 13.7417 10.2961 14.0433C9.56815 14.3448 8.78793 14.5 8 14.5C7.21207 14.5 6.43185 14.3448 5.7039 14.0433C4.97595 13.7417 4.31451 13.2998 3.75736 12.7426C3.20021 12.1855 2.75825 11.5241 2.45672 10.7961C2.15519 10.0681 2 9.28793 2 8.5C2 6.9087 2.63214 5.38258 3.75736 4.25736C4.88258 3.13214 6.4087 2.5 8 2.5C9.5913 2.5 11.1174 3.13214 12.2426 4.25736C13.3679 5.38258 14 6.9087 14 8.5Z" stroke="#9CA3AF" strokeWidth="1.5" strokeLinecap="round" strokeLinejoin="round" />
</svg>
</Tooltip>
{!readonly && (
<Tooltip
htmlContent={<div className='w-[180px]'>
{t('appDebug.promptTip')}
</div>}
selector='config-prompt-tooltip'>
<svg width="16" height="17" viewBox="0 0 16 17" fill="none" xmlns="http://www.w3.org/2000/svg">
<path d="M8.66667 11.1667H8V8.5H7.33333M8 5.83333H8.00667M14 8.5C14 9.28793 13.8448 10.0681 13.5433 10.7961C13.2417 11.5241 12.7998 12.1855 12.2426 12.7426C11.6855 13.2998 11.0241 13.7417 10.2961 14.0433C9.56815 14.3448 8.78793 14.5 8 14.5C7.21207 14.5 6.43185 14.3448 5.7039 14.0433C4.97595 13.7417 4.31451 13.2998 3.75736 12.7426C3.20021 12.1855 2.75825 11.5241 2.45672 10.7961C2.15519 10.0681 2 9.28793 2 8.5C2 6.9087 2.63214 5.38258 3.75736 4.25736C4.88258 3.13214 6.4087 2.5 8 2.5C9.5913 2.5 11.1174 3.13214 12.2426 4.25736C13.3679 5.38258 14 6.9087 14 8.5Z" stroke="#9CA3AF" strokeWidth="1.5" strokeLinecap="round" strokeLinejoin="round" />
</svg>
</Tooltip>
)}
</div>
<BlockInput
readonly={readonly}
value={promptTemplate}
onConfirm={(value: string, vars: string[]) => {
handleChange(value, vars)
@@ -82,7 +90,7 @@ const Prompt: FC<IPromptProps> = ({
{isShowConfirmAddVar && (
<ConfirmAddVar
varNameArr={newPromptVariables.map((v) => v.name)}
varNameArr={newPromptVariables.map(v => v.name)}
onConfrim={handleAutoAdd(true)}
onCancel={handleAutoAdd(false)}
onHide={hideConfirmAddVar}
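With onChange now optional and every call optional-chained, the same component serves both the editable debug panel and a static preview. A usage sketch under those assumptions (handler and variable names hypothetical; the read-only form matches how ConfigPrompt is used in the automatic-generation modal further down):

// Editable: changes flow back through onChange.
<ConfigPrompt
  mode={mode}
  promptTemplate={promptTemplate}
  promptVariables={promptVariables}
  onChange={(template, vars) => savePromptConfig(template, vars)}
/>

// Read-only: no handler needed; the gradient border is swapped for bg-gray-50.
<ConfigPrompt mode={mode} promptTemplate={generated.prompt} promptVariables={[]} readonly />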


@@ -0,0 +1,15 @@
.gradientBorder {
background: radial-gradient(circle at 100% 100%, #fcfcfd 0, #fcfcfd 10px, transparent 10px) 0% 0%/12px 12px no-repeat,
radial-gradient(circle at 0 100%, #fcfcfd 0, #fcfcfd 10px, transparent 10px) 100% 0%/12px 12px no-repeat,
radial-gradient(circle at 100% 0, #fcfcfd 0, #fcfcfd 10px, transparent 10px) 0% 100%/12px 12px no-repeat,
radial-gradient(circle at 0 0, #fcfcfd 0, #fcfcfd 10px, transparent 10px) 100% 100%/12px 12px no-repeat,
linear-gradient(#fcfcfd, #fcfcfd) 50% 50%/calc(100% - 4px) calc(100% - 24px) no-repeat,
linear-gradient(#fcfcfd, #fcfcfd) 50% 50%/calc(100% - 24px) calc(100% - 4px) no-repeat,
radial-gradient(at 100% 100%, rgba(45,13,238,0.8) 0%, transparent 70%),
radial-gradient(at 100% 0%, rgba(45,13,238,0.8) 0%, transparent 70%),
radial-gradient(at 0% 0%, rgba(42,135,245,0.8) 0%, transparent 70%),
radial-gradient(at 0% 100%, rgba(42,135,245,0.8) 0%, transparent 70%);
border-radius: 12px;
padding: 2px;
box-sizing: border-box;
}
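This stack fakes a 2px gradient border without border-image: the four colored radial-gradients at the bottom of the list supply the hue across the whole box, the two centered linear-gradient layers plus the four 10px #fcfcfd corner circles repaint the interior on top of it, and padding: 2px keeps content off the thin ring that remains visible around rounded corners. A minimal usage sketch (inner markup hypothetical; the outer classes match the Prompt diff above):

// Only the 2px ring of the underlying gradient shows around the content.
<div className={cn(s.gradientBorder, 'relative rounded-xl')}>
  <div className='rounded-[10px]'>{/* panel content */}</div>
</div>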


@@ -1,53 +1,75 @@
'use client'
import React, { FC, useState } from 'react'
import type { FC } from 'react'
import React from 'react'
import { useTranslation } from 'react-i18next'
import { PlusIcon } from '@heroicons/react/24/outline'
import { ReactSortable } from 'react-sortablejs'
import RemoveIcon from '../../base/icons/remove-icon'
import s from './style.module.css'
export type Options = string[]
export interface IConfigSelectProps {
export type IConfigSelectProps = {
options: Options
onChange: (options: Options) => void
}
const ConfigSelect: FC<IConfigSelectProps> = ({
options,
onChange
onChange,
}) => {
const { t } = useTranslation()
const optionList = options.map((content, index) => {
return ({
id: index,
name: content,
})
})
return (
<div>
{options.length > 0 && (
<div className='mb-1 space-y-1 '>
{options.map((o, index) => (
<div className={`${s.inputWrap} relative`}>
<input
key={index}
type="input"
value={o || ''}
onChange={e => {
let value = e.target.value
onChange(options.map((item, i) => {
if (index === i) {
return value
}
return item
}))
}}
className={`${s.input} w-full px-3 text-sm leading-9 text-gray-900 border-0 grow h-9 bg-transparent focus:outline-none cursor-pointer`}
/>
<RemoveIcon
className={`${s.deleteBtn} absolute top-1/2 translate-y-[-50%] right-1.5 items-center justify-center w-6 h-6 rounded-md cursor-pointer hover:bg-[#FEE4E2]`}
onClick={() => {
onChange(options.filter((_, i) => index !== i))
}}
/>
</div>
))}
<div className='mb-1'>
<ReactSortable
className="space-y-1"
list={optionList}
setList={list => onChange(list.map(item => item.name))}
handle='.handle'
ghostClass="opacity-50"
animation={150}
>
{options.map((o, index) => (
<div className={`${s.inputWrap} relative`} key={index}>
<div className='handle flex items-center justify-center w-4 h-4 cursor-grab'>
<svg width="6" height="10" viewBox="0 0 6 10" fill="none" xmlns="http://www.w3.org/2000/svg">
<path fillRule="evenodd" clipRule="evenodd" d="M1 2C1.55228 2 2 1.55228 2 1C2 0.447715 1.55228 0 1 0C0.447715 0 0 0.447715 0 1C0 1.55228 0.447715 2 1 2ZM1 6C1.55228 6 2 5.55228 2 5C2 4.44772 1.55228 4 1 4C0.447715 4 0 4.44772 0 5C0 5.55228 0.447715 6 1 6ZM6 1C6 1.55228 5.55228 2 5 2C4.44772 2 4 1.55228 4 1C4 0.447715 4.44772 0 5 0C5.55228 0 6 0.447715 6 1ZM5 6C5.55228 6 6 5.55228 6 5C6 4.44772 5.55228 4 5 4C4.44772 4 4 4.44772 4 5C4 5.55228 4.44772 6 5 6ZM2 9C2 9.55229 1.55228 10 1 10C0.447715 10 0 9.55229 0 9C0 8.44771 0.447715 8 1 8C1.55228 8 2 8.44771 2 9ZM5 10C5.55228 10 6 9.55229 6 9C6 8.44771 5.55228 8 5 8C4.44772 8 4 8.44771 4 9C4 9.55229 4.44772 10 5 10Z" fill="#98A2B3"/>
</svg>
</div>
<input
key={index}
type="input"
value={o || ''}
onChange={(e) => {
const value = e.target.value
onChange(options.map((item, i) => {
if (index === i)
return value
return item
}))
}}
className={`${s.input} w-full px-1.5 text-sm leading-9 text-gray-900 border-0 grow h-9 bg-transparent focus:outline-none cursor-pointer`}
/>
<RemoveIcon
className={`${s.deleteBtn} absolute top-1/2 translate-y-[-50%] right-1.5 items-center justify-center w-6 h-6 rounded-md cursor-pointer hover:bg-[#FEE4E2]`}
onClick={() => {
onChange(options.filter((_, i) => index !== i))
}}
/>
</div>
))}
</ReactSortable>
</div>
)}
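react-sortablejs wants list items with a stable id, which is why the diff wraps the plain string options into { id, name } objects and unwraps them again in setList; handle='.handle' restricts dragging to the grip icon. A self-contained sketch of the same pattern (component and prop names hypothetical):

import { ReactSortable } from 'react-sortablejs'

// Wrap strings into { id, name } for ReactSortable, unwrap in setList.
const SortableStrings = ({ value, onChange }: { value: string[]; onChange: (v: string[]) => void }) => (
  <ReactSortable
    list={value.map((name, id) => ({ id, name }))}
    setList={list => onChange(list.map(item => item.name))}
    handle='.handle'     // drags start only from the grip element
    ghostClass='opacity-50'
    animation={150}
  >
    {value.map((name, i) => (
      <div key={i}><span className='handle cursor-grab'>⠿</span> {name}</div>
    ))}
  </ReactSortable>
)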


@@ -1,6 +1,9 @@
.inputWrap {
display: flex;
align-items: center;
border-radius: 8px;
border: 1px solid #EAECF0;
padding-left: 10px;
cursor: pointer;
}


@@ -2,29 +2,28 @@
import type { FC } from 'react'
import React, { useState } from 'react'
import { useTranslation } from 'react-i18next'
import Panel from '../base/feature-panel'
import Tooltip from '@/app/components/base/tooltip'
import type { PromptVariable } from '@/models/debug'
import { Cog8ToothIcon, TrashIcon } from '@heroicons/react/24/outline'
import { useBoolean } from 'ahooks'
import EditModel from './config-model'
import { DEFAULT_VALUE_MAX_LEN, getMaxVarNameLength } from '@/config'
import { getNewVar } from '@/utils/var'
import Panel from '../base/feature-panel'
import OperationBtn from '../base/operation-btn'
import Switch from '@/app/components/base/switch'
import IconTypeIcon from './input-type-icon'
import { checkKeys } from '@/utils/var'
import Toast from '@/app/components/base/toast'
import s from './style.module.css'
import VarIcon from '../base/icons/var-icon'
import EditModel from './config-model'
import IconTypeIcon from './input-type-icon'
import s from './style.module.css'
import Tooltip from '@/app/components/base/tooltip'
import type { PromptVariable } from '@/models/debug'
import { DEFAULT_VALUE_MAX_LEN, getMaxVarNameLength } from '@/config'
import { checkKeys, getNewVar } from '@/utils/var'
import Switch from '@/app/components/base/switch'
import Toast from '@/app/components/base/toast'
export type IConfigVarProps = {
promptVariables: PromptVariable[]
onPromptVariablesChange: (promptVariables: PromptVariable[]) => void
readonly?: boolean
onPromptVariablesChange?: (promptVariables: PromptVariable[]) => void
}
const ConfigVar: FC<IConfigVarProps> = ({ promptVariables, onPromptVariablesChange }) => {
const ConfigVar: FC<IConfigVarProps> = ({ promptVariables, readonly, onPromptVariablesChange }) => {
const { t } = useTranslation()
const hasVar = promptVariables.length > 0
const promptVariableObj = (() => {
@@ -39,16 +38,17 @@ const ConfigVar: FC<IConfigVarProps> = ({ promptVariables, onPromptVariablesChan
if (!(key in promptVariableObj))
return
const newPromptVariables = promptVariables.map((item) => {
if (item.key === key)
if (item.key === key) {
return {
...item,
[updateKey]: newValue
[updateKey]: newValue,
}
}
return item
})
onPromptVariablesChange(newPromptVariables)
onPromptVariablesChange?.(newPromptVariables)
}
const batchUpdatePromptVariable = (key: string, updateKeys: string[], newValues: any[]) => {
@@ -66,53 +66,55 @@ const ConfigVar: FC<IConfigVarProps> = ({ promptVariables, onPromptVariablesChan
return item
})
onPromptVariablesChange(newPromptVariables)
onPromptVariablesChange?.(newPromptVariables)
}
const updatePromptKey = (index: number, newKey: string) => {
const { isValid, errorKey, errorMessageKey } = checkKeys([newKey], true)
if (!isValid) {
Toast.notify({
type: 'error',
message: t(`appDebug.varKeyError.${errorMessageKey}`, { key: errorKey })
message: t(`appDebug.varKeyError.${errorMessageKey}`, { key: errorKey }),
})
return
}
const newPromptVariables = promptVariables.map((item, i) => {
if (i === index)
if (i === index) {
return {
...item,
key: newKey,
}
}
return item
})
onPromptVariablesChange(newPromptVariables)
onPromptVariablesChange?.(newPromptVariables)
}
const updatePromptNameIfNameEmpty = (index: number, newKey: string) => {
if (!newKey) return
if (!newKey)
return
const newPromptVariables = promptVariables.map((item, i) => {
if (i === index && !item.name)
if (i === index && !item.name) {
return {
...item,
name: newKey,
}
}
return item
})
onPromptVariablesChange(newPromptVariables)
onPromptVariablesChange?.(newPromptVariables)
}
const handleAddVar = () => {
const newVar = getNewVar('')
onPromptVariablesChange([...promptVariables, newVar])
onPromptVariablesChange?.([...promptVariables, newVar])
}
const handleRemoveVar = (index: number) => {
onPromptVariablesChange(promptVariables.filter((_, i) => i !== index))
onPromptVariablesChange?.(promptVariables.filter((_, i) => i !== index))
}
const [currKey, setCurrKey] = useState<string | null>(null)
@@ -132,16 +134,18 @@ const ConfigVar: FC<IConfigVarProps> = ({ promptVariables, onPromptVariablesChan
title={
<div className='flex items-center gap-2'>
<div>{t('appDebug.variableTitle')}</div>
<Tooltip htmlContent={<div className='w-[180px]'>
{t('appDebug.variableTip')}
</div>} selector='config-var-tooltip'>
<svg width="16" height="17" viewBox="0 0 16 17" fill="none" xmlns="http://www.w3.org/2000/svg">
<path d="M8.66667 11.1667H8V8.5H7.33333M8 5.83333H8.00667M14 8.5C14 9.28793 13.8448 10.0681 13.5433 10.7961C13.2417 11.5241 12.7998 12.1855 12.2426 12.7426C11.6855 13.2998 11.0241 13.7417 10.2961 14.0433C9.56815 14.3448 8.78793 14.5 8 14.5C7.21207 14.5 6.43185 14.3448 5.7039 14.0433C4.97595 13.7417 4.31451 13.2998 3.75736 12.7426C3.20021 12.1855 2.75825 11.5241 2.45672 10.7961C2.15519 10.0681 2 9.28793 2 8.5C2 6.9087 2.63214 5.38258 3.75736 4.25736C4.88258 3.13214 6.4087 2.5 8 2.5C9.5913 2.5 11.1174 3.13214 12.2426 4.25736C13.3679 5.38258 14 6.9087 14 8.5Z" stroke="#9CA3AF" strokeWidth="1.5" strokeLinecap="round" strokeLinejoin="round" />
</svg>
</Tooltip>
{!readonly && (
<Tooltip htmlContent={<div className='w-[180px]'>
{t('appDebug.variableTip')}
</div>} selector='config-var-tooltip'>
<svg width="16" height="17" viewBox="0 0 16 17" fill="none" xmlns="http://www.w3.org/2000/svg">
<path d="M8.66667 11.1667H8V8.5H7.33333M8 5.83333H8.00667M14 8.5C14 9.28793 13.8448 10.0681 13.5433 10.7961C13.2417 11.5241 12.7998 12.1855 12.2426 12.7426C11.6855 13.2998 11.0241 13.7417 10.2961 14.0433C9.56815 14.3448 8.78793 14.5 8 14.5C7.21207 14.5 6.43185 14.3448 5.7039 14.0433C4.97595 13.7417 4.31451 13.2998 3.75736 12.7426C3.20021 12.1855 2.75825 11.5241 2.45672 10.7961C2.15519 10.0681 2 9.28793 2 8.5C2 6.9087 2.63214 5.38258 3.75736 4.25736C4.88258 3.13214 6.4087 2.5 8 2.5C9.5913 2.5 11.1174 3.13214 12.2426 4.25736C13.3679 5.38258 14 6.9087 14 8.5Z" stroke="#9CA3AF" strokeWidth="1.5" strokeLinecap="round" strokeLinejoin="round" />
</svg>
</Tooltip>
)}
</div>
}
headerRight={<OperationBtn type="add" onClick={handleAddVar} />}
headerRight={!readonly ? <OperationBtn type="add" onClick={handleAddVar} /> : null}
>
{!hasVar && (
<div className='pt-2 pb-1 text-xs text-gray-500'>{t('appDebug.notSetVar')}</div>
@@ -153,8 +157,13 @@ const ConfigVar: FC<IConfigVarProps> = ({ promptVariables, onPromptVariablesChan
<tr className='uppercase'>
<td>{t('appDebug.variableTable.key')}</td>
<td>{t('appDebug.variableTable.name')}</td>
<td>{t('appDebug.variableTable.optional')}</td>
<td>{t('appDebug.variableTable.action')}</td>
{!readonly && (
<>
<td>{t('appDebug.variableTable.optional')}</td>
<td>{t('appDebug.variableTable.action')}</td>
</>
)}
</tr>
</thead>
<tbody className="text-gray-700">
@@ -163,42 +172,57 @@ const ConfigVar: FC<IConfigVarProps> = ({ promptVariables, onPromptVariablesChan
<td className="w-[160px] border-b border-gray-100 pl-3">
<div className='flex items-center space-x-1'>
<IconTypeIcon type={type} />
<input
type="text"
placeholder="key"
value={key}
onChange={e => updatePromptKey(index, e.target.value)}
onBlur={e => updatePromptNameIfNameEmpty(index, e.target.value)}
maxLength={getMaxVarNameLength(name)}
className="h-6 leading-6 block w-full rounded-md border-0 py-1.5 text-gray-900 placeholder:text-gray-400 focus:outline-none focus:ring-1 focus:ring-inset focus:ring-gray-200"
/>
{!readonly
? (
<input
type="text"
placeholder="key"
value={key}
onChange={e => updatePromptKey(index, e.target.value)}
onBlur={e => updatePromptNameIfNameEmpty(index, e.target.value)}
maxLength={getMaxVarNameLength(name)}
className="h-6 leading-6 block w-full rounded-md border-0 py-1.5 text-gray-900 placeholder:text-gray-400 focus:outline-none focus:ring-1 focus:ring-inset focus:ring-gray-200"
/>
)
: (
<div className='h-6 leading-6 text-[13px] text-gray-700'>{key}</div>
)}
</div>
</td>
<td className="py-1 border-b border-gray-100">
<input
type="text"
placeholder={key}
value={name}
onChange={e => updatePromptVariable(key, 'name', e.target.value)}
maxLength={getMaxVarNameLength(name)}
className="h-6 leading-6 block w-full rounded-md border-0 py-1.5 text-gray-900 placeholder:text-gray-400 focus:outline-none focus:ring-1 focus:ring-inset focus:ring-gray-200"
/>
</td>
<td className='w-[84px] border-b border-gray-100'>
<div className='flex items-center h-full'>
<Switch defaultValue={!required} size='md' onChange={(value) => updatePromptVariable(key, 'required', !value)} />
</div>
</td>
<td className='w-20 border-b border-gray-100'>
<div className='flex h-full items-center space-x-1'>
<div className='flex items-center justify-items-center w-6 h-6 text-gray-500 cursor-pointer' onClick={() => handleConfig(key)}>
<Cog8ToothIcon width={16} height={16} />
</div>
<div className='flex items-center justify-items-center w-6 h-6 text-gray-500 cursor-pointer' onClick={() => handleRemoveVar(index)} >
<TrashIcon width={16} height={16} />
</div>
</div>
{!readonly
? (
<input
type="text"
placeholder={key}
value={name}
onChange={e => updatePromptVariable(key, 'name', e.target.value)}
maxLength={getMaxVarNameLength(name)}
className="h-6 leading-6 block w-full rounded-md border-0 py-1.5 text-gray-900 placeholder:text-gray-400 focus:outline-none focus:ring-1 focus:ring-inset focus:ring-gray-200"
/>)
: (
<div className='h-6 leading-6 text-[13px] text-gray-700'>{name}</div>
)}
</td>
{!readonly && (
<>
<td className='w-[84px] border-b border-gray-100'>
<div className='flex items-center h-full'>
<Switch defaultValue={!required} size='md' onChange={value => updatePromptVariable(key, 'required', !value)} />
</div>
</td>
<td className='w-20 border-b border-gray-100'>
<div className='flex h-full items-center space-x-1'>
<div className='flex items-center justify-items-center w-6 h-6 text-gray-500 cursor-pointer' onClick={() => handleConfig(key)}>
<Cog8ToothIcon width={16} height={16} />
</div>
<div className='flex items-center justify-items-center w-6 h-6 text-gray-500 cursor-pointer' onClick={() => handleRemoveVar(index)} >
<TrashIcon width={16} height={16} />
</div>
</div>
</td>
</>
)}
</tr>
))}
</tbody>
@@ -212,11 +236,12 @@ const ConfigVar: FC<IConfigVarProps> = ({ promptVariables, onPromptVariablesChan
isShow={isShowEditModal}
onClose={hideEditModal}
onConfirm={({ type, value }) => {
if (type === 'string') {
if (type === 'string')
batchUpdatePromptVariable(currKey as string, ['type', 'max_length'], [type, value || DEFAULT_VALUE_MAX_LEN])
} else {
else
batchUpdatePromptVariable(currKey as string, ['type', 'options'], [type, value || []])
}
hideEditModal()
}}
/>
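All three update functions in this file now share one shape: map over promptVariables, immutably replace the matching item, and hand the result to the now-optional onPromptVariablesChange. A sketch of that shared pattern as a helper (updateWhere is a hypothetical name, not in the diff):

// Replace the item(s) matching the predicate with an immutable patch.
const updateWhere = <T,>(items: T[], match: (item: T, i: number) => boolean, patch: Partial<T>): T[] =>
  items.map((item, i) => (match(item, i) ? { ...item, ...patch } : item))

// e.g. updatePromptKey(index, newKey) reduces to:
// onPromptVariablesChange?.(updateWhere(promptVariables, (_, i) => i === index, { key: newKey }))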


@@ -0,0 +1,33 @@
'use client'
import type { FC } from 'react'
import React from 'react'
import { useTranslation } from 'react-i18next'
import Button from '@/app/components/base/button'
export type IAutomaticBtnProps = {
onClick: () => void
}
const leftIcon = (
<svg width="14" height="14" viewBox="0 0 14 14" fill="none" xmlns="http://www.w3.org/2000/svg">
<path d="M4.31346 0.905711C4.21464 0.708087 4.01266 0.583252 3.79171 0.583252C3.57076 0.583252 3.36877 0.708087 3.26996 0.905711L2.81236 1.82091C2.64757 2.15048 2.59736 2.24532 2.53635 2.32447C2.47515 2.40386 2.40398 2.47503 2.32459 2.53623C2.24544 2.59724 2.1506 2.64745 1.82103 2.81224L0.905833 3.26984C0.708209 3.36865 0.583374 3.57064 0.583374 3.79159C0.583374 4.01254 0.708209 4.21452 0.905833 4.31333L1.82103 4.77094C2.1506 4.93572 2.24544 4.98593 2.32459 5.04694C2.40398 5.10814 2.47515 5.17931 2.53635 5.2587C2.59736 5.33785 2.64758 5.43269 2.81236 5.76226L3.26996 6.67746C3.36877 6.87508 3.57076 6.99992 3.79171 6.99992C4.01266 6.99992 4.21465 6.87508 4.31346 6.67746L4.77106 5.76226C4.93584 5.43269 4.98605 5.33786 5.04707 5.2587C5.10826 5.17931 5.17943 5.10814 5.25883 5.04694C5.33798 4.98593 5.43282 4.93572 5.76238 4.77094L6.67758 4.31333C6.87521 4.21452 7.00004 4.01254 7.00004 3.79159C7.00004 3.57064 6.87521 3.36865 6.67758 3.26984L5.76238 2.81224C5.43282 2.64745 5.33798 2.59724 5.25883 2.53623C5.17943 2.47503 5.10826 2.40386 5.04707 2.32447C4.98605 2.24532 4.93584 2.15048 4.77106 1.82091L4.31346 0.905711Z" fill="#444CE7"/>
<path d="M11.375 1.74992C11.375 1.42775 11.1139 1.16659 10.7917 1.16659C10.4695 1.16659 10.2084 1.42775 10.2084 1.74992V2.62492H9.33337C9.01121 2.62492 8.75004 2.88609 8.75004 3.20825C8.75004 3.53042 9.01121 3.79159 9.33337 3.79159H10.2084V4.66659C10.2084 4.98875 10.4695 5.24992 10.7917 5.24992C11.1139 5.24992 11.375 4.98875 11.375 4.66659V3.79159H12.25C12.5722 3.79159 12.8334 3.53042 12.8334 3.20825C12.8334 2.88609 12.5722 2.62492 12.25 2.62492H11.375V1.74992Z" fill="#444CE7"/>
<path d="M3.79171 9.33325C3.79171 9.01109 3.53054 8.74992 3.20837 8.74992C2.88621 8.74992 2.62504 9.01109 2.62504 9.33325V10.2083H1.75004C1.42787 10.2083 1.16671 10.4694 1.16671 10.7916C1.16671 11.1138 1.42787 11.3749 1.75004 11.3749H2.62504V12.2499C2.62504 12.5721 2.88621 12.8333 3.20837 12.8333C3.53054 12.8333 3.79171 12.5721 3.79171 12.2499V11.3749H4.66671C4.98887 11.3749 5.25004 11.1138 5.25004 10.7916C5.25004 10.4694 4.98887 10.2083 4.66671 10.2083H3.79171V9.33325Z" fill="#444CE7"/>
<path d="M10.4385 6.73904C10.3396 6.54142 10.1377 6.41659 9.91671 6.41659C9.69576 6.41659 9.49377 6.54142 9.39496 6.73904L8.84014 7.84869C8.67535 8.17826 8.62514 8.27309 8.56413 8.35225C8.50293 8.43164 8.43176 8.50281 8.35237 8.56401C8.27322 8.62502 8.17838 8.67523 7.84881 8.84001L6.73917 9.39484C6.54154 9.49365 6.41671 9.69564 6.41671 9.91659C6.41671 10.1375 6.54154 10.3395 6.73917 10.4383L7.84881 10.9932C8.17838 11.1579 8.27322 11.2082 8.35237 11.2692C8.43176 11.3304 8.50293 11.4015 8.56413 11.4809C8.62514 11.5601 8.67535 11.6549 8.84014 11.9845L9.39496 13.0941C9.49377 13.2918 9.69576 13.4166 9.91671 13.4166C10.1377 13.4166 10.3396 13.2918 10.4385 13.0941L10.9933 11.9845C11.1581 11.6549 11.2083 11.5601 11.2693 11.4809C11.3305 11.4015 11.4017 11.3304 11.481 11.2692C11.5602 11.2082 11.655 11.1579 11.9846 10.9932L13.0942 10.4383C13.2919 10.3395 13.4167 10.1375 13.4167 9.91659C13.4167 9.69564 13.2919 9.49365 13.0942 9.39484L11.9846 8.84001C11.655 8.67523 11.5602 8.62502 11.481 8.56401C11.4017 8.50281 11.3305 8.43164 11.2693 8.35225C11.2083 8.27309 11.1581 8.17826 10.9933 7.84869L10.4385 6.73904Z" fill="#444CE7"/>
</svg>
)
const AutomaticBtn: FC<IAutomaticBtnProps> = ({
onClick,
}) => {
const { t } = useTranslation()
return (
<Button className='flex space-x-2 items-center !h-8'
onClick={onClick}
>
{leftIcon}
<span className='text-xs font-semibold text-primary-600 uppercase'>{t('appDebug.operation.automatic')}</span>
</Button>
)
}
export default React.memo(AutomaticBtn)


@@ -0,0 +1,205 @@
'use client'
import type { FC } from 'react'
import React from 'react'
import { useTranslation } from 'react-i18next'
import { useBoolean } from 'ahooks'
import Modal from '@/app/components/base/modal'
import Button from '@/app/components/base/button'
import Toast from '@/app/components/base/toast'
import { generateRule } from '@/service/debug'
import ConfigPrompt from '@/app/components/app/configuration/config-prompt'
import { AppType } from '@/types/app'
import ConfigVar from '@/app/components/app/configuration/config-var'
import OpeningStatement from '@/app/components/app/configuration/features/chat-group/opening-statement'
import GroupName from '@/app/components/app/configuration/base/group-name'
import Loading from '@/app/components/base/loading'
import Confirm from '@/app/components/base/confirm'
const noDataIcon = (
<svg width="56" height="56" viewBox="0 0 56 56" fill="none" xmlns="http://www.w3.org/2000/svg">
<path d="M10.4998 51.3333V39.6666M10.4998 16.3333V4.66663M4.6665 10.5H16.3332M4.6665 45.5H16.3332M30.3332 6.99996L26.2868 17.5206C25.6287 19.2315 25.2997 20.0869 24.7881 20.8065C24.3346 21.4442 23.7774 22.0014 23.1397 22.4549C22.4202 22.9665 21.5647 23.2955 19.8538 23.9535L9.33317 28L19.8539 32.0464C21.5647 32.7044 22.4202 33.0334 23.1397 33.5451C23.7774 33.9985 24.3346 34.5557 24.7881 35.1934C25.2997 35.913 25.6287 36.7684 26.2868 38.4793L30.3332 49L34.3796 38.4793C35.0376 36.7684 35.3666 35.913 35.8783 35.1934C36.3317 34.5557 36.8889 33.9985 37.5266 33.5451C38.2462 33.0334 39.1016 32.7044 40.8125 32.0464L51.3332 28L40.8125 23.9535C39.1016 23.2955 38.2462 22.9665 37.5266 22.4549C36.8889 22.0014 36.3317 21.4442 35.8783 20.8065C35.3666 20.0869 35.0376 19.2315 34.3796 17.5206L30.3332 6.99996Z" stroke="#EAECF0" strokeWidth="3" strokeLinecap="round" strokeLinejoin="round"/>
</svg>
)
export type AutomaticRes = {
prompt: string
variables: string[]
opening_statement: string
}
export type IGetAutomaticResProps = {
mode: AppType
isShow: boolean
onClose: () => void
onFinished: (res: AutomaticRes) => void
}
const genIcon = (
<svg width="16" height="16" viewBox="0 0 16 16" fill="none" xmlns="http://www.w3.org/2000/svg">
<path d="M3.6665 1.33332C3.6665 0.965133 3.36803 0.666656 2.99984 0.666656C2.63165 0.666656 2.33317 0.965133 2.33317 1.33332V2.33332H1.33317C0.964981 2.33332 0.666504 2.6318 0.666504 2.99999C0.666504 3.36818 0.964981 3.66666 1.33317 3.66666H2.33317V4.66666C2.33317 5.03485 2.63165 5.33332 2.99984 5.33332C3.36803 5.33332 3.6665 5.03485 3.6665 4.66666V3.66666H4.6665C5.03469 3.66666 5.33317 3.36818 5.33317 2.99999C5.33317 2.6318 5.03469 2.33332 4.6665 2.33332H3.6665V1.33332Z" fill="white"/>
<path d="M3.6665 11.3333C3.6665 10.9651 3.36803 10.6667 2.99984 10.6667C2.63165 10.6667 2.33317 10.9651 2.33317 11.3333V12.3333H1.33317C0.964981 12.3333 0.666504 12.6318 0.666504 13C0.666504 13.3682 0.964981 13.6667 1.33317 13.6667H2.33317V14.6667C2.33317 15.0348 2.63165 15.3333 2.99984 15.3333C3.36803 15.3333 3.6665 15.0348 3.6665 14.6667V13.6667H4.6665C5.03469 13.6667 5.33317 13.3682 5.33317 13C5.33317 12.6318 5.03469 12.3333 4.6665 12.3333H3.6665V11.3333Z" fill="white"/>
<path d="M9.28873 1.76067C9.18971 1.50321 8.94235 1.33332 8.6665 1.33332C8.39066 1.33332 8.1433 1.50321 8.04427 1.76067L6.88815 4.76658C6.68789 5.28727 6.62495 5.43732 6.53887 5.55838C6.4525 5.67986 6.34637 5.78599 6.2249 5.87236C6.10384 5.95844 5.95379 6.02137 5.43309 6.22164L2.42718 7.37776C2.16972 7.47678 1.99984 7.72414 1.99984 7.99999C1.99984 8.27584 2.16972 8.5232 2.42718 8.62222L5.43309 9.77834C5.95379 9.97861 6.10384 10.0415 6.2249 10.1276C6.34637 10.214 6.4525 10.3201 6.53887 10.4416C6.62495 10.5627 6.68789 10.7127 6.88816 11.2334L8.04427 14.2393C8.1433 14.4968 8.39066 14.6667 8.6665 14.6667C8.94235 14.6667 9.18971 14.4968 9.28873 14.2393L10.4449 11.2334C10.6451 10.7127 10.7081 10.5627 10.7941 10.4416C10.8805 10.3201 10.9866 10.214 11.1081 10.1276C11.2292 10.0415 11.3792 9.97861 11.8999 9.77834L14.9058 8.62222C15.1633 8.5232 15.3332 8.27584 15.3332 7.99999C15.3332 7.72414 15.1633 7.47678 14.9058 7.37776L11.8999 6.22164C11.3792 6.02137 11.2292 5.95844 11.1081 5.87236C10.9866 5.78599 10.8805 5.67986 10.7941 5.55838C10.7081 5.43732 10.6451 5.28727 10.4449 4.76658L9.28873 1.76067Z" fill="white"/>
</svg>
)
const GetAutomaticRes: FC<IGetAutomaticResProps> = ({
mode,
isShow,
onClose,
// appId,
onFinished,
}) => {
const { t } = useTranslation()
const [audiences, setAudiences] = React.useState<string>('')
const [hopingToSolve, setHopingToSolve] = React.useState<string>('')
const isValid = () => {
if (audiences.trim() === '') {
Toast.notify({
type: 'error',
message: t('appDebug.automatic.audiencesRequired'),
})
return false
}
if (hopingToSolve.trim() === '') {
Toast.notify({
type: 'error',
message: t('appDebug.automatic.problemRequired'),
})
return false
}
return true
}
const [isLoading, { setTrue: setLoadingTrue, setFalse: setLoadingFalse }] = useBoolean(false)
const [res, setRes] = React.useState<AutomaticRes | null>(null)
const renderLoading = (
<div className='grow flex flex-col items-center justify-center h-full space-y-3'>
<Loading />
<div className='text-[13px] text-gray-400'>{t('appDebug.automatic.loading')}</div>
</div>
)
const renderNoData = (
<div className='grow flex flex-col items-center justify-center h-full space-y-3'>
{noDataIcon}
<div className='text-[13px] text-gray-400'>{t('appDebug.automatic.noData')}</div>
</div>
)
const onGenerate = async () => {
if (!isValid())
return
if (isLoading)
return
setLoadingTrue()
try {
const res = await generateRule({
audiences,
hoping_to_solve: hopingToSolve,
})
setRes(res as AutomaticRes)
}
finally {
setLoadingFalse()
}
}
const [showConfirmOverwrite, setShowConfirmOverwrite] = React.useState(false)
return (
<Modal
isShow={isShow}
onClose={onClose}
className='min-w-[1120px] !p-0'
closable
>
<div className='flex h-[680px]'>
<div className='w-[480px] shrink-0 px-8 py-6 h-full overflow-y-auto border-r border-gray-100'>
<div>
<div className='mb-1 text-xl font-semibold text-primary-600'>{t('appDebug.automatic.title')}</div>
<div className='text-[13px] font-normal text-gray-500'>{t('appDebug.automatic.description')}</div>
</div>
{/* inputs */}
<div className='mt-12 space-y-5'>
<div className='space-y-2'>
<div className='text-[13px] font-medium text-gray-900'>{t('appDebug.automatic.intendedAudience')}</div>
<input className="w-full h-8 px-3 text-[13px] font-normal bg-gray-50 rounded-lg" placeholder={t('appDebug.automatic.intendedAudiencePlaceHolder') as string} value={audiences} onChange={e => setAudiences(e.target.value)} />
</div>
<div className='space-y-2'>
<div className='text-[13px] font-medium text-gray-900'>{t('appDebug.automatic.solveProblem')}</div>
<textarea className="w-full h-[200px] overflow-y-auto p-3 text-[13px] font-normal bg-gray-50 rounded-lg" placeholder={t('appDebug.automatic.solveProblemPlaceHolder') as string} value={hopingToSolve} onChange={e => setHopingToSolve(e.target.value)} />
</div>
<div className='mt-6 flex justify-end'>
<Button
className='flex space-x-2 items-center !h-8'
type='primary'
onClick={onGenerate}
disabled={isLoading}
>
{genIcon}
<span className='text-xs font-semibold text-white uppercase'>{t('appDebug.automatic.generate')}</span>
</Button>
</div>
</div>
</div>
{(!isLoading && res) && (
<div className='grow px-8 pt-6 h-full overflow-y-auto'>
<div className='mb-4 w-1/2 text-lg font-medium text-gray-900'>{t('appDebug.automatic.resTitle')}</div>
<ConfigPrompt
mode={mode}
promptTemplate={res?.prompt || ''}
promptVariables={[]}
readonly
/>
{(res?.variables?.length && res?.variables?.length > 0)
? (
<ConfigVar
promptVariables={res?.variables.map(key => ({ key, name: key, type: 'string', required: true })) || []}
readonly
/>
)
: ''}
{(mode === AppType.chat && res?.opening_statement) && (
<div className='mt-7'>
<GroupName name={t('appDebug.feature.groupChat.title')} />
<OpeningStatement
value={res?.opening_statement || ''}
readonly
/>
</div>
)}
<div className='sticky bottom-0 flex justify-end right-0 py-4'>
<Button onClick={onClose}>{t('common.operation.cancel')}</Button>
<Button type='primary' className='ml-2' onClick={() => {
setShowConfirmOverwrite(true)
}}>{t('appDebug.automatic.apply')}</Button>
</div>
</div>
)}
{isLoading && renderLoading}
{(!isLoading && !res) && renderNoData}
{showConfirmOverwrite && (
<Confirm
title={t('appDebug.automatic.overwriteTitle')}
content={t('appDebug.automatic.overwriteMessage')}
isShow={showConfirmOverwrite}
onClose={() => setShowConfirmOverwrite(false)}
onConfirm={() => {
setShowConfirmOverwrite(false)
onFinished(res as AutomaticRes)
}}
onCancel={() => setShowConfirmOverwrite(false)}
/>
)}
</div>
</Modal>
)
}
export default React.memo(GetAutomaticRes)
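Wiring the two new pieces together: AutomaticBtn opens the modal, GetAutomaticRes collects the audience and problem, calls generateRule, previews the result with the read-only ConfigPrompt/ConfigVar/OpeningStatement, and only hands the AutomaticRes back through onFinished after the overwrite confirm. A sketch of the parent side (component name and apply logic hypothetical):

const PromptGeneratorEntry: FC = () => {
  const [showAutomatic, setShowAutomatic] = React.useState(false)
  return (
    <>
      <AutomaticBtn onClick={() => setShowAutomatic(true)} />
      <GetAutomaticRes
        mode={AppType.chat}
        isShow={showAutomatic}
        onClose={() => setShowAutomatic(false)}
        onFinished={(res) => {
          // apply res.prompt / res.variables / res.opening_statement to the app config
          setShowAutomatic(false)
        }}
      />
    </>
  )
}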


@@ -1,9 +1,13 @@
'use client'
import React, { FC } from 'react'
import type { FC } from 'react'
import React from 'react'
import cn from 'classnames'
import s from './style.module.css'
import Switch from '@/app/components/base/switch'
export interface IFeatureItemProps {
export type IFeatureItemProps = {
icon: React.ReactNode
previewImgClassName?: string
title: string
description: string
value: boolean
@@ -12,13 +16,14 @@ export interface IFeatureItemProps {
const FeatureItem: FC<IFeatureItemProps> = ({
icon,
previewImgClassName,
title,
description,
value,
onChange
onChange,
}) => {
return (
<div className='flex justify-between p-3 rounded-xl border border-transparent bg-gray-50 hover:border-gray-200 cursor-pointer'>
<div className={cn(s.wrap, 'relative flex justify-between p-3 rounded-xl border border-transparent bg-gray-50 hover:border-gray-200 cursor-pointer')}>
<div className='flex space-x-3 mr-2'>
{/* icon */}
<div
@@ -36,6 +41,11 @@ const FeatureItem: FC<IFeatureItemProps> = ({
</div>
<Switch onChange={onChange} defaultValue={value} />
{
previewImgClassName && (
<div className={cn(s.preview, s[previewImgClassName])}>
</div>)
}
</div>
)
}
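previewImgClassName names one of the classes in the CSS module that follows (openingStatementPreview, suggestedQuestionsAfterAnswerPreview, moreLikeThisPreview); the fixed-position preview div stays display: none until .wrap:hover makes it visible. A usage sketch (icon, copy, and state hypothetical):

<FeatureItem
  icon={featureIcon}
  previewImgClassName='openingStatementPreview'
  title='Conversation Opener'
  description='Shows an opening line at the start of each chat.'
  value={enabled}
  onChange={setEnabled}
/>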

Three image files added (54 KiB, 108 KiB and 211 KiB); their diffs are suppressed because one or more lines are too long.


@@ -0,0 +1,25 @@
.preview {
display: none;
position: fixed;
transform: translate(410px, -54px);
width: 280px;
height: 360px;
background: center center no-repeat;
background-size: contain;
}
.wrap:hover .preview {
display: block;
}
.openingStatementPreview {
background-image: url(./preview-imgs/opening-statement.svg);
}
.suggestedQuestionsAfterAnswerPreview {
background-image: url(./preview-imgs/suggested-questions-after-answer.svg);
}
.moreLikeThisPreview {
background-image: url(./preview-imgs/more-like-this.svg);
}

Some files were not shown because too many files have changed in this diff.