Compare commits

...

15 Commits

Author SHA1 Message Date
twwu 2a25ca2b2c feat: enhance online drive connection UI and add localization for connection status in dataset pipeline 2025-06-26 14:24:50 +08:00
twwu 3a9c79b09a feat: refactor data source handling and integrate OnlineDrive component in TestRunPanel 2025-06-26 13:46:12 +08:00
twwu 025b55ef3b feat: update tooltip text for test run mode in English and Chinese translations for clarity 2025-06-26 10:17:48 +08:00
twwu cf7574bd10 feat: add FooterTips component and integrate it into TestRunPanel; extend DatasourceType enum with onlineDrive 2025-06-26 10:16:37 +08:00
twwu c7cec120a6 feat: update variable validation regex for consistency in ExternalDataToolModal and schema 2025-06-25 17:07:31 +08:00
twwu 7d7fd18e65 Merge branch 'feat/rag-pipeline' of https://github.com/langgenius/dify into feat/rag-pipeline 2025-06-25 16:16:41 +08:00
twwu c6ae9628af feat: refactor input variable handling and configurations in pipeline processing components 2025-06-25 16:15:59 +08:00
Joel 4631575c12 feat: can support choose current node var 2025-06-25 16:06:08 +08:00
zxhlyh a4f4fea0a5 fix: note node delete 2025-06-25 16:01:45 +08:00
twwu 261b7cabc8 feat: enhance OnlineDocumentPreview with datasourceNodeId and implement preview functionality 2025-06-25 11:36:56 +08:00
twwu ccd346d1da feat: add handling for RAG pipeline variables in node interactions 2025-06-25 10:40:48 +08:00
twwu a866cbc6d7 feat: implement usePipeline hook for managing pipeline variables and refactor input field handling 2025-06-25 10:11:26 +08:00
zxhlyh 8f4a0d4a22 variable picker 2025-06-24 17:27:06 +08:00
twwu 1c51bef3cb fix: standardize capitalization in translation keys and remove unused group property in FieldListContainer 2025-06-24 14:25:58 +08:00
zxhlyh c31754e6cd fix: create pipeline from customized 2025-06-24 11:12:39 +08:00
55 changed files with 668 additions and 402 deletions

View File

@@ -153,7 +153,7 @@ const ExternalDataToolModal: FC<ExternalDataToolModalProps> = ({
return
}
if (localeData.variable && !/[a-zA-Z_]\w{0,29}/g.test(localeData.variable)) {
if (localeData.variable && !/^[a-zA-Z_]\w{0,29}$/.test(localeData.variable)) {
notify({ type: 'error', message: t('appDebug.varKeyError.notValid', { key: t('appDebug.feature.tools.modal.variableName.title') }) })
return
}
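
The old check used an unanchored pattern with the global flag, so `.test()` accepted any string containing a valid substring and carried `lastIndex` state between calls; the anchored, flag-free replacement validates the whole key. A minimal sketch of the difference:

```ts
// Minimal sketch of the behavioral difference between the two patterns.
const unanchored = /[a-zA-Z_]\w{0,29}/g
const anchored = /^[a-zA-Z_]\w{0,29}$/

console.log(unanchored.test('9abc'))      // true: matches the "abc" substring, so an invalid key passes
console.log(unanchored.test('valid_key')) // unreliable: the g flag makes test() resume from lastIndex

console.log(anchored.test('9abc'))        // false: the whole string must match
console.log(anchored.test('valid_key'))   // true
```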

View File

@@ -23,7 +23,7 @@ const NotionConnector = ({ onSetting }: NotionConnectorProps) => {
</span>
<div className='system-sm-regular text-text-tertiary'>{t('datasetCreation.stepOne.notionSyncTip')}</div>
</div>
<Button className='h-8' variant='primary' onClick={onSetting}>{t('datasetCreation.stepOne.connect')}</Button>
<Button variant='primary' onClick={onSetting}>{t('datasetCreation.stepOne.connect')}</Button>
</div>
)
}

View File

@@ -18,7 +18,7 @@ import Details from './details'
import Content from './content'
import Actions from './actions'
import type { CreateDatasetReq } from '@/models/datasets'
import { useCreatePipelineDataset } from '@/service/knowledge/use-create-dataset'
import { useCreatePipelineDatasetFromCustomized } from '@/service/knowledge/use-create-dataset'
import CreateModal from './create-modal'
import { useInvalid } from '@/service/use-base'
import { useResetDatasetList } from '@/service/knowledge/use-dataset'
@@ -45,7 +45,7 @@ const TemplateCard = ({
template_id: pipeline.id,
type,
}, false)
const { mutateAsync: createEmptyDataset } = useCreatePipelineDataset()
const { mutateAsync: createDataset } = useCreatePipelineDatasetFromCustomized()
const { handleCheckPluginDependencies } = usePluginDependencies()
const resetDatasetList = useResetDatasetList()
@@ -66,7 +66,7 @@ const TemplateCard = ({
...payload,
yaml_content: pipelineTemplateInfo.export_data,
}
await createEmptyDataset(request, {
await createDataset(request, {
onSuccess: async (newDataset) => {
Toast.notify({
type: 'success',
@@ -76,7 +76,7 @@ const TemplateCard = ({
if (newDataset.pipeline_id)
await handleCheckPluginDependencies(newDataset.pipeline_id, true)
setShowCreateModal(false)
push(`/datasets/${newDataset.id}/pipeline`)
push(`/datasets/${newDataset.dataset_id}/pipeline`)
},
onError: () => {
Toast.notify({
@@ -85,7 +85,7 @@ const TemplateCard = ({
})
},
})
}, [getPipelineTemplateInfo, createEmptyDataset, t, handleCheckPluginDependencies, push, resetDatasetList])
}, [getPipelineTemplateInfo, createDataset, t, handleCheckPluginDependencies, push, resetDatasetList])
const handleShowTemplateDetails = useCallback(() => {
setShowDetailModal(true)

View File

@@ -16,14 +16,18 @@ const DataSourceOptions = ({
datasourceNodeId,
onSelect,
}: DataSourceOptionsProps) => {
const { datasources, options } = useDatasourceOptions(pipelineNodes)
const options = useDatasourceOptions(pipelineNodes)
const handelSelect = useCallback((value: string) => {
const selectedOption = datasources.find(option => option.nodeId === value)
const selectedOption = options.find(option => option.value === value)
if (!selectedOption)
return
onSelect(selectedOption)
}, [datasources, onSelect])
const datasource = {
nodeId: selectedOption.value,
nodeData: selectedOption.data,
}
onSelect(datasource)
}, [onSelect, options])
useEffect(() => {
if (options.length > 0 && !datasourceNodeId)
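
After this refactor the hook returns only the option list, and the selection handler rebuilds the slimmer `Datasource` shape (node id plus node data) from the picked option. A sketch of the shapes involved, with `DataSourceNodeType` stubbed out since its full definition is not part of this diff:

```ts
// Sketch only: DataSourceNodeType is stubbed here; the real type lives in
// app/components/workflow/nodes/data-source/types.
type DataSourceNodeType = Record<string, any>

type DataSourceOption = {
  label: string
  value: string            // the datasource node id
  data: DataSourceNodeType // the full node data
}

type Datasource = {
  nodeId: string
  nodeData: DataSourceNodeType
}

// What the selection handler now does with the chosen option:
const toDatasource = (option: DataSourceOption): Datasource => ({
  nodeId: option.value,
  nodeData: option.data,
})
```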

View File

@@ -1,10 +1,9 @@
import { useTranslation } from 'react-i18next'
import { AddDocumentsStep } from './types'
import type { DataSourceOption, Datasource } from '@/app/components/rag-pipeline/components/panel/test-run/types'
import type { DataSourceOption } from '@/app/components/rag-pipeline/components/panel/test-run/types'
import { useCallback, useMemo, useRef, useState } from 'react'
import { BlockEnum, type Node } from '@/app/components/workflow/types'
import type { DataSourceNodeType } from '@/app/components/workflow/nodes/data-source/types'
import type { DatasourceType } from '@/models/pipeline'
import type { CrawlResult, CrawlResultItem, DocumentItem, FileItem } from '@/models/datasets'
import { CrawlStep } from '@/models/datasets'
import produce from 'immer'
@@ -47,18 +46,6 @@ export const useAddDocumentsSteps = () => {
export const useDatasourceOptions = (pipelineNodes: Node<DataSourceNodeType>[]) => {
const datasourceNodes = pipelineNodes.filter(node => node.data.type === BlockEnum.DataSource)
const datasources: Datasource[] = useMemo(() => {
return datasourceNodes.map((node) => {
return {
nodeId: node.id,
type: node.data.provider_type as DatasourceType,
description: node.data.datasource_label,
docTitle: 'How to use?',
docLink: '',
fileExtensions: node.data.fileExtensions || [],
}
})
}, [datasourceNodes])
const options = useMemo(() => {
const options: DataSourceOption[] = []
@@ -70,10 +57,30 @@ export const useDatasourceOptions = (pipelineNodes: Node<DataSourceNodeType>[])
data: node.data,
})
})
if (process.env.NODE_ENV === 'development') {
// todo: delete mock data
options.push({
label: 'Google Drive',
value: '123456',
// @ts-expect-error mock data
data: {
datasource_parameters: {},
datasource_configurations: {},
type: BlockEnum.DataSource,
title: 'Google Drive',
plugin_id: 'langgenius/google-drive',
provider_type: 'online_drive',
provider_name: 'google_drive',
datasource_name: 'google-drive',
datasource_label: 'Google Drive',
selected: false,
},
})
}
return options
}, [datasourceNodes])
return { datasources, options }
return options
}
export const useLocalFile = () => {

View File

@@ -84,17 +84,18 @@ const CreateFormPipeline = () => {
const isVectorSpaceFull = plan.usage.vectorSpace >= plan.total.vectorSpace
const isShowVectorSpaceFull = allFileLoaded && isVectorSpaceFull && enableBilling
const notSupportBatchUpload = enableBilling && plan.type === 'sandbox'
const datasourceType = datasource?.nodeData.provider_type
const nextBtnDisabled = useMemo(() => {
if (!datasource) return true
if (datasource.type === DatasourceType.localFile)
if (datasourceType === DatasourceType.localFile)
return isShowVectorSpaceFull || !fileList.length || fileList.some(file => !file.file.id)
if (datasource.type === DatasourceType.onlineDocument)
if (datasourceType === DatasourceType.onlineDocument)
return isShowVectorSpaceFull || !onlineDocuments.length
if (datasource.type === DatasourceType.websiteCrawl)
if (datasourceType === DatasourceType.websiteCrawl)
return isShowVectorSpaceFull || !websitePages.length
return false
}, [datasource, isShowVectorSpaceFull, fileList, onlineDocuments.length, websitePages.length])
}, [datasource, datasourceType, isShowVectorSpaceFull, fileList, onlineDocuments.length, websitePages.length])
const { mutateAsync: runPublishedPipeline, isIdle, isPending } = useRunPublishedPipeline()
@@ -102,7 +103,7 @@ const CreateFormPipeline = () => {
if (!datasource)
return
const datasourceInfoList: Record<string, any>[] = []
if (datasource.type === DatasourceType.localFile) {
if (datasourceType === DatasourceType.localFile) {
const { id, name, type, size, extension, mime_type } = previewFile.current as File
const documentInfo = {
related_id: id,
@@ -116,7 +117,7 @@ const CreateFormPipeline = () => {
}
datasourceInfoList.push(documentInfo)
}
if (datasource.type === DatasourceType.onlineDocument) {
if (datasourceType === DatasourceType.onlineDocument) {
const { workspace_id, ...rest } = previewOnlineDocument.current
const documentInfo = {
workspace_id,
@@ -124,13 +125,13 @@ const CreateFormPipeline = () => {
}
datasourceInfoList.push(documentInfo)
}
if (datasource.type === DatasourceType.websiteCrawl)
if (datasourceType === DatasourceType.websiteCrawl)
datasourceInfoList.push(previewWebsitePage.current)
await runPublishedPipeline({
pipeline_id: pipelineId!,
inputs: data,
start_node_id: datasource.nodeId,
datasource_type: datasource.type,
datasource_type: datasourceType as DatasourceType,
datasource_info_list: datasourceInfoList,
is_preview: true,
}, {
@@ -138,13 +139,13 @@ const CreateFormPipeline = () => {
setEstimateData((res as PublishedPipelineRunPreviewResponse).data.outputs)
},
})
}, [datasource, pipelineId, previewFile, previewOnlineDocument, previewWebsitePage, runPublishedPipeline])
}, [datasource, datasourceType, pipelineId, previewFile, previewOnlineDocument, previewWebsitePage, runPublishedPipeline])
const handleProcess = useCallback(async (data: Record<string, any>) => {
if (!datasource)
return
const datasourceInfoList: Record<string, any>[] = []
if (datasource.type === DatasourceType.localFile) {
if (datasourceType === DatasourceType.localFile) {
fileList.forEach((file) => {
const { id, name, type, size, extension, mime_type } = file.file
const documentInfo = {
@@ -160,7 +161,7 @@ const CreateFormPipeline = () => {
datasourceInfoList.push(documentInfo)
})
}
if (datasource.type === DatasourceType.onlineDocument) {
if (datasourceType === DatasourceType.onlineDocument) {
onlineDocuments.forEach((page) => {
const { workspace_id, ...rest } = page
const documentInfo = {
@@ -170,7 +171,7 @@ const CreateFormPipeline = () => {
datasourceInfoList.push(documentInfo)
})
}
if (datasource.type === DatasourceType.websiteCrawl) {
if (datasourceType === DatasourceType.websiteCrawl) {
websitePages.forEach((websitePage) => {
datasourceInfoList.push(websitePage)
})
@@ -179,7 +180,7 @@ const CreateFormPipeline = () => {
pipeline_id: pipelineId!,
inputs: data,
start_node_id: datasource.nodeId,
datasource_type: datasource.type,
datasource_type: datasourceType as DatasourceType,
datasource_info_list: datasourceInfoList,
is_preview: false,
}, {
@@ -189,7 +190,7 @@ const CreateFormPipeline = () => {
handleNextStep()
},
})
}, [datasource, fileList, handleNextStep, onlineDocuments, pipelineId, runPublishedPipeline, websitePages])
}, [datasource, datasourceType, fileList, handleNextStep, onlineDocuments, pipelineId, runPublishedPipeline, websitePages])
const onClickProcess = useCallback(() => {
isPreview.current = false
@@ -246,38 +247,30 @@ const CreateFormPipeline = () => {
onSelect={setDatasource}
pipelineNodes={(pipelineInfo?.graph.nodes || []) as Node<DataSourceNodeType>[]}
/>
{datasource?.type === DatasourceType.localFile && (
{datasourceType === DatasourceType.localFile && (
<LocalFile
files={fileList}
allowedExtensions={datasource?.fileExtensions || []}
allowedExtensions={datasource!.nodeData.fileExtensions || []}
updateFile={updateFile}
updateFileList={updateFileList}
onPreview={updateCurrentFile}
notSupportBatchUpload={notSupportBatchUpload}
/>
)}
{datasource?.type === DatasourceType.onlineDocument && (
{datasourceType === DatasourceType.onlineDocument && (
<OnlineDocuments
nodeId={datasource?.nodeId || ''}
headerInfo={{
title: datasource.description,
docTitle: datasource.docTitle || '',
docLink: datasource.docLink || '',
}}
nodeId={datasource!.nodeId}
nodeData={datasource!.nodeData}
onlineDocuments={onlineDocuments}
updateOnlineDocuments={updateOnlineDocuments}
canPreview
onPreview={updateCurrentPage}
/>
)}
{datasource?.type === DatasourceType.websiteCrawl && (
{datasourceType === DatasourceType.websiteCrawl && (
<WebsiteCrawl
nodeId={datasource?.nodeId || ''}
headerInfo={{
title: datasource.description,
docTitle: datasource.docTitle || '',
docLink: datasource.docLink || '',
}}
nodeId={datasource!.nodeId}
nodeData={datasource!.nodeData}
crawlResult={crawlResult}
setCrawlResult={setCrawlResult}
step={step}
@@ -299,7 +292,7 @@ const CreateFormPipeline = () => {
currentStep === 2 && (
<ProcessDocuments
ref={formRef}
dataSourceNodeId={datasource?.nodeId || ''}
dataSourceNodeId={datasource!.nodeId}
onProcess={onClickProcess}
onPreview={onClickPreview}
onSubmit={handleSubmit}
@@ -324,7 +317,13 @@ const CreateFormPipeline = () => {
<div className='h-full min-w-0 flex-1'>
<div className='flex h-full flex-col pl-2 pt-2'>
{currentFile && <FilePreview file={currentFile} hidePreview={hideFilePreview} />}
{currentDocument && <OnlineDocumentPreview currentPage={currentDocument} hidePreview={hideOnlineDocumentPreview} />}
{currentDocument && (
<OnlineDocumentPreview
datasourceNodeId={datasource!.nodeId}
currentPage={currentDocument}
hidePreview={hideOnlineDocumentPreview}
/>
)}
{currentWebsite && <WebsitePreview payload={currentWebsite} hidePreview={hideWebsitePreview} />}
</div>
</div>
@@ -335,7 +334,7 @@ const CreateFormPipeline = () => {
<div className='h-full min-w-0 flex-1'>
<div className='flex h-full flex-col pl-2 pt-2'>
<ChunkPreview
dataSourceType={datasource!.type}
dataSourceType={datasourceType as DatasourceType}
files={fileList.map(file => file.file)}
onlineDocuments={onlineDocuments}
websitePages={websitePages}

View File

@@ -1,48 +1,64 @@
'use client'
import React from 'react'
import React, { useEffect, useState } from 'react'
import { useTranslation } from 'react-i18next'
import type { NotionPage } from '@/models/common'
import { usePreviewNotionPage } from '@/service/knowledge/use-dataset'
import { RiCloseLine } from '@remixicon/react'
import { formatNumberAbbreviated } from '@/utils/format'
import Loading from './loading'
import { Notion } from '@/app/components/base/icons/src/public/common'
import { useDatasetDetailContextWithSelector } from '@/context/dataset-detail'
import { usePreviewOnlineDocument } from '@/service/use-pipeline'
import Toast from '@/app/components/base/toast'
import { Markdown } from '@/app/components/base/markdown'
type OnlineDocumentPreviewProps = {
currentPage: NotionPage
datasourceNodeId: string
hidePreview: () => void
}
const OnlineDocumentPreview = ({
currentPage,
datasourceNodeId,
hidePreview,
}: OnlineDocumentPreviewProps) => {
const { t } = useTranslation()
const [content, setContent] = useState('')
const pipelineId = useDatasetDetailContextWithSelector(state => state.dataset?.pipeline_id)
const { mutateAsync: getOnlineDocumentContent, isPending } = usePreviewOnlineDocument()
// todo: replace with a generic hook for previewing online documents
const { data: notionPageData, isFetching } = usePreviewNotionPage({
workspaceID: currentPage.workspace_id,
pageID: currentPage.page_id,
pageType: currentPage.type,
})
useEffect(() => {
getOnlineDocumentContent({
workspaceID: currentPage.workspace_id,
pageID: currentPage.page_id,
pageType: currentPage.type,
pipelineId: pipelineId || '',
datasourceNodeId,
}, {
onSuccess(data) {
setContent(data.content)
},
onError(error) {
Toast.notify({
type: 'error',
message: error.message,
})
},
})
// eslint-disable-next-line react-hooks/exhaustive-deps
}, [])
return (
<div className='flex h-full w-full flex-col rounded-t-xl border-l border-t border-components-panel-border bg-background-default-lighter shadow-md shadow-shadow-shadow-5'>
<div className='flex gap-x-2 border-b border-divider-subtle pb-3 pl-6 pr-4 pt-4'>
<div className='flex grow flex-col gap-y-1'>
<div className='system-2xs-semibold-uppercase'>{t('datasetPipeline.addDocuments.stepOne.preview')}</div>
<div className='system-2xs-semibold-uppercase text-text-accent'>{t('datasetPipeline.addDocuments.stepOne.preview')}</div>
<div className='title-md-semi-bold text-tex-primary'>{currentPage?.page_name}</div>
<div className='system-xs-medium flex gap-x-1 text-text-tertiary'>
<div className='system-xs-medium flex items-center gap-x-1 text-text-tertiary'>
<Notion className='size-3.5' />
<span>{currentPage.type}</span>
<span>·</span>
<span>Notion Page</span>
<span>·</span>
{notionPageData && (
<>
<span>·</span>
<span>{`${formatNumberAbbreviated(notionPageData.content.length)} ${t('datasetPipeline.addDocuments.characters')}`}</span>
</>
)}
<span>{`${formatNumberAbbreviated(content.length)} ${t('datasetPipeline.addDocuments.characters')}`}</span>
</div>
</div>
<button
@@ -53,14 +69,14 @@ const OnlineDocumentPreview = ({
<RiCloseLine className='size-[18px]' />
</button>
</div>
{isFetching && (
{isPending && (
<div className='grow'>
<Loading />
</div>
)}
{!isFetching && notionPageData && (
{!isPending && content && (
<div className='body-md-regular grow overflow-hidden px-6 py-5 text-text-secondary'>
{notionPageData.content}
<Markdown content={content} />
</div>
)}
</div>

View File

@@ -1,58 +1,15 @@
import { useMemo } from 'react'
import { BaseFieldType } from '@/app/components/base/form/form-scenarios/base/types'
import { usePublishedPipelineProcessingParams } from '@/service/use-pipeline'
import { VAR_TYPE_MAP } from '@/models/pipeline'
import { useDatasetDetailContextWithSelector } from '@/context/dataset-detail'
export const useConfigurations = (datasourceNodeId: string) => {
export const useInputVariables = (datasourceNodeId: string) => {
const pipelineId = useDatasetDetailContextWithSelector(state => state.dataset?.pipeline_id)
const { data: paramsConfig, isFetching: isFetchingParams } = usePublishedPipelineProcessingParams({
pipeline_id: pipelineId!,
node_id: datasourceNodeId,
})
const initialData = useMemo(() => {
const variables = paramsConfig?.variables || []
return variables.reduce((acc, item) => {
const type = VAR_TYPE_MAP[item.type]
if ([BaseFieldType.textInput, BaseFieldType.paragraph, BaseFieldType.select].includes(type))
acc[item.variable] = item.default_value ?? ''
if (type === BaseFieldType.numberInput)
acc[item.variable] = item.default_value ?? 0
if (type === BaseFieldType.checkbox)
acc[item.variable] = true
if ([BaseFieldType.file, BaseFieldType.fileList].includes(type))
acc[item.variable] = []
return acc
}, {} as Record<string, any>)
}, [paramsConfig])
const configurations = useMemo(() => {
const variables = paramsConfig?.variables || []
const configs = variables.map(item => ({
type: VAR_TYPE_MAP[item.type],
variable: item.variable,
label: item.label,
required: item.required,
maxLength: item.max_length,
options: item.options?.map(option => ({
label: option,
value: option,
})),
showConditions: [],
placeholder: item.placeholder,
tooltip: item.tooltips,
unit: item.unit,
allowedFileTypes: item.allowed_file_types,
allowedFileExtensions: item.allowed_file_extensions,
allowedFileUploadMethods: item.allowed_file_upload_methods,
}))
return configs
}, [paramsConfig])
return {
paramsConfig,
isFetchingParams,
initialData,
configurations,
}
}

View File

@@ -1,8 +1,9 @@
import React from 'react'
import { generateZodSchema } from '@/app/components/base/form/form-scenarios/base/utils'
import { useConfigurations } from './hooks'
import { useInputVariables } from './hooks'
import Form from './form'
import Actions from './actions'
import { useConfigurations, useInitialData } from '@/app/components/rag-pipeline/hooks/use-input-fields'
type ProcessDocumentsProps = {
dataSourceNodeId: string
@@ -21,7 +22,9 @@ const ProcessDocuments = ({
onBack,
ref,
}: ProcessDocumentsProps) => {
const { isFetchingParams, initialData, configurations } = useConfigurations(dataSourceNodeId)
const { isFetchingParams, paramsConfig } = useInputVariables(dataSourceNodeId)
const initialData = useInitialData(paramsConfig?.variables || [])
const configurations = useConfigurations(paramsConfig?.variables || [])
const schema = generateZodSchema(configurations)
return (

View File

@@ -12,7 +12,7 @@ import AppUnavailable from '@/app/components/base/app-unavailable'
import { useDefaultModel } from '@/app/components/header/account-setting/model-provider-page/hooks'
import { ModelTypeEnum } from '@/app/components/header/account-setting/model-provider-page/declarations'
import type { NotionPage } from '@/models/common'
import { useDocumentDetail, useInvalidDocumentDetailKey } from '@/service/knowledge/use-document'
import { useDocumentDetail, useInvalidDocumentDetail, useInvalidDocumentList } from '@/service/knowledge/use-document'
type DocumentSettingsProps = {
datasetId: string
@@ -26,8 +26,10 @@ const DocumentSettings = ({ datasetId, documentId }: DocumentSettingsProps) => {
const { indexingTechnique, dataset } = useContext(DatasetDetailContext)
const { data: embeddingsDefaultModel } = useDefaultModel(ModelTypeEnum.textEmbedding)
const invalidDocumentDetail = useInvalidDocumentDetailKey()
const invalidDocumentList = useInvalidDocumentList(datasetId)
const invalidDocumentDetail = useInvalidDocumentDetail()
const saveHandler = () => {
invalidDocumentList()
invalidDocumentDetail()
router.push(`/datasets/${datasetId}/documents/${documentId}`)
}
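
Saving document settings now invalidates both the document list and the document detail caches before navigating back, so the destination pages refetch fresh data. The hook implementations are not part of this diff; a hedged sketch of what an invalidation hook of this shape typically looks like with TanStack Query (the query key below is illustrative):

```ts
import { useQueryClient } from '@tanstack/react-query'

// Hedged sketch only: the real useInvalidDocumentDetail lives in
// service/knowledge/use-document and its actual query key may differ.
export const useInvalidDocumentDetail = () => {
  const queryClient = useQueryClient()
  return () => {
    // Mark every cached document-detail query stale so the next mount refetches.
    queryClient.invalidateQueries({ queryKey: ['document-detail'] })
  }
}
```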

View File

@@ -13,7 +13,7 @@ import { DatasourceType } from '@/models/pipeline'
import { noop } from 'lodash-es'
import { useDatasetDetailContextWithSelector } from '@/context/dataset-detail'
import { useRouter } from 'next/navigation'
import { useInvalidDocumentList } from '@/service/knowledge/use-document'
import { useInvalidDocumentDetail, useInvalidDocumentList } from '@/service/knowledge/use-document'
type PipelineSettingsProps = {
datasetId: string
@@ -99,6 +99,7 @@ const PipelineSettings = ({
}, [lastRunData, pipelineId, runPublishedPipeline])
const invalidDocumentList = useInvalidDocumentList(datasetId)
const invalidDocumentDetail = useInvalidDocumentDetail()
const handleProcess = useCallback(async (data: Record<string, any>) => {
if (!lastRunData)
return
@@ -115,10 +116,11 @@ const PipelineSettings = ({
}, {
onSuccess: () => {
invalidDocumentList()
invalidDocumentDetail()
push(`/datasets/${datasetId}/documents/${documentId}`)
},
})
}, [datasetId, documentId, invalidDocumentList, lastRunData, pipelineId, push, runPublishedPipeline])
}, [datasetId, documentId, invalidDocumentDetail, invalidDocumentList, lastRunData, pipelineId, push, runPublishedPipeline])
const onClickProcess = useCallback(() => {
isPreview.current = false

View File

@@ -1,59 +1,15 @@
import { useMemo } from 'react'
import { useDatasetDetailContextWithSelector } from '@/context/dataset-detail'
import { usePublishedPipelineProcessingParams } from '@/service/use-pipeline'
import { VAR_TYPE_MAP } from '@/models/pipeline'
import { BaseFieldType } from '@/app/components/base/form/form-scenarios/base/types'
export const useConfigurations = (lastRunInputData: Record<string, any>, datasourceNodeId: string) => {
export const useInputVariables = (datasourceNodeId: string) => {
const pipelineId = useDatasetDetailContextWithSelector(state => state.dataset?.pipeline_id)
const { data: paramsConfig, isFetching: isFetchingParams } = usePublishedPipelineProcessingParams({
pipeline_id: pipelineId!,
node_id: datasourceNodeId,
})
const initialData = useMemo(() => {
const variables = paramsConfig?.variables || []
return variables.reduce((acc, item) => {
const type = VAR_TYPE_MAP[item.type]
const variableName = item.variable
if ([BaseFieldType.textInput, BaseFieldType.paragraph, BaseFieldType.select].includes(type))
acc[item.variable] = lastRunInputData[variableName] ?? ''
if (type === BaseFieldType.numberInput)
acc[item.variable] = lastRunInputData[variableName] ?? 0
if (type === BaseFieldType.checkbox)
acc[item.variable] = lastRunInputData[variableName]
if ([BaseFieldType.file, BaseFieldType.fileList].includes(type))
acc[item.variable] = lastRunInputData[variableName]
return acc
}, {} as Record<string, any>)
}, [lastRunInputData, paramsConfig?.variables])
const configurations = useMemo(() => {
const variables = paramsConfig?.variables || []
const configs = variables.map(item => ({
type: VAR_TYPE_MAP[item.type],
variable: item.variable,
label: item.label,
required: item.required,
maxLength: item.max_length,
options: item.options?.map(option => ({
label: option,
value: option,
})),
showConditions: [],
placeholder: item.placeholder,
tooltip: item.tooltips,
unit: item.unit,
allowedFileTypes: item.allowed_file_types,
allowedFileExtensions: item.allowed_file_extensions,
allowedFileUploadMethods: item.allowed_file_upload_methods,
}))
return configs
}, [paramsConfig])
return {
paramsConfig,
isFetchingParams,
initialData,
configurations,
}
}

View File

@@ -1,7 +1,8 @@
import { generateZodSchema } from '@/app/components/base/form/form-scenarios/base/utils'
import { useConfigurations } from './hooks'
import { useInputVariables } from './hooks'
import Actions from './actions'
import Form from '../../../../create-from-pipeline/process-documents/form'
import { useConfigurations, useInitialData } from '@/app/components/rag-pipeline/hooks/use-input-fields'
type ProcessDocumentsProps = {
datasourceNodeId: string
@@ -20,7 +21,9 @@ const ProcessDocuments = ({
onSubmit,
ref,
}: ProcessDocumentsProps) => {
const { isFetchingParams, initialData, configurations } = useConfigurations(lastRunInputData, datasourceNodeId)
const { isFetchingParams, paramsConfig } = useInputVariables(datasourceNodeId)
const initialData = useInitialData(paramsConfig?.variables || [], lastRunInputData)
const configurations = useConfigurations(paramsConfig?.variables || [])
const schema = generateZodSchema(configurations)
return (

View File

@@ -23,7 +23,7 @@ import { DataSourceType, ProcessMode } from '@/models/datasets'
import IndexFailed from '@/app/components/datasets/common/document-status-with-action/index-failed'
import { useProviderContext } from '@/context/provider-context'
import cn from '@/utils/classnames'
import { useDocumentList, useInvalidDocumentDetailKey, useInvalidDocumentList } from '@/service/knowledge/use-document'
import { useDocumentList, useInvalidDocumentDetail, useInvalidDocumentList } from '@/service/knowledge/use-document'
import { useInvalid } from '@/service/use-base'
import { useChildSegmentListKey, useSegmentListKey } from '@/service/knowledge/use-segment'
import useEditDocumentMetadata from '../metadata/hooks/use-edit-dataset-metadata'
@@ -123,7 +123,7 @@ const Documents: FC<IDocumentsProps> = ({ datasetId }) => {
// eslint-disable-next-line react-hooks/exhaustive-deps
}, [documentsRes])
const invalidDocumentDetail = useInvalidDocumentDetailKey()
const invalidDocumentDetail = useInvalidDocumentDetail()
const invalidChunkList = useInvalid(useSegmentListKey)
const invalidChildChunkList = useInvalid(useChildSegmentListKey)

View File

@@ -52,6 +52,12 @@ export const useHiddenFieldNames = (type: PipelineInputVarType) => {
t('appDebug.variableConfig.tooltips'),
]
break
case PipelineInputVarType.checkbox:
fieldNames = [
t('appDebug.variableConfig.startedChecked'),
t('appDebug.variableConfig.tooltips'),
]
break
default:
fieldNames = [
t('appDebug.variableConfig.tooltips'),
@@ -258,6 +264,15 @@ export const useHiddenConfigurations = (props: {
popupProps: {
wrapperClassName: 'z-40',
},
}, {
type: InputFieldType.checkbox,
label: t('appDebug.variableConfig.startChecked'),
variable: 'default',
required: false,
showConditions: [{
variable: 'type',
value: PipelineInputVarType.checkbox,
}],
}, {
type: InputFieldType.textInput,
label: t('appDebug.variableConfig.placeholder'),

View File

@@ -25,14 +25,14 @@ export const createInputFieldSchema = (type: PipelineInputVarType, t: TFunction,
const { maxFileUploadLimit } = options
const commonSchema = z.object({
type: InputTypeEnum,
variable: z.string({
invalid_type_error: t('appDebug.varKeyError.notValid', { key: t('appDebug.variableConfig.varName') }),
}).nonempty({
variable: z.string().nonempty({
message: t('appDebug.varKeyError.canNoBeEmpty', { key: t('appDebug.variableConfig.varName') }),
}).max(MAX_VAR_KEY_LENGTH, {
message: t('appDebug.varKeyError.tooLong', { key: t('appDebug.variableConfig.varName') }),
}).regex(/^(?!\d)\w+/, {
message: t('appDebug.varKeyError.notStartWithNumber', { key: t('appDebug.variableConfig.varName') }),
}).regex(/^[a-zA-Z_]\w{0,29}$/, {
message: t('appDebug.varKeyError.notValid', { key: t('appDebug.variableConfig.varName') }),
}),
label: z.string().nonempty({
message: t('appDebug.variableConfig.errorMsg.labelNameRequired'),
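
The variable-name rule is now a single anchored regex, matching the check in ExternalDataToolModal: a valid key starts with a letter or underscore and is at most 30 word characters long. A trimmed sketch of the behavior, with the i18n messages replaced by literals:

```ts
import { z } from 'zod'

// Trimmed sketch: the real schema also applies a max-length check and pulls
// its messages from i18n via t().
const variableName = z.string()
  .nonempty('variable name is required')
  .regex(/^[a-zA-Z_]\w{0,29}$/, 'not a valid variable name')

variableName.safeParse('user_input').success   // true
variableName.safeParse('1st_field').success    // false: starts with a digit
variableName.safeParse('a'.repeat(31)).success // false: longer than 30 characters
```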

View File

@@ -51,17 +51,17 @@ const FieldItem = ({
<div
ref={ref}
className={cn(
'flex h-8 cursor-pointer items-center justify-between gap-x-1 rounded-lg border border-components-panel-border-subtle bg-components-panel-on-panel-item-bg py-1 pl-2 shadow-xs hover:shadow-sm',
(isHovering && !readonly) ? 'pr-1' : 'pr-2.5',
'handle flex h-8 cursor-pointer items-center justify-between gap-x-1 rounded-lg border border-components-panel-border-subtle bg-components-panel-on-panel-item-bg py-1 pl-2 shadow-xs hover:shadow-sm',
(isHovering && !readonly) ? 'cursor-all-scroll pr-1' : 'pr-2.5',
readonly && 'cursor-default',
)}
onClick={handleOnClickEdit}
// onClick={handleOnClickEdit}
>
<div className='flex grow basis-0 items-center gap-x-1'>
<div className='flex grow basis-0 items-center gap-x-1 overflow-hidden'>
{
(isHovering && !readonly)
? <RiDraggable className='handle h-4 w-4 cursor-all-scroll text-text-quaternary' />
: <InputField className='size-4 text-text-accent' />
? <RiDraggable className='size-4 shrink-0 text-text-quaternary' />
: <InputField className='size-4 shrink-0 text-text-accent' />
}
<div
title={payload.variable}
@@ -73,10 +73,10 @@ const FieldItem = ({
<>
<div className='system-xs-regular shrink-0 text-text-quaternary'>·</div>
<div
title={payload.label as string}
className='system-xs-medium max-w-[130px] truncate text-text-tertiary'
title={payload.label}
className='system-xs-medium grow truncate text-text-tertiary'
>
{payload.label as string}
{payload.label}
</div>
</>
)}

View File

@@ -50,7 +50,6 @@ const FieldListContainer = ({
setList={handleListSortChange}
handle='.handle'
ghostClass='opacity-50'
group='rag-pipeline-input-field'
animation={150}
disabled={readonly}
>

View File

@@ -8,9 +8,11 @@ import type { InputVar } from '@/models/pipeline'
import type { SortableItem } from './types'
import type { MoreInfo, ValueSelector } from '@/app/components/workflow/types'
import { ChangeType } from '@/app/components/workflow/types'
import { useWorkflow } from '@/app/components/workflow/hooks'
import { useBoolean } from 'ahooks'
import Toast from '@/app/components/base/toast'
import { usePipeline } from '../../../hooks/use-pipeline'
const VARIABLE_PREFIX = 'rag'
export const useFieldList = (
initialInputFields: InputVar[],
@@ -22,7 +24,7 @@ export const useFieldList = (
const [removedVar, setRemovedVar] = useState<ValueSelector>([])
const [removedIndex, setRemoveIndex] = useState(0)
const { handleOutVarRenameChange, isVarUsedInNodes, removeUsedVarInNodes } = useWorkflow()
const { handleInputVarRename, isVarUsedInNodes, removeUsedVarInNodes } = usePipeline()
const [isShowRemoveVarConfirm, {
setTrue: showRemoveVarConfirm,
@@ -61,9 +63,9 @@ export const useFieldList = (
const handleRemoveField = useCallback((index: number) => {
const itemToRemove = inputFieldsRef.current[index]
// Check if the variable is used in other nodes
if (isVarUsedInNodes([nodeId, itemToRemove.variable || ''])) {
if (isVarUsedInNodes([VARIABLE_PREFIX, nodeId, itemToRemove.variable || ''])) {
showRemoveVarConfirm()
setRemovedVar([nodeId, itemToRemove.variable || ''])
setRemovedVar([VARIABLE_PREFIX, nodeId, itemToRemove.variable || ''])
setRemoveIndex(index as number)
return
}
@@ -99,9 +101,9 @@ export const useFieldList = (
handleInputFieldsChange(newInputFields)
// Update variable name in nodes if it has changed
if (moreInfo?.type === ChangeType.changeVarName)
handleOutVarRenameChange(nodeId, [nodeId, moreInfo.payload?.beforeKey || ''], [nodeId, moreInfo.payload?.afterKey || ''])
handleInputVarRename(nodeId, [VARIABLE_PREFIX, nodeId, moreInfo.payload?.beforeKey || ''], [VARIABLE_PREFIX, nodeId, moreInfo.payload?.afterKey || ''])
handleCloseInputFieldEditor()
}, [editingField?.variable, handleCloseInputFieldEditor, handleInputFieldsChange, handleOutVarRenameChange, nodeId])
}, [editingField?.variable, handleCloseInputFieldEditor, handleInputFieldsChange, handleInputVarRename, nodeId])
return {
inputFields,

View File

@@ -1,7 +1,7 @@
import { useAppForm } from '@/app/components/base/form'
import BaseField from '@/app/components/base/form/form-scenarios/base/field'
import type { RAGPipelineVariables } from '@/models/pipeline'
import { useConfigurations, useInitialData } from '../../panel/test-run/data-source/website-crawl/base/options/hooks'
import { useConfigurations, useInitialData } from '@/app/components/rag-pipeline/hooks/use-input-fields'
type FormProps = {
variables: RAGPipelineVariables

View File

@@ -12,14 +12,18 @@ const DataSourceOptions = ({
dataSourceNodeId,
onSelect,
}: DataSourceOptionsProps) => {
const { datasources, options } = useDatasourceOptions()
const options = useDatasourceOptions()
const handelSelect = useCallback((value: string) => {
const selectedOption = datasources.find(option => option.nodeId === value)
const selectedOption = options.find(option => option.value === value)
if (!selectedOption)
return
onSelect(selectedOption)
}, [datasources, onSelect])
const datasource = {
nodeId: selectedOption.value,
nodeData: selectedOption.data,
}
onSelect(datasource)
}, [onSelect, options])
useEffect(() => {
if (options.length > 0 && !dataSourceNodeId)
@@ -33,9 +37,10 @@ const DataSourceOptions = ({
<OptionCard
key={option.value}
label={option.label}
value={option.value}
nodeData={option.data}
selected={dataSourceNodeId === option.value}
onClick={handelSelect.bind(null, option.value)}
onClick={handelSelect}
/>
))}
</div>

View File

@@ -1,4 +1,4 @@
import React from 'react'
import React, { useCallback } from 'react'
import cn from '@/utils/classnames'
import BlockIcon from '@/app/components/workflow/block-icon'
import { BlockEnum } from '@/app/components/workflow/types'
@@ -7,19 +7,25 @@ import { useToolIcon } from '@/app/components/workflow/hooks'
type OptionCardProps = {
label: string
value: string
selected: boolean
nodeData: DataSourceNodeType
onClick?: () => void
onClick?: (value: string) => void
}
const OptionCard = ({
label,
value,
selected,
nodeData,
onClick,
}: OptionCardProps) => {
const toolIcon = useToolIcon(nodeData)
const handleClickCard = useCallback(() => {
onClick?.(value)
}, [value, onClick])
return (
<div
className={cn(
@@ -28,7 +34,7 @@ const OptionCard = ({
? 'border-components-option-card-option-selected-border bg-components-option-card-option-selected-bg shadow-xs ring-[0.5px] ring-inset ring-components-option-card-option-selected-border'
: 'hover:bg-components-option-card-bg-hover hover:border-components-option-card-option-border-hover hover:shadow-xs',
)}
onClick={onClick}
onClick={handleClickCard}
>
<div className='flex size-7 items-center justify-center rounded-lg border-[0.5px] border-components-panel-border bg-background-default-dodge p-1'>
<BlockIcon

View File

@@ -18,7 +18,7 @@ import { useFileUploadConfig } from '@/service/use-common'
const FILES_NUMBER_LIMIT = 20
type IFileUploaderProps = {
export type FileUploaderProps = {
fileList: FileItem[]
allowedExtensions: string[]
prepareFileList: (files: FileItem[]) => void
@@ -36,7 +36,7 @@ const FileUploader = ({
onFileListUpdate,
onPreview,
notSupportBatchUpload,
}: IFileUploaderProps) => {
}: FileUploaderProps) => {
const { t } = useTranslation()
const { notify } = useContext(ToastContext)
const { locale } = useContext(I18n)

View File

@@ -1,13 +1,10 @@
import type { NotionPage } from '@/models/common'
import OnlineDocumentSelector from './online-document-selector'
import type { DataSourceNodeType } from '@/app/components/workflow/nodes/data-source/types'
type OnlineDocumentsProps = {
nodeId: string
headerInfo: {
title: string
docTitle: string
docLink: string
}
nodeData: DataSourceNodeType
onlineDocuments: NotionPage[]
updateOnlineDocuments: (value: NotionPage[]) => void
canPreview?: boolean
@@ -17,7 +14,7 @@ type OnlineDocumentsProps = {
const OnlineDocuments = ({
nodeId,
headerInfo,
nodeData,
onlineDocuments,
updateOnlineDocuments,
canPreview = false,
@@ -27,7 +24,7 @@ const OnlineDocuments = ({
return (
<OnlineDocumentSelector
nodeId={nodeId}
headerInfo={headerInfo}
nodeData={nodeData}
value={onlineDocuments.map(page => page.page_id)}
onSelect={updateOnlineDocuments}
canPreview={canPreview}

View File

@@ -9,6 +9,7 @@ import { DatasourceType } from '@/models/pipeline'
import { ssePost } from '@/service/base'
import Toast from '@/app/components/base/toast'
import type { DataSourceNodeCompletedResponse } from '@/types/pipeline'
import type { DataSourceNodeType } from '@/app/components/workflow/nodes/data-source/types'
type OnlineDocumentSelectorProps = {
value?: string[]
@@ -18,11 +19,7 @@ type OnlineDocumentSelectorProps = {
onPreview?: (selectedPage: NotionPage) => void
isInPipeline?: boolean
nodeId: string
headerInfo: {
title: string
docTitle: string
docLink: string
}
nodeData: DataSourceNodeType
}
const OnlineDocumentSelector = ({
@@ -33,7 +30,7 @@ const OnlineDocumentSelector = ({
onPreview,
isInPipeline = false,
nodeId,
headerInfo,
nodeData,
}: OnlineDocumentSelectorProps) => {
const pipelineId = useDatasetDetailContextWithSelector(s => s.dataset?.pipeline_id)
const [documentsData, setDocumentsData] = useState<DataSourceNotionWorkspace[]>([])
@@ -118,6 +115,14 @@ const OnlineDocumentSelector = ({
setCurrentWorkspaceId(firstWorkspaceId)
}, [firstWorkspaceId])
const headerInfo = useMemo(() => {
return {
title: nodeData.title,
docTitle: 'How to use?',
docLink: 'https://docs.dify.ai',
}
}, [nodeData])
if (!documentsData?.length)
return null

View File

@@ -0,0 +1,50 @@
import { useTranslation } from 'react-i18next'
import Button from '@/app/components/base/button'
import { Icon3Dots } from '@/app/components/base/icons/src/vender/line/others'
import BlockIcon from '@/app/components/workflow/block-icon'
import { useToolIcon } from '@/app/components/workflow/hooks'
import type { DataSourceNodeType } from '@/app/components/workflow/nodes/data-source/types'
import { BlockEnum } from '@/app/components/workflow/types'
type ConnectProps = {
nodeData: DataSourceNodeType
onSetting: () => void
}
const Connect = ({
nodeData,
onSetting,
}: ConnectProps) => {
const { t } = useTranslation()
const toolIcon = useToolIcon(nodeData)
return (
<div className='flex flex-col items-start gap-y-2 rounded-xl bg-workflow-process-bg p-6'>
<div className='flex size-12 items-center justify-center rounded-[10px] border-[0.5px] border-components-card-border bg-components-card-bg p-1 shadow-lg shadow-shadow-shadow-5'>
<BlockIcon
type={BlockEnum.DataSource}
toolIcon={toolIcon}
size='md'
/>
</div>
<div className='flex flex-col gap-y-1'>
<div className='flex flex-col gap-y-1 pb-3 pt-1'>
<div className='system-md-semibold text-text-secondary'>
<span className='relative'>
{t('datasetPipeline.onlineDrive.notConnected', { name: nodeData.title })}
<Icon3Dots className='absolute -right-2.5 -top-1.5 size-4 text-text-secondary' />
</span>
</div>
<div className='system-sm-regular text-text-tertiary'>
{t('datasetPipeline.onlineDrive.notConnectedTip', { name: nodeData.title })}
</div>
</div>
<Button className='w-fit' variant='primary' onClick={onSetting}>
{t('datasetCreation.stepOne.connect')}
</Button>
</div>
</div>
)
}
export default Connect

View File

@@ -0,0 +1,16 @@
import type { DataSourceNodeType } from '@/app/components/workflow/nodes/data-source/types'
import Connect from './connect'
type OnlineDriveProps = {
nodeData: DataSourceNodeType
}
const OnlineDrive = ({
nodeData,
}: OnlineDriveProps) => {
return (
<Connect nodeData={nodeData} />
)
}
export default OnlineDrive

View File

@@ -1,5 +1,5 @@
'use client'
import React, { useCallback, useEffect, useRef, useState } from 'react'
import React, { useCallback, useEffect, useMemo, useRef, useState } from 'react'
import { useTranslation } from 'react-i18next'
import type { CrawlResult, CrawlResultItem } from '@/models/datasets'
import { CrawlStep } from '@/models/datasets'
@@ -19,22 +19,19 @@ import type {
DataSourceNodeCompletedResponse,
DataSourceNodeProcessingResponse,
} from '@/types/pipeline'
import type { DataSourceNodeType } from '@/app/components/workflow/nodes/data-source/types'
const I18N_PREFIX = 'datasetCreation.stepOne.website'
export type CrawlerProps = {
nodeId: string
nodeData: DataSourceNodeType
crawlResult: CrawlResult | undefined
setCrawlResult: (payload: CrawlResult) => void
step: CrawlStep
setStep: (step: CrawlStep) => void
checkedCrawlResult: CrawlResultItem[]
onCheckedCrawlResultChange: (payload: CrawlResultItem[]) => void
headerInfo: {
title: string
docTitle: string
docLink: string
}
previewIndex?: number
onPreview?: (payload: CrawlResultItem, index: number) => void
isInPipeline?: boolean
@@ -42,12 +39,12 @@ export type CrawlerProps = {
const Crawler = ({
nodeId,
nodeData,
crawlResult,
setCrawlResult,
step,
setStep,
checkedCrawlResult,
headerInfo,
onCheckedCrawlResultChange,
previewIndex,
onPreview,
@@ -125,6 +122,14 @@ const Crawler = ({
handleRun(value)
}, [handleRun])
const headerInfo = useMemo(() => {
return {
title: nodeData.title,
docTitle: 'How to use?',
docLink: 'https://docs.dify.ai',
}
}, [nodeData])
return (
<div className='flex flex-col'>
<Header

View File

@@ -9,7 +9,7 @@ import { useEffect, useMemo } from 'react'
import { useTranslation } from 'react-i18next'
import Toast from '@/app/components/base/toast'
import type { RAGPipelineVariables } from '@/models/pipeline'
import { useConfigurations, useInitialData } from './hooks'
import { useConfigurations, useInitialData } from '@/app/components/rag-pipeline/hooks/use-input-fields'
import { generateZodSchema } from '@/app/components/base/form/form-scenarios/base/utils'
const I18N_PREFIX = 'datasetCreation.stepOne.website'

View File

@@ -7,12 +7,12 @@ type WebsiteCrawlProps = CrawlerProps
const WebsiteCrawl = ({
nodeId,
nodeData,
crawlResult,
setCrawlResult,
step,
setStep,
checkedCrawlResult,
headerInfo,
onCheckedCrawlResultChange,
previewIndex,
onPreview,
@@ -21,12 +21,12 @@ const WebsiteCrawl = ({
return (
<Crawler
nodeId={nodeId}
nodeData={nodeData}
crawlResult={crawlResult}
setCrawlResult={setCrawlResult}
step={step}
setStep={setStep}
checkedCrawlResult={checkedCrawlResult}
headerInfo={headerInfo}
onCheckedCrawlResultChange={onCheckedCrawlResultChange}
previewIndex={previewIndex}
onPreview={onPreview}

View File

@@ -1,59 +1,15 @@
import { useMemo } from 'react'
import type { BaseConfiguration } from '@/app/components/base/form/form-scenarios/base/types'
import { BaseFieldType } from '@/app/components/base/form/form-scenarios/base/types'
import { useStore } from '@/app/components/workflow/store'
import { useDraftPipelineProcessingParams } from '@/service/use-pipeline'
import { VAR_TYPE_MAP } from '@/models/pipeline'
export const useConfigurations = (datasourceNodeId: string) => {
export const useInputVariables = (datasourceNodeId: string) => {
const pipelineId = useStore(state => state.pipelineId)
const { data: paramsConfig, isFetching: isFetchingParams } = useDraftPipelineProcessingParams({
pipeline_id: pipelineId!,
node_id: datasourceNodeId,
})
const initialData = useMemo(() => {
const variables = paramsConfig?.variables || []
return variables.reduce((acc, item) => {
const type = VAR_TYPE_MAP[item.type]
if ([BaseFieldType.textInput, BaseFieldType.paragraph, BaseFieldType.select].includes(type))
acc[item.variable] = item.default_value ?? ''
if (type === BaseFieldType.numberInput)
acc[item.variable] = item.default_value ?? 0
if (type === BaseFieldType.checkbox)
acc[item.variable] = true
if ([BaseFieldType.file, BaseFieldType.fileList].includes(type))
acc[item.variable] = []
return acc
}, {} as Record<string, any>)
}, [paramsConfig])
const configurations = useMemo(() => {
const variables = paramsConfig?.variables || []
const configs: BaseConfiguration[] = variables.map(item => ({
type: VAR_TYPE_MAP[item.type],
variable: item.variable,
label: item.label,
required: item.required,
maxLength: item.max_length,
options: item.options?.map(option => ({
label: option,
value: option,
})),
showConditions: [],
placeholder: item.placeholder,
tooltip: item.tooltips,
unit: item.unit,
allowedFileTypes: item.allowed_file_types,
allowedFileExtensions: item.allowed_file_extensions,
allowedFileUploadMethods: item.allowed_file_upload_methods,
}))
return configs
}, [paramsConfig])
return {
isFetchingParams,
initialData,
configurations,
paramsConfig,
}
}

View File

@@ -1,9 +1,10 @@
import React, { useCallback } from 'react'
import { generateZodSchema } from '@/app/components/base/form/form-scenarios/base/utils'
import { useConfigurations } from './hooks'
import { useInputVariables } from './hooks'
import Options from './options'
import Actions from './actions'
import type { CustomActionsProps } from '@/app/components/base/form/components/form/actions'
import { useConfigurations, useInitialData } from '@/app/components/rag-pipeline/hooks/use-input-fields'
type DocumentProcessingProps = {
dataSourceNodeId: string
@@ -16,7 +17,9 @@ const DocumentProcessing = ({
onProcess,
onBack,
}: DocumentProcessingProps) => {
const { isFetchingParams, initialData, configurations } = useConfigurations(dataSourceNodeId)
const { isFetchingParams, paramsConfig } = useInputVariables(dataSourceNodeId)
const initialData = useInitialData(paramsConfig?.variables || [])
const configurations = useConfigurations(paramsConfig?.variables || [])
const schema = generateZodSchema(configurations)
const renderCustomActions = useCallback((props: CustomActionsProps) => (

View File

@@ -0,0 +1,14 @@
import React from 'react'
import { useTranslation } from 'react-i18next'
const FooterTips = () => {
const { t } = useTranslation()
return (
<div className='system-xs-regular flex grow flex-col justify-end p-4 pt-2 text-text-tertiary'>
{t('datasetPipeline.testRun.tooltip')}
</div>
)
}
export default React.memo(FooterTips)

View File

@@ -1,4 +1,3 @@
import Tooltip from '@/app/components/base/tooltip'
import React from 'react'
import { useTranslation } from 'react-i18next'
import StepIndicator from './step-indicator'
@@ -16,12 +15,8 @@ const Header = ({
return (
<div className='flex flex-col gap-y-0.5 px-3 pb-2 pt-3.5'>
<div className='flex items-center gap-x-1 pl-1 pr-8'>
<span className='system-md-semibold-uppercase text-text-primary'>{t('datasetPipeline.testRun.title')}</span>
<Tooltip
popupContent={t('datasetPipeline.testRun.tooltip')}
popupClassName='max-w-[240px]'
/>
<div className='system-md-semibold-uppercase flex items-center gap-x-1 pl-1 pr-8 text-text-primary'>
{t('datasetPipeline.testRun.title')}
</div>
<StepIndicator steps={steps} currentStep={currentStep} />
</div>

View File

@@ -1,11 +1,10 @@
import { useTranslation } from 'react-i18next'
import type { DataSourceOption, Datasource } from './types'
import type { DataSourceOption } from './types'
import { TestRunStep } from './types'
import { useNodes } from 'reactflow'
import { BlockEnum } from '@/app/components/workflow/types'
import type { DataSourceNodeType } from '@/app/components/workflow/nodes/data-source/types'
import { useCallback, useMemo, useState } from 'react'
import type { DatasourceType } from '@/models/pipeline'
import type { CrawlResult } from '@/models/datasets'
import { type CrawlResultItem, CrawlStep, type FileItem } from '@/models/datasets'
import produce from 'immer'
@@ -45,18 +44,6 @@ export const useTestRunSteps = () => {
export const useDatasourceOptions = () => {
const nodes = useNodes<DataSourceNodeType>()
const datasourceNodes = nodes.filter(node => node.data.type === BlockEnum.DataSource)
const datasources: Datasource[] = useMemo(() => {
return datasourceNodes.map((node) => {
return {
nodeId: node.id,
type: node.data.provider_type as DatasourceType,
description: node.data.datasource_label,
docTitle: 'How to use?',
docLink: '',
fileExtensions: node.data.fileExtensions || [],
}
})
}, [datasourceNodes])
const options = useMemo(() => {
const options: DataSourceOption[] = []
@@ -68,10 +55,30 @@ export const useDatasourceOptions = () => {
data: node.data,
})
})
if (process.env.NODE_ENV === 'development') {
// todo: delete mock data
options.push({
label: 'Google Drive',
value: '123456',
// @ts-expect-error mock data
data: {
datasource_parameters: {},
datasource_configurations: {},
type: BlockEnum.DataSource,
title: 'Google Drive',
plugin_id: 'langgenius/google-drive',
provider_type: 'online_drive',
provider_name: 'google_drive',
datasource_name: 'google-drive',
datasource_label: 'Google Drive',
selected: false,
},
})
}
return options
}, [datasourceNodes])
return { datasources, options }
return options
}
export const useLocalFile = () => {

View File

@@ -15,6 +15,8 @@ import { DatasourceType } from '@/models/pipeline'
import { TransferMethod } from '@/types/app'
import CloseButton from './close-button'
import Header from './header'
import FooterTips from './footer-tips'
import OnlineDrive from './data-source/online-drive'
const TestRunPanel = () => {
const setShowDebugAndPreviewPanel = useWorkflowStoreWithSelector(state => state.setShowDebugAndPreviewPanel)
@@ -50,17 +52,18 @@ const TestRunPanel = () => {
const isVectorSpaceFull = plan.usage.vectorSpace >= plan.total.vectorSpace
const isShowVectorSpaceFull = allFileLoaded && isVectorSpaceFull && enableBilling
const datasourceType = datasource?.nodeData.provider_type
const nextBtnDisabled = useMemo(() => {
if (!datasource) return true
if (datasource.type === DatasourceType.localFile)
if (datasourceType === DatasourceType.localFile)
return isShowVectorSpaceFull || !fileList.length || fileList.some(file => !file.file.id)
if (datasource.type === DatasourceType.onlineDocument)
if (datasourceType === DatasourceType.onlineDocument)
return isShowVectorSpaceFull || !onlineDocuments.length
if (datasource.type === DatasourceType.websiteCrawl)
if (datasourceType === DatasourceType.websiteCrawl)
return isShowVectorSpaceFull || !websitePages.length
return false
}, [datasource, isShowVectorSpaceFull, fileList, onlineDocuments.length, websitePages.length])
}, [datasource, datasourceType, isShowVectorSpaceFull, fileList, onlineDocuments.length, websitePages.length])
const handleClose = () => {
setShowDebugAndPreviewPanel(false)
@@ -70,7 +73,7 @@ const TestRunPanel = () => {
if (!datasource)
return
const datasourceInfoList: Record<string, any>[] = []
if (datasource.type === DatasourceType.localFile) {
if (datasourceType === DatasourceType.localFile) {
const { id, name, type, size, extension, mime_type } = fileList[0].file
const documentInfo = {
related_id: id,
@@ -84,7 +87,7 @@ const TestRunPanel = () => {
}
datasourceInfoList.push(documentInfo)
}
if (datasource.type === DatasourceType.onlineDocument) {
if (datasourceType === DatasourceType.onlineDocument) {
const { workspace_id, ...rest } = onlineDocuments[0]
const documentInfo = {
workspace_id,
@@ -92,15 +95,15 @@ const TestRunPanel = () => {
}
datasourceInfoList.push(documentInfo)
}
if (datasource.type === DatasourceType.websiteCrawl)
if (datasourceType === DatasourceType.websiteCrawl)
datasourceInfoList.push(websitePages[0])
handleRun({
inputs: data,
start_node_id: datasource.nodeId,
datasource_type: datasource.type,
datasource_type: datasourceType,
datasource_info_list: datasourceInfoList,
})
}, [datasource, fileList, handleRun, onlineDocuments, websitePages])
}, [datasource, datasourceType, fileList, handleRun, onlineDocuments, websitePages])
return (
<div
@@ -108,7 +111,7 @@ const TestRunPanel = () => {
>
<CloseButton handleClose={handleClose} />
<Header steps={steps} currentStep={currentStep} />
<div className='grow overflow-y-auto'>
<div className='flex grow flex-col overflow-y-auto'>
{
currentStep === 1 && (
<>
@@ -117,37 +120,29 @@ const TestRunPanel = () => {
dataSourceNodeId={datasource?.nodeId || ''}
onSelect={setDatasource}
/>
{datasource?.type === DatasourceType.localFile && (
{datasourceType === DatasourceType.localFile && (
<LocalFile
files={fileList}
allowedExtensions={datasource?.fileExtensions || []}
allowedExtensions={datasource!.nodeData.fileExtensions || []}
updateFile={updateFile}
updateFileList={updateFileList}
notSupportBatchUpload={false} // only support single file upload in test run
/>
)}
{datasource?.type === DatasourceType.onlineDocument && (
{datasourceType === DatasourceType.onlineDocument && (
<OnlineDocuments
nodeId={datasource?.nodeId || ''}
headerInfo={{
title: datasource.description,
docTitle: datasource.docTitle || '',
docLink: datasource.docLink || '',
}}
nodeId={datasource!.nodeId}
nodeData={datasource!.nodeData}
onlineDocuments={onlineDocuments}
updateOnlineDocuments={updateOnlineDocuments}
isInPipeline
/>
)}
{datasource?.type === DatasourceType.websiteCrawl && (
{datasourceType === DatasourceType.websiteCrawl && (
<WebsiteCrawl
nodeId={datasource?.nodeId || ''}
nodeId={datasource!.nodeId}
checkedCrawlResult={websitePages}
headerInfo={{
title: datasource.description,
docTitle: datasource.docTitle || '',
docLink: datasource.docLink || '',
}}
nodeData={datasource!.nodeData}
crawlResult={crawlResult}
setCrawlResult={setCrawlResult}
step={step}
@@ -156,18 +151,25 @@ const TestRunPanel = () => {
isInPipeline
/>
)}
{datasourceType === DatasourceType.onlineDrive && (
<OnlineDrive
nodeData={datasource!.nodeData}
/>
)
}
{isShowVectorSpaceFull && (
<VectorSpaceFull />
)}
</div>
<Actions disabled={nextBtnDisabled} handleNextStep={handleNextStep} />
<FooterTips />
</>
)
}
{
currentStep === 2 && (
<DocumentProcessing
dataSourceNodeId={datasource?.nodeId || ''}
dataSourceNodeId={datasource!.nodeId}
onProcess={handleProcess}
onBack={handleBackStep}
/>

View File

@@ -1,5 +1,4 @@
import type { DataSourceNodeType } from '@/app/components/workflow/nodes/data-source/types'
import type { DatasourceType } from '@/models/pipeline'
export enum TestRunStep {
dataSource = 'dataSource',
@@ -14,9 +13,5 @@ export type DataSourceOption = {
export type Datasource = {
nodeId: string
type: DatasourceType
description: string
docTitle?: string
docLink?: string
fileExtensions?: string[]
nodeData: DataSourceNodeType
}

View File

@@ -1,23 +1,25 @@
import type { BaseConfiguration } from '@/app/components/base/form/form-scenarios/base/types'
import { BaseFieldType } from '@/app/components/base/form/form-scenarios/base/types'
import { type RAGPipelineVariables, VAR_TYPE_MAP } from '@/models/pipeline'
import { useMemo } from 'react'
import type { BaseConfiguration } from '@/app/components/base/form/form-scenarios/base/types'
import { type RAGPipelineVariables, VAR_TYPE_MAP } from '@/models/pipeline'
import { BaseFieldType } from '@/app/components/base/form/form-scenarios/base/types'
export const useInitialData = (variables: RAGPipelineVariables) => {
export const useInitialData = (variables: RAGPipelineVariables, lastRunInputData?: Record<string, any>) => {
const initialData = useMemo(() => {
return variables.reduce((acc, item) => {
const type = VAR_TYPE_MAP[item.type]
const variableName = item.variable
const defaultValue = lastRunInputData?.[variableName] || item.default_value
if ([BaseFieldType.textInput, BaseFieldType.paragraph, BaseFieldType.select].includes(type))
acc[item.variable] = item.default_value ?? ''
acc[variableName] = defaultValue ?? ''
if (type === BaseFieldType.numberInput)
acc[item.variable] = item.default_value ?? 0
acc[variableName] = defaultValue ?? 0
if (type === BaseFieldType.checkbox)
acc[item.variable] = true
acc[variableName] = defaultValue ?? false
if ([BaseFieldType.file, BaseFieldType.fileList].includes(type))
acc[item.variable] = []
acc[variableName] = defaultValue ?? []
return acc
}, {} as Record<string, any>)
}, [variables])
}, [lastRunInputData, variables])
return initialData
}
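
The shared hook now accepts optional last-run input data, so one initializer serves both first runs (defaults) and re-runs (previous values). A sketch of the per-variable precedence it applies; note that because the first step uses `||`, a falsy last-run value such as `0`, `''`, or `false` falls back to the variable's default:

```ts
// Per-variable precedence, as shown in the hook above:
//   (lastRunInputData?.[variable] || default_value) ?? typeFallback
const resolveInitialValue = (lastRunValue: unknown, defaultValue: unknown, typeFallback: unknown) =>
  (lastRunValue || defaultValue) ?? typeFallback

resolveInitialValue(undefined, 'Untitled', '')   // 'Untitled': first run, the default wins
resolveInitialValue('Q3 report', 'Untitled', '') // 'Q3 report': the re-run value wins
resolveInitialValue(0, 10, 0)                    // 10: a falsy re-run value falls through to the default
```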

View File

@@ -0,0 +1,115 @@
import { useCallback } from 'react'
import { getOutgoers, useStoreApi } from 'reactflow'
import { BlockEnum, type Node, type ValueSelector } from '../../workflow/types'
import { uniqBy } from 'lodash-es'
import { findUsedVarNodes, updateNodeVars } from '../../workflow/nodes/_base/components/variable/utils'
import type { DataSourceNodeType } from '../../workflow/nodes/data-source/types'
export const usePipeline = () => {
const store = useStoreApi()
const getAllDatasourceNodes = useCallback(() => {
const {
getNodes,
} = store.getState()
const nodes = getNodes() as Node<DataSourceNodeType>[]
const datasourceNodes = nodes.filter(node => node.data.type === BlockEnum.DataSource)
return datasourceNodes
}, [store])
const getAllNodesInSameBranch = useCallback((nodeId: string) => {
const {
getNodes,
edges,
} = store.getState()
const nodes = getNodes()
const list: Node[] = []
const traverse = (root: Node, callback: (node: Node) => void) => {
if (root) {
const outgoers = getOutgoers(root, nodes, edges)
if (outgoers.length) {
outgoers.forEach((node) => {
callback(node)
traverse(node, callback)
})
}
}
}
if (nodeId === 'shared') {
const allDatasourceNodes = getAllDatasourceNodes()
if (allDatasourceNodes.length === 0)
return []
list.push(...allDatasourceNodes)
allDatasourceNodes.forEach((node) => {
traverse(node, (childNode) => {
list.push(childNode)
})
})
}
else {
const currentNode = nodes.find(node => node.id === nodeId)
if (!currentNode)
return []
list.push(currentNode)
traverse(currentNode, (node) => {
list.push(node)
})
}
return uniqBy(list, 'id')
}, [getAllDatasourceNodes, store])
const isVarUsedInNodes = useCallback((varSelector: ValueSelector) => {
const nodeId = varSelector[1] // varSelector[0] is the 'rag' prefix (VARIABLE_PREFIX), so the node id sits at index 1
const afterNodes = getAllNodesInSameBranch(nodeId)
const effectNodes = findUsedVarNodes(varSelector, afterNodes)
return effectNodes.length > 0
}, [getAllNodesInSameBranch])
const handleInputVarRename = useCallback((nodeId: string, oldValueSelector: ValueSelector, newVarSelector: ValueSelector) => {
const { getNodes, setNodes } = store.getState()
const afterNodes = getAllNodesInSameBranch(nodeId)
const effectNodes = findUsedVarNodes(oldValueSelector, afterNodes)
if (effectNodes.length > 0) {
const newNodes = getNodes().map((node) => {
if (effectNodes.find(n => n.id === node.id))
return updateNodeVars(node, oldValueSelector, newVarSelector)
return node
})
setNodes(newNodes)
}
}, [getAllNodesInSameBranch, store])
const removeUsedVarInNodes = useCallback((varSelector: ValueSelector) => {
const nodeId = varSelector[1] // varSelector[0] is the 'rag' prefix (VARIABLE_PREFIX), so the node id sits at index 1
const { getNodes, setNodes } = store.getState()
const afterNodes = getAllNodesInSameBranch(nodeId)
const effectNodes = findUsedVarNodes(varSelector, afterNodes)
if (effectNodes.length > 0) {
const newNodes = getNodes().map((node) => {
if (effectNodes.find(n => n.id === node.id))
return updateNodeVars(node, varSelector, [])
return node
})
setNodes(newNodes)
}
}, [getAllNodesInSameBranch, store])
return {
handleInputVarRename,
isVarUsedInNodes,
removeUsedVarInNodes,
}
}
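A hedged sketch of how the hook might be consumed when a pipeline input field is renamed or deleted; the selectors follow the ['rag', nodeId, variable] convention noted in the comments above, and datasourceNodeId is a placeholder:

const { isVarUsedInNodes, handleInputVarRename, removeUsedVarInNodes } = usePipeline()

const oldSelector = ['rag', datasourceNodeId, 'page_url']
const newSelector = ['rag', datasourceNodeId, 'source_url']

// Rename: rewrite every downstream reference to the old selector.
if (isVarUsedInNodes(oldSelector))
  handleInputVarRename(datasourceNodeId, oldSelector, newSelector)

// Delete: clear dangling references instead of rewriting them.
removeUsedVarInNodes(['rag', datasourceNodeId, 'obsolete_field'])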


@@ -60,6 +60,7 @@ import {
} from './use-workflow'
import { WorkflowHistoryEvent, useWorkflowHistory } from './use-workflow-history'
import { useNodesMetaData } from './use-nodes-meta-data'
import type { RAGPipelineVariables } from '@/models/pipeline'
export const useNodesInteractions = () => {
const { t } = useTranslation()
@@ -549,7 +550,7 @@ export const useNodesInteractions = () => {
if (!currentNode)
return
if (nodesMetaDataMap?.[currentNode.data.type as BlockEnum].metaData.isUndeletable)
if (nodesMetaDataMap?.[currentNode.data.type as BlockEnum]?.metaData.isUndeletable)
return
if (currentNode.data.type === BlockEnum.Iteration) {
@@ -628,6 +629,19 @@ export const useNodesInteractions = () => {
}
}
if (currentNode.data.type === BlockEnum.DataSource) {
const { id } = currentNode
const { ragPipelineVariables, setRagPipelineVariables } = workflowStore.getState()
if (ragPipelineVariables && setRagPipelineVariables) {
const newRagPipelineVariables: RAGPipelineVariables = []
ragPipelineVariables.forEach((variable) => {
if (variable.belong_to_node_id === id) return
newRagPipelineVariables.push(variable)
})
setRagPipelineVariables(newRagPipelineVariables)
}
}
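// For reference (a sketch, not the committed code): the forEach/push pair above is
// equivalent to a single filter over the stored variables:
//   setRagPipelineVariables(ragPipelineVariables.filter(v => v.belong_to_node_id !== id))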
const connectedEdges = getConnectedEdges([{ id: nodeId } as Node], edges)
const nodesConnectedSourceOrTargetHandleIdsMap = getNodesConnectedSourceOrTargetHandleIdsMap(connectedEdges.map(edge => ({ type: 'remove', edge })), nodes)
const newNodes = produce(nodes, (draft: Node[]) => {


@@ -63,6 +63,15 @@ export const useWorkflow = () => {
workflowStore.setState({ panelWidth: width })
}, [workflowStore])
const getNodeById = useCallback((nodeId: string) => {
const {
getNodes,
} = store.getState()
const nodes = getNodes()
const currentNode = nodes.find(node => node.id === nodeId)
return currentNode
}, [store])
const getTreeLeafNodes = useCallback((nodeId: string) => {
const {
getNodes,
@@ -445,6 +454,7 @@ export const useWorkflow = () => {
return {
setPanelWidth,
getNodeById,
getTreeLeafNodes,
getBeforeNodesInSameBranch,
getBeforeNodesInSameBranchIncludeParent,


@@ -62,7 +62,7 @@ export const isSpecialVar = (prefix: string): boolean => {
return ['sys', 'env', 'conversation', 'rag'].includes(prefix)
}
const inputVarTypeToVarType = (type: InputVarType): VarType => {
export const inputVarTypeToVarType = (type: InputVarType): VarType => {
return ({
[InputVarType.number]: VarType.number,
[InputVarType.singleFile]: VarType.file,
@@ -838,9 +838,9 @@ export const getVarType = ({
})
const targetVarNodeId = (() => {
if(isSystem)
if (isSystem)
return startNode?.id
if(isInNodeRagVariable)
if (isInNodeRagVariable)
return valueSelector[1]
return valueSelector[0]
})()
@@ -857,14 +857,14 @@ export const getVarType = ({
}
else {
const targetVar = curr.find((v: any) => {
if(isInNodeRagVariable)
if (isInNodeRagVariable)
return v.variable === valueSelector.join('.')
return v.variable === valueSelector[1]
})
})
if (!targetVar)
return VarType.string
if(isInNodeRagVariable)
if (isInNodeRagVariable)
return targetVar.type
const isStructuredOutputVar = !!targetVar.children?.schema?.properties
@@ -1084,6 +1084,13 @@ export const getNodeUsedVars = (node: Node): ValueSelector[] => {
res = [...(mixVars as ValueSelector[]), ...(vars as any)]
break
}
case BlockEnum.DataSource: {
const payload = data as DataSourceNodeType
const mixVars = matchNotSystemVars(Object.keys(payload.datasource_parameters)?.filter(key => payload.datasource_parameters[key].type === ToolVarType.mixed).map(key => payload.datasource_parameters[key].value) as string[])
const vars = Object.keys(payload.datasource_parameters).filter(key => payload.datasource_parameters[key].type === ToolVarType.variable).map(key => payload.datasource_parameters[key].value as string) || []
res = [...(mixVars as ValueSelector[]), ...(vars as any)]
break
}
case BlockEnum.VariableAssigner: {
res = (data as VariableAssignerNodeType)?.variables
@@ -1357,6 +1364,30 @@ export const updateNodeVars = (oldNode: Node, oldVarSelector: ValueSelector, new
}
break
}
case BlockEnum.DataSource: {
const payload = data as DataSourceNodeType
const hasShouldRenameVar = Object.keys(payload.datasource_parameters)?.some(key => payload.datasource_parameters[key].type !== ToolVarType.constant)
if (hasShouldRenameVar) {
Object.keys(payload.datasource_parameters).forEach((key) => {
const value = payload.datasource_parameters[key]
const { type } = value
if (type === ToolVarType.variable) {
payload.datasource_parameters[key] = {
...value,
value: newVarSelector,
}
}
if (type === ToolVarType.mixed) {
payload.datasource_parameters[key] = {
...value,
value: replaceOldVarInText(payload.datasource_parameters[key].value as string, oldVarSelector, newVarSelector),
}
}
})
}
break
}
case BlockEnum.VariableAssigner: {
const payload = data as VariableAssignerNodeType
if (payload.variables) {


@@ -9,14 +9,18 @@ import {
RiMoreLine,
} from '@remixicon/react'
import produce from 'immer'
import { useReactFlow, useStoreApi } from 'reactflow'
import {
useNodes,
useReactFlow,
useStoreApi,
} from 'reactflow'
import RemoveButton from '../remove-button'
import useAvailableVarList from '../../hooks/use-available-var-list'
import VarReferencePopup from './var-reference-popup'
import { getNodeInfoById, isConversationVar, isENV, isRagVariableVar, isSystemVar, varTypeToStructType } from './utils'
import ConstantField from './constant-field'
import cn from '@/utils/classnames'
import type { Node, NodeOutPutVar, ValueSelector, Var } from '@/app/components/workflow/types'
import type { CommonNodeType, NodeOutPutVar, ValueSelector, Var } from '@/app/components/workflow/types'
import type { CredentialFormSchema } from '@/app/components/header/account-setting/model-provider-page/declarations'
import { BlockEnum } from '@/app/components/workflow/types'
import { VarBlockIcon } from '@/app/components/workflow/block-icon'
@@ -41,7 +45,7 @@ import { isExceptionVariable } from '@/app/components/workflow/utils'
import VarFullPathPanel from './var-full-path-panel'
import { noop } from 'lodash-es'
import { InputField } from '@/app/components/base/icons/src/vender/pipeline'
import { useStore as useWorkflowStore } from '@/app/components/workflow/store'
const TRIGGER_DEFAULT_WIDTH = 227
type Props = {
@@ -101,11 +105,8 @@ const VarReferencePicker: FC<Props> = ({
}) => {
const { t } = useTranslation()
const store = useStoreApi()
const {
getNodes,
} = store.getState()
const nodes = useNodes<CommonNodeType>()
const isChatMode = useIsChatMode()
const { getCurrentVariableType } = useWorkflowVariables()
const { availableVars, availableNodesWithParent: availableNodes } = useAvailableVarList(nodeId, {
onlyLeafNodeVar,
@@ -119,12 +120,13 @@ const VarReferencePicker: FC<Props> = ({
return node.data.type === BlockEnum.Start
})
const node = getNodes().find(n => n.id === nodeId)
const isInIteration = !!node?.data.isInIteration
const iterationNode = isInIteration ? getNodes().find(n => n.id === node.parentId) : null
const node = nodes.find(n => n.id === nodeId)
const ragPipelineVariables = useWorkflowStore(s => s.ragPipelineVariables)
const isInIteration = !!(node?.data as any)?.isInIteration
const iterationNode = isInIteration ? nodes.find(n => n.id === node?.parentId) : null
const isInLoop = !!node?.data.isInLoop
const loopNode = isInLoop ? getNodes().find(n => n.id === node.parentId) : null
const isInLoop = !!(node?.data as any)?.isInLoop
const loopNode = isInLoop ? nodes.find(n => n.id === node?.parentId) : null
const triggerRef = useRef<HTMLDivElement>(null)
const [triggerWidth, setTriggerWidth] = useState(TRIGGER_DEFAULT_WIDTH)
@@ -137,7 +139,10 @@ const VarReferencePicker: FC<Props> = ({
const [varKindType, setVarKindType] = useState<VarKindType>(defaultVarKindType)
const isConstant = isSupportConstantValue && varKindType === VarKindType.constant
const outputVars = useMemo(() => (passedInAvailableVars || availableVars), [passedInAvailableVars, availableVars])
const outputVars = useMemo(() => {
const results = passedInAvailableVars || availableVars
return results
}, [passedInAvailableVars, availableVars])
const [open, setOpen] = useState(false)
useEffect(() => {
@@ -268,7 +273,7 @@ const VarReferencePicker: FC<Props> = ({
}, [availableNodes, reactflow, store])
const type = getCurrentVariableType({
parentNode: isInIteration ? iterationNode : loopNode,
parentNode: (isInIteration ? iterationNode : loopNode) as any,
valueSelector: value as ValueSelector,
availableNodes,
isChatMode,


@@ -4,7 +4,11 @@ import {
useWorkflow,
useWorkflowVariables,
} from '@/app/components/workflow/hooks'
import type { Node, ValueSelector, Var } from '@/app/components/workflow/types'
import type { NodeOutPutVar } from '@/app/components/workflow/types'
import { BlockEnum, type Node, type ValueSelector, type Var } from '@/app/components/workflow/types'
import { useStore as useWorkflowStore } from '@/app/components/workflow/store'
import { inputVarTypeToVarType } from '../../data-source/utils'
type Params = {
onlyLeafNodeVar?: boolean
hideEnv?: boolean
@@ -24,24 +28,49 @@ const useAvailableVarList = (nodeId: string, {
onlyLeafNodeVar: false,
filterVar: () => true,
}) => {
const { getTreeLeafNodes, getBeforeNodesInSameBranchIncludeParent } = useWorkflow()
const { getTreeLeafNodes, getNodeById, getBeforeNodesInSameBranchIncludeParent } = useWorkflow()
const { getNodeAvailableVars } = useWorkflowVariables()
const isChatMode = useIsChatMode()
const availableNodes = passedInAvailableNodes || (onlyLeafNodeVar ? getTreeLeafNodes(nodeId) : getBeforeNodesInSameBranchIncludeParent(nodeId))
const {
parentNode: iterationNode,
} = useNodeInfo(nodeId)
const availableVars = getNodeAvailableVars({
const currNode = getNodeById(nodeId)
const ragPipelineVariables = useWorkflowStore(s => s.ragPipelineVariables)
const isDataSourceNode = currNode?.data?.type === BlockEnum.DataSource
const dataSourceRagVars: NodeOutPutVar[] = []
if (isDataSourceNode) {
const ragVariablesInDataSource = ragPipelineVariables?.filter(ragVariable => ragVariable.belong_to_node_id === nodeId)
const filterVars = ragVariablesInDataSource?.filter(v => filterVar({
variable: v.variable,
type: inputVarTypeToVarType(v.type),
nodeId,
isRagVariable: true,
}, ['rag', nodeId, v.variable]))
if (filterVars?.length) {
dataSourceRagVars.push({
nodeId,
title: currNode.data?.title,
vars: filterVars.map((v) => {
return {
variable: `rag.${nodeId}.${v.variable}`,
type: inputVarTypeToVarType(v.type),
description: v.label,
isRagVariable: true,
} as Var
}),
})
}
}
const availableVars = [...getNodeAvailableVars({
parentNode: iterationNode,
beforeNodes: availableNodes,
isChatMode,
filterVar,
hideEnv,
hideChatVar,
})
}), ...dataSourceRagVars]
return {
availableVars,
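With this change, a data-source node's own pipeline inputs are appended to the picker under the rag namespace; one resulting entry looks roughly like this (node id, title, and variable names are illustrative):

// Types come from '@/app/components/workflow/types'.
const exampleEntry: NodeOutPutVar = {
  nodeId: 'ds-1',
  title: 'Notion Data Source',
  vars: [
    {
      variable: 'rag.ds-1.page_url',
      type: VarType.string, // via inputVarTypeToVarType
      description: 'Page URL',
      isRagVariable: true,
    } as Var,
  ],
}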


@@ -349,7 +349,7 @@ const translation = {
'addModalTitle': 'Add Input Field',
'editModalTitle': 'Edit Input Field',
'description': 'Setting for variable {{varName}}',
'fieldType': 'Field type',
'fieldType': 'Field Type',
'string': 'Short Text',
'text-input': 'Short Text',
'paragraph': 'Paragraph',
@@ -359,7 +359,7 @@ const translation = {
'multi-files': 'File List',
'notSet': 'Not set, try typing {{input}} in the prefix prompt',
'stringTitle': 'Form text box options',
'maxLength': 'Max length',
'maxLength': 'Max Length',
'options': 'Options',
'addOption': 'Add option',
'apiBasedVar': 'API-based Variable',
@@ -414,6 +414,7 @@ const translation = {
atLeastOneOption: 'At least one option is required',
optionRepeat: 'Has repeat options',
},
'startChecked': 'Start checked',
},
vision: {
name: 'Vision',


@@ -51,7 +51,7 @@ const translation = {
},
testRun: {
title: 'Test Run',
tooltip: 'In Test Run mode, only one file upload at a time is allowed for easier debugging and observation.',
tooltip: 'In test run mode, only one document can be imported at a time for easier debugging and observation.',
steps: {
dataSource: 'Data Source',
documentProcessing: 'Document Processing',
@@ -106,6 +106,10 @@ const translation = {
documentSettings: {
title: 'Document Settings',
},
onlineDrive: {
notConnected: '{{name}} is not connected',
notConnectedTip: 'To sync with {{name}}, you need to connect to {{name}} first.',
},
}
export default translation


@@ -252,7 +252,7 @@ const translation = {
'addModalTitle': 'Add Input Field',
'editModalTitle': 'Edit Input Field',
'description': 'Setting for variable {{varName}}',
'fieldType': 'Field type',
'fieldType': 'Field Type',
'string': 'Short Text',
'text-input': 'Short Text',
'paragraph': 'Paragraph',
@@ -260,7 +260,7 @@ const translation = {
'number': 'Number',
'notSet': 'Not set, try typing {{input}} in the prefix prompt',
'stringTitle': 'Form text box options',
'maxLength': 'Max length',
'maxLength': 'Max Length',
'options': 'Options',
'addOption': 'Add option',
'apiBasedVar': 'API-based Variable',


@@ -408,6 +408,7 @@ const translation = {
atLeastOneOption: '至少需要一个选项',
optionRepeat: '选项不能重复',
},
'startChecked': '默认勾选',
},
vision: {
name: '视觉',


@@ -51,7 +51,7 @@ const translation = {
},
testRun: {
title: '测试运行',
tooltip: '在测试运行模式下,每次只能上传一个文件,便于调试和观察。',
tooltip: '在测试运行模式下,每次只能导入一个文档,以便于调试和观察。',
steps: {
dataSource: '数据源',
documentProcessing: '文档处理',
@@ -106,6 +106,10 @@ const translation = {
documentSettings: {
title: '文档设置',
},
onlineDrive: {
notConnected: '{{name}} 未绑定',
notConnectedTip: '同步 {{name}} 内容前，须先绑定 {{name}}。',
},
}
export default translation


@@ -774,16 +774,7 @@ export type CreateDatasetResponse = {
updated_by: string
updated_at: number
pipeline_id: string
}
export type NotionPagePreviewRequest = {
workspaceID: string
pageID: string
pageType: string
}
export type NotionPagePreviewResponse = {
content: string
dataset_id: string
}
export type IndexingStatusBatchRequest = {


@@ -11,6 +11,7 @@ export enum DatasourceType {
localFile = 'local_file',
onlineDocument = 'online_document',
websiteCrawl = 'website_crawl',
onlineDrive = 'online_drive',
}
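Since the enum gains a fourth member, any exhaustive handling needs an extra branch; a minimal sketch (labels are placeholders, not shipped copy):

const datasourceTypeLabel: Record<DatasourceType, string> = {
  [DatasourceType.localFile]: 'Local File',
  [DatasourceType.onlineDocument]: 'Online Document',
  [DatasourceType.websiteCrawl]: 'Website Crawl',
  [DatasourceType.onlineDrive]: 'Online Drive',
}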
export type PipelineTemplateListParams = {
@@ -254,3 +255,15 @@ export type PipelineExecutionLogResponse = {
input_data: Record<string, any>
datasource_node_id: string
}
export type OnlineDocumentPreviewRequest = {
workspaceID: string
pageID: string
pageType: string
pipelineId: string
datasourceNodeId: string
}
export type OnlineDocumentPreviewResponse = {
content: string
}


@@ -251,3 +251,15 @@ export const useCreatePipelineDataset = (
...mutationOptions,
})
}
export const useCreatePipelineDatasetFromCustomized = (
mutationOptions: MutationOptions<CreateDatasetResponse, Error, CreateDatasetReq> = {},
) => {
return useMutation({
mutationKey: [NAME_SPACE, 'create-pipeline-dataset'],
mutationFn: (req: CreateDatasetReq) => {
return post<CreateDatasetResponse>('/rag/pipeline/dataset', { body: req })
},
...mutationOptions,
})
}
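A hedged usage sketch of the new mutation as TemplateCard might call it; the request body shown is an assumption about CreateDatasetReq, not its full shape:

const { mutateAsync: createDataset } = useCreatePipelineDatasetFromCustomized()

const handleCreate = async () => {
  const dataset = await createDataset({ name: 'My knowledge base' } as CreateDatasetReq)
  // CreateDatasetResponse carries pipeline_id (see the models change above),
  // which the caller can use to open the newly created pipeline.
  console.log(dataset.pipeline_id)
}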


@@ -6,8 +6,6 @@ import type {
DatasetListRequest,
IndexingStatusBatchRequest,
IndexingStatusBatchResponse,
NotionPagePreviewRequest,
NotionPagePreviewResponse,
ProcessRuleResponse,
RelatedAppResponse,
} from '@/models/datasets'
@@ -57,16 +55,6 @@ export const useDatasetRelatedApps = (datasetId: string) => {
})
}
export const usePreviewNotionPage = (params: NotionPagePreviewRequest) => {
const { workspaceID, pageID, pageType } = params
return useQuery({
queryKey: [NAME_SPACE, 'preview-notion-page'],
queryFn: () => get<NotionPagePreviewResponse>(`notion/workspaces/${workspaceID}/pages/${pageID}/${pageType}/preview`),
enabled: !!workspaceID && !!pageID && !!pageType,
staleTime: 0,
})
}
export const useIndexingStatusBatch = (
params: IndexingStatusBatchRequest,
mutationOptions: MutationOptions<IndexingStatusBatchResponse, Error> = {},


@@ -127,6 +127,6 @@ export const useDocumentMetadata = (payload: {
})
}
export const useInvalidDocumentDetailKey = () => {
export const useInvalidDocumentDetail = () => {
return useInvalid(useDocumentDetailKey)
}


@@ -1,12 +1,15 @@
import type { MutationOptions } from '@tanstack/react-query'
import { useMutation, useQuery, useQueryClient } from '@tanstack/react-query'
import { del, get, patch, post } from './base'
import { DatasourceType } from '@/models/pipeline'
import type {
DeleteTemplateResponse,
ExportTemplateDSLResponse,
ImportPipelineDSLConfirmResponse,
ImportPipelineDSLRequest,
ImportPipelineDSLResponse,
OnlineDocumentPreviewRequest,
OnlineDocumentPreviewResponse,
PipelineCheckDependenciesResponse,
PipelineExecutionLogRequest,
PipelineExecutionLogResponse,
@@ -324,3 +327,25 @@ export const usePipelineExecutionLog = (params: PipelineExecutionLogRequest) =>
staleTime: 0,
})
}
export const usePreviewOnlineDocument = () => {
return useMutation({
mutationKey: [NAME_SPACE, 'preview-online-document'],
mutationFn: (params: OnlineDocumentPreviewRequest) => {
const { pipelineId, datasourceNodeId, workspaceID, pageID, pageType } = params
return post<OnlineDocumentPreviewResponse>(
`/rag/pipelines/${pipelineId}/workflows/published/datasource/nodes/${datasourceNodeId}/preview`,
{
body: {
datasource_type: DatasourceType.onlineDocument,
inputs: {
workspace_id: workspaceID,
page_id: pageID,
type: pageType,
},
},
},
)
},
})
}
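And a sketch of how OnlineDocumentPreview might invoke the preview mutation; the page object and the setPreviewContent setter are illustrative:

const { mutateAsync: previewOnlineDocument } = usePreviewOnlineDocument()

const handlePreview = async (page: { workspace_id: string; page_id: string; type: string }) => {
  const { content } = await previewOnlineDocument({
    pipelineId,           // current pipeline id
    datasourceNodeId,     // id of the data-source node under test
    workspaceID: page.workspace_id,
    pageID: page.page_id,
    pageType: page.type,
  })
  setPreviewContent(content)
}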