mirror of https://github.com/langgenius/dify.git
synced 2026-01-17 04:20:01 +00:00

Compare commits

6 Commits

| Author | SHA1 | Date |
|---|---|---|
| | 8e837dde1a | |
| | 9ae91a2ec3 | |
| | 276d3d10a0 | |
| | f13623184a | |
| | ef61e1487f | |
| | 701e2b334f | |
```diff
@@ -100,7 +100,7 @@ class Config:
         self.CONSOLE_URL = get_env('CONSOLE_URL')
         self.API_URL = get_env('API_URL')
         self.APP_URL = get_env('APP_URL')
-        self.CURRENT_VERSION = "0.3.17"
+        self.CURRENT_VERSION = "0.3.18"
         self.COMMIT_SHA = get_env('COMMIT_SHA')
         self.EDITION = "SELF_HOSTED"
         self.DEPLOY_ENV = get_env('DEPLOY_ENV')
```
```diff
@@ -2,7 +2,6 @@ import json
 from typing import Type
 
 import requests
-from xinference.client import RESTfulGenerateModelHandle, RESTfulChatModelHandle, RESTfulChatglmCppChatModelHandle
 
 from core.helper import encrypter
 from core.model_providers.models.embedding.xinference_embedding import XinferenceEmbedding
```
```diff
@@ -73,7 +72,7 @@ class XinferenceProvider(BaseModelProvider):
             top_p=KwargRule[float](min=0, max=1, default=0.7),
             presence_penalty=KwargRule[float](enabled=False),
             frequency_penalty=KwargRule[float](enabled=False),
-            max_tokens=KwargRule[int](alias='max_new_tokens', min=10, max=4000, default=256),
+            max_tokens=KwargRule[int](min=10, max=4000, default=256),
         )
 
 
```
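Dropping the `alias` means the `max_tokens` kwarg is now passed to the backend under its own name rather than being remapped to `max_new_tokens`. A minimal sketch of what such an alias rule does, in TypeScript (the `KwargRule` interface and `applyRules` helper here are illustrative, not Dify's actual API):

```ts
// Illustrative rule shape: `alias` remaps a kwarg's key before the provider call.
interface KwargRule {
  alias?: string
  min?: number
  max?: number
  default?: number
}

// Rename each kwarg to its alias (when one is set); others pass through unchanged.
function applyRules(
  kwargs: Record<string, number>,
  rules: Record<string, KwargRule>,
): Record<string, number> {
  const out: Record<string, number> = {}
  for (const [name, value] of Object.entries(kwargs))
    out[rules[name]?.alias ?? name] = value
  return out
}

// With the alias removed, { max_tokens: 256 } is sent as-is instead of
// becoming { max_new_tokens: 256 }.
```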
```diff
@@ -1,7 +1,7 @@
 {
   "human_prefix": "用户",
   "assistant_prefix": "助手",
-  "context_prompt": "用户在与一个客观的助手对话。助手会尊重找到的材料,给出全面专业的解释,但不会过度演绎。同时回答中不会暴露引用的材料:\n\n```\n引用材料\n{{context}}\n```\n\n",
+  "context_prompt": "用户在与一个客观的助手对话。助手会尊重找到的材料,给出全面专业的解释,但不会过度演绎。同时回答中不会暴露引用的材料:\n\n```\n{{context}}\n```\n\n",
   "histories_prompt": "用户和助手的历史对话内容如下:\n```\n{{histories}}\n```\n\n",
   "system_prompt_orders": [
     "context_prompt",
```
```diff
@@ -1,5 +1,5 @@
 {
-  "context_prompt": "用户在与一个客观的助手对话。助手会尊重找到的材料,给出全面专业的解释,但不会过度演绎。同时回答中不会暴露引用的材料:\n\n```\n引用材料\n{{context}}\n```\n",
+  "context_prompt": "用户在与一个客观的助手对话。助手会尊重找到的材料,给出全面专业的解释,但不会过度演绎。同时回答中不会暴露引用的材料:\n\n```\n{{context}}\n```\n",
   "system_prompt_orders": [
     "context_prompt",
     "pre_prompt"
```
```diff
@@ -9,11 +9,11 @@ from xinference.client import RESTfulChatglmCppChatModelHandle, \
 
 class XinferenceLLM(Xinference):
     def _call(
-        self,
-        prompt: str,
-        stop: Optional[List[str]] = None,
-        run_manager: Optional[CallbackManagerForLLMRun] = None,
-        **kwargs: Any,
+            self,
+            prompt: str,
+            stop: Optional[List[str]] = None,
+            run_manager: Optional[CallbackManagerForLLMRun] = None,
+            **kwargs: Any,
     ) -> str:
         """Call the xinference model and return the output.
```
```diff
@@ -56,10 +56,10 @@ class XinferenceLLM(Xinference):
         if generate_config and generate_config.get("stream"):
             combined_text_output = ""
             for token in self._stream_generate(
-                model=model,
-                prompt=prompt,
-                run_manager=run_manager,
-                generate_config=generate_config,
+                    model=model,
+                    prompt=prompt,
+                    run_manager=run_manager,
+                    generate_config=generate_config,
             ):
                 combined_text_output += token
             return combined_text_output
```
```diff
@@ -73,10 +73,10 @@ class XinferenceLLM(Xinference):
         if generate_config and generate_config.get("stream"):
             combined_text_output = ""
             for token in self._stream_generate(
-                model=model,
-                prompt=prompt,
-                run_manager=run_manager,
-                generate_config=generate_config,
+                    model=model,
+                    prompt=prompt,
+                    run_manager=run_manager,
+                    generate_config=generate_config,
             ):
                 combined_text_output += token
             completion = combined_text_output
```
```diff
@@ -89,13 +89,13 @@ class XinferenceLLM(Xinference):
 
         return completion
 
     def _stream_generate(
-            self,
-            model: Union["RESTfulGenerateModelHandle", "RESTfulChatModelHandle", "RESTfulChatglmCppChatModelHandle"],
-            prompt: str,
-            run_manager: Optional[CallbackManagerForLLMRun] = None,
-            generate_config: Optional[Union["LlamaCppGenerateConfig", "PytorchGenerateConfig", "ChatglmCppGenerateConfig"]] = None,
+            self,
+            model: Union["RESTfulGenerateModelHandle", "RESTfulChatModelHandle", "RESTfulChatglmCppChatModelHandle"],
+            prompt: str,
+            run_manager: Optional[CallbackManagerForLLMRun] = None,
+            generate_config: Optional[
+                Union["LlamaCppGenerateConfig", "PytorchGenerateConfig", "ChatglmCppGenerateConfig"]] = None,
     ) -> Generator[str, None, None]:
         """
         Args:
```
```diff
@@ -123,6 +123,10 @@ class XinferenceLLM(Xinference):
                 if choices:
                     choice = choices[0]
                     if isinstance(choice, dict):
+                        if 'finish_reason' in choice and choice['finish_reason'] \
+                                and choice['finish_reason'] in ['stop', 'length']:
+                            break
+
                         if 'text' in choice:
                             token = choice.get("text", "")
                         elif 'delta' in choice and 'content' in choice['delta']:
```
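The new guard breaks out of the token loop as soon as a chunk reports a terminal `finish_reason`, rather than waiting on a stream that will yield no more text. The same pattern as a hedged TypeScript sketch (the `StreamChoice` shape simply mirrors the dict keys the Python code checks):

```ts
// Mirrors the keys inspected on each streamed chunk.
interface StreamChoice {
  text?: string
  delta?: { content?: string }
  finish_reason?: 'stop' | 'length' | null
}

// Concatenate streamed tokens, stopping once the server signals completion.
function collectTokens(choices: Iterable<StreamChoice>): string {
  let combined = ''
  for (const choice of choices) {
    if (choice.finish_reason === 'stop' || choice.finish_reason === 'length')
      break // terminal chunk: no further text will arrive
    combined += choice.text ?? choice.delta?.content ?? ''
  }
  return combined
}
```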
```diff
@@ -49,4 +49,5 @@ huggingface_hub~=0.16.4
 transformers~=4.31.0
 stripe~=5.5.0
 pandas==1.5.3
-xinference==0.2.1
+xinference==0.2.1
+safetensors==0.3.2
```
```diff
@@ -2,7 +2,7 @@ version: '3.1'
 services:
   # API service
   api:
-    image: langgenius/dify-api:0.3.17
+    image: langgenius/dify-api:0.3.18
     restart: always
     environment:
       # Startup mode, 'api' starts the API server.
@@ -124,7 +124,7 @@ services:
   # worker service
   # The Celery worker for processing the queue.
   worker:
-    image: langgenius/dify-api:0.3.17
+    image: langgenius/dify-api:0.3.18
     restart: always
     environment:
       # Startup mode, 'worker' starts the Celery worker for processing the queue.
@@ -176,7 +176,7 @@ services:
 
   # Frontend web application.
   web:
-    image: langgenius/dify-web:0.3.17
+    image: langgenius/dify-web:0.3.18
     restart: always
     environment:
       EDITION: SELF_HOSTED
```
```diff
@@ -1,15 +1,14 @@
 'use client'
 
-import { useEffect, useRef, useState } from 'react'
+import { useCallback, useEffect, useRef, useState } from 'react'
 import { useRouter, useSearchParams } from 'next/navigation'
 import useSWRInfinite from 'swr/infinite'
-import { debounce } from 'lodash-es'
 import { useTranslation } from 'react-i18next'
 import AppCard from './AppCard'
 import NewAppCard from './NewAppCard'
 import type { AppListResponse } from '@/models/app'
 import { fetchAppList } from '@/service/apps'
-import { useAppContext, useSelector } from '@/context/app-context'
+import { useAppContext } from '@/context/app-context'
 import { NEED_REFRESH_APP_LIST_KEY } from '@/config'
 import { ProviderEnum } from '@/app/components/header/account-setting/model-page/declarations'
 import Confirm from '@/app/components/base/confirm/common'
```
```diff
@@ -24,15 +23,18 @@ const Apps = () => {
   const { t } = useTranslation()
   const { isCurrentWorkspaceManager } = useAppContext()
   const { data, isLoading, setSize, mutate } = useSWRInfinite(getKey, fetchAppList, { revalidateFirstPage: false })
   const loadingStateRef = useRef(false)
-  const pageContainerRef = useSelector(state => state.pageContainerRef)
-  const anchorRef = useRef<HTMLAnchorElement>(null)
+  const anchorRef = useRef<HTMLDivElement>(null)
   const searchParams = useSearchParams()
   const router = useRouter()
   const payProviderName = searchParams.get('provider_name')
   const payStatus = searchParams.get('payment_result')
   const [showPayStatusModal, setShowPayStatusModal] = useState(false)
+  const handleCancelShowPayStatusModal = useCallback(() => {
+    setShowPayStatusModal(false)
+    router.replace('/', { forceOptimisticNavigation: false })
+  }, [router])
 
   useEffect(() => {
     document.title = `${t('app.title')} - Dify`
     if (localStorage.getItem(NEED_REFRESH_APP_LIST_KEY) === '1') {
```
```diff
@@ -41,35 +43,24 @@ const Apps = () => {
     }
     if (payProviderName === ProviderEnum.anthropic && (payStatus === 'succeeded' || payStatus === 'cancelled'))
       setShowPayStatusModal(true)
-  }, [])
+  }, [mutate, payProviderName, payStatus, t])
 
   useEffect(() => {
     loadingStateRef.current = isLoading
   }, [isLoading])
 
   useEffect(() => {
-    const onScroll = debounce(() => {
-      if (!loadingStateRef.current) {
-        const { scrollTop, clientHeight } = pageContainerRef.current!
-        const anchorOffset = anchorRef.current!.offsetTop
-        if (anchorOffset - scrollTop - clientHeight < 100)
+    let observer: IntersectionObserver | undefined
+    if (anchorRef.current) {
+      observer = new IntersectionObserver((entries) => {
+        if (entries[0].isIntersecting)
           setSize(size => size + 1)
-      }
-    }, 50)
-
-    pageContainerRef.current?.addEventListener('scroll', onScroll)
-    return () => pageContainerRef.current?.removeEventListener('scroll', onScroll)
-  }, [])
-
-  const handleCancelShowPayStatusModal = () => {
-    setShowPayStatusModal(false)
-    router.replace('/', { forceOptimisticNavigation: false })
-  }
+      }, { rootMargin: '100px' })
+      observer.observe(anchorRef.current)
+    }
+    return () => observer?.disconnect()
+  }, [isLoading, setSize, anchorRef, mutate])
 
   return (
-    <nav className='grid content-start grid-cols-1 gap-4 px-12 pt-8 sm:grid-cols-2 md:grid-cols-3 lg:grid-cols-4 grow shrink-0'>
+    <><nav className='grid content-start grid-cols-1 gap-4 px-12 pt-8 sm:grid-cols-2 md:grid-cols-3 lg:grid-cols-4 grow shrink-0'>
       { isCurrentWorkspaceManager
-        && <NewAppCard ref={anchorRef} onSuccess={mutate} />}
+        && <NewAppCard onSuccess={mutate} />}
       {data?.map(({ data: apps }) => apps.map(app => (
         <AppCard key={app.id} app={app} onRefresh={mutate} />
       )))}
```
```diff
@@ -95,6 +86,8 @@ const Apps = () => {
       )
     }
     </nav>
+    <div ref={anchorRef} className='h-0'> </div>
+    </>
   )
 }
```
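Net effect of the two JSX hunks above: the debounced scroll handler tied to `pageContainerRef` is replaced by an IntersectionObserver that watches a zero-height sentinel `<div>` rendered after the grid, firing 100px before it enters the viewport. A standalone sketch of the pattern (the `useInfiniteScroll` hook and its names are illustrative, not the component's actual code):

```ts
import { useEffect, useRef } from 'react'

// Illustrative hook: invoke loadMore whenever a zero-height sentinel element
// comes within 100px of the viewport; disconnect the observer on unmount.
function useInfiniteScroll(loadMore: () => void) {
  const sentinelRef = useRef<HTMLDivElement>(null)

  useEffect(() => {
    const sentinel = sentinelRef.current
    if (!sentinel)
      return
    const observer = new IntersectionObserver((entries) => {
      if (entries[0].isIntersecting)
        loadMore()
    }, { rootMargin: '100px' })
    observer.observe(sentinel)
    return () => observer.disconnect()
  }, [loadMore])

  return sentinelRef // attach to the sentinel <div> at the end of the list
}
```

Unlike a scroll listener, the observer needs no debounce and no manual offset math, which is why `debounce` and `pageContainerRef` disappear from the imports.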
```diff
@@ -1,4 +1,4 @@
-import React from "react";
+import React from 'react'
 import type { FC } from 'react'
 import GA, { GaType } from '@/app/components/base/ga'
 
@@ -6,13 +6,11 @@ const Layout: FC<{
   children: React.ReactNode
 }> = ({ children }) => {
   return (
-    <div className="overflow-x-auto">
-      <div className="w-screen h-screen min-w-[300px]">
-        <GA gaType={GaType.webapp} />
-        {children}
-      </div>
+    <div className="min-w-[300px]">
+      <GA gaType={GaType.webapp} />
+      {children}
     </div>
   )
 }
 
-export default Layout
+export default Layout
```
```diff
@@ -574,7 +574,7 @@ const Main: FC<IMainProps> = ({
     return <Loading type='app' />
 
   return (
-    <div className='bg-gray-100'>
+    <div className='bg-gray-100 flex w-full h-full'>
       {!isInstalledApp && (
         <Header
           title={siteInfo.title}
```
```diff
@@ -588,7 +588,7 @@ const Main: FC<IMainProps> = ({
 
       <div
         className={cn(
-          'flex rounded-t-2xl bg-white overflow-hidden',
+          'flex rounded-t-2xl bg-white overflow-hidden h-full w-full',
          isInstalledApp && 'rounded-b-2xl',
        )}
        style={isInstalledApp
```
```diff
@@ -611,7 +611,7 @@ const Main: FC<IMainProps> = ({
       )}
       {/* main */}
       <div className={cn(
-        isInstalledApp ? s.installedApp : 'h-[calc(100vh_-_3rem)]',
+        isInstalledApp ? s.installedApp : '',
         'flex-grow flex flex-col overflow-y-auto',
       )
       }>
```
```diff
@@ -85,7 +85,7 @@ const Sidebar: FC<ISidebarProps> = ({
     <div
       className={
         cn(
-          (isInstalledApp || isUniversalChat) ? 'tablet:h-[calc(100vh_-_74px)]' : 'tablet:h-[calc(100vh_-_3rem)]',
+          (isInstalledApp || isUniversalChat) ? 'tablet:h-[calc(100vh_-_74px)]' : '',
           'shrink-0 flex flex-col bg-white pc:w-[244px] tablet:w-[192px] mobile:w-[240px] border-r border-gray-200 mobile:h-screen',
         )
       }
```
```diff
@@ -510,7 +510,7 @@ const Main: FC<IMainProps> = ({
 
       <div className={'flex bg-white overflow-hidden'}>
       <div className={cn(
-        isInstalledApp ? s.installedApp : 'h-[calc(100vh_-_3rem)]',
+        isInstalledApp ? s.installedApp : '',
         'flex-grow flex flex-col overflow-y-auto',
       )
       }>
```
```diff
@@ -1,6 +1,6 @@
 {
   "name": "dify-web",
-  "version": "0.3.17",
+  "version": "0.3.18",
   "private": true,
   "scripts": {
     "dev": "next dev",
```