From c8bc091a586b9641ecee8597617deff8f2aace57 Mon Sep 17 00:00:00 2001 From: Archer <545436317@qq.com> Date: Fri, 6 Sep 2024 15:45:02 +0800 Subject: [PATCH] 4.8.10 perf (#2630) * perf: i18n init * i18n * fix: user select end status * fix: interactive workflow * fix: restart chat * fix: oauth login --- .../global/core/workflow/runtime/utils.ts | 14 ++- .../dispatch/agent/runTool/toolChoice.ts | 115 +++++++++--------- .../service/core/workflow/dispatch/index.ts | 38 ++++-- packages/web/styles/theme.ts | 12 +- projects/app/Dockerfile | 12 +- .../core/chat/ChatContainer/ChatBox/index.tsx | 43 ++++--- .../core/chat/ChatContainer/ChatBox/type.d.ts | 18 ++- .../core/chat/ChatContainer/ChatBox/utils.ts | 4 +- .../core/chat/ChatContainer/useChat.ts | 1 + .../core/chat/components/AIResponseBox.tsx | 3 +- .../app/src/pages/api/core/chat/chatTest.ts | 16 ++- .../app/detail/components/useChatTest.tsx | 10 +- projects/app/src/pages/login/provider.tsx | 9 +- projects/app/src/pages/login/sso.tsx | 9 +- 14 files changed, 172 insertions(+), 132 deletions(-) diff --git a/packages/global/core/workflow/runtime/utils.ts b/packages/global/core/workflow/runtime/utils.ts index f2e7ce6b1986..f8779207f414 100644 --- a/packages/global/core/workflow/runtime/utils.ts +++ b/packages/global/core/workflow/runtime/utils.ts @@ -40,9 +40,17 @@ export const getLastInteractiveValue = (histories: ChatItemType[]) => { const lastValue = lastAIMessage.value[lastAIMessage.value.length - 1]; if ( - lastValue && - lastValue.type === ChatItemValueTypeEnum.interactive && - !!lastValue.interactive + !lastValue || + lastValue.type !== ChatItemValueTypeEnum.interactive || + !lastValue.interactive + ) { + return null; + } + + // Check is user select + if ( + lastValue.interactive.type === 'userSelect' && + !lastValue.interactive.params.userSelectedVal ) { return lastValue.interactive; } diff --git a/packages/service/core/workflow/dispatch/agent/runTool/toolChoice.ts b/packages/service/core/workflow/dispatch/agent/runTool/toolChoice.ts index 8addf07f4948..a4d16afe080e 100644 --- a/packages/service/core/workflow/dispatch/agent/runTool/toolChoice.ts +++ b/packages/service/core/workflow/dispatch/agent/runTool/toolChoice.ts @@ -25,7 +25,7 @@ import { GPTMessages2Chats } from '@fastgpt/global/core/chat/adapt'; import { AIChatItemType } from '@fastgpt/global/core/chat/type'; import { updateToolInputValue } from './utils'; import { computedMaxToken, computedTemperature } from '../../../../ai/utils'; -import { sliceStrStartEnd } from '@fastgpt/global/common/string/tools'; +import { getNanoid, sliceStrStartEnd } from '@fastgpt/global/common/string/tools'; import { addLog } from '../../../../../common/system/log'; type ToolRunResponseType = { @@ -367,6 +367,7 @@ async function streamResponse({ }); let textAnswer = ''; + let callingTool: { name: string; arguments: string } | null = null; let toolCalls: ChatCompletionMessageToolCall[] = []; for await (const part of stream) { @@ -390,69 +391,71 @@ async function streamResponse({ }); } else if (responseChoice?.tool_calls?.[0]) { const toolCall: ChatCompletionMessageToolCall = responseChoice.tool_calls[0]; - // In a stream response, only one tool is returned at a time. 
If have id, description is executing a tool - if (toolCall.id) { - const toolNode = toolNodes.find((item) => item.nodeId === toolCall.function?.name); + if (toolCall.id || callingTool) { + // Start call tool + if (toolCall.id) { + callingTool = { + name: toolCall.function.name || '', + arguments: toolCall.function.arguments || '' + }; + } else if (callingTool) { + // Continue call + callingTool.name += toolCall.function.name || ''; + callingTool.arguments += toolCall.function.arguments || ''; + } + + const toolFunction = callingTool!; + + const toolNode = toolNodes.find((item) => item.nodeId === toolFunction.name); if (toolNode) { - if (toolCall.function?.arguments === undefined) { - toolCall.function.arguments = ''; - } + // New tool, add to list. + const toolId = getNanoid(); + toolCalls.push({ + ...toolCall, + id: toolId, + function: toolFunction, + toolName: toolNode.name, + toolAvatar: toolNode.avatar + }); - // Get last tool call - const lastToolCall = toolCalls[toolCalls.length - 1]; - - // new tool - if (lastToolCall?.id !== toolCall.id) { - toolCalls.push({ - ...toolCall, - toolName: toolNode.name, - toolAvatar: toolNode.avatar - }); - - workflowStreamResponse?.({ - event: SseResponseEventEnum.toolCall, - data: { - tool: { - id: toolCall.id, - toolName: toolNode.name, - toolAvatar: toolNode.avatar, - functionName: toolCall.function.name, - params: toolCall.function.arguments, - response: '' - } + workflowStreamResponse?.({ + event: SseResponseEventEnum.toolCall, + data: { + tool: { + id: toolId, + toolName: toolNode.name, + toolAvatar: toolNode.avatar, + functionName: toolFunction.name, + params: toolFunction?.arguments ?? '', + response: '' } - }); - - continue; - } - // last tool, update params - } else { - continue; + } + }); + callingTool = null; } - } - - /* arg 插入最后一个工具的参数里 */ - const arg: string = toolCall?.function?.arguments ?? ''; - const currentTool = toolCalls[toolCalls.length - 1]; - - if (currentTool) { - currentTool.function.arguments += arg; + } else { + /* arg 插入最后一个工具的参数里 */ + const arg: string = toolCall?.function?.arguments ?? 
''; + const currentTool = toolCalls[toolCalls.length - 1]; + if (currentTool && arg) { + currentTool.function.arguments += arg; - workflowStreamResponse?.({ - write, - event: SseResponseEventEnum.toolParams, - data: { - tool: { - id: currentTool.id, - toolName: '', - toolAvatar: '', - params: arg, - response: '' + workflowStreamResponse?.({ + write, + event: SseResponseEventEnum.toolParams, + data: { + tool: { + id: currentTool.id, + toolName: '', + toolAvatar: '', + params: arg, + response: '' + } } - } - }); + }); + } } } } diff --git a/packages/service/core/workflow/dispatch/index.ts b/packages/service/core/workflow/dispatch/index.ts index f864d8de8db1..ff27de77a374 100644 --- a/packages/service/core/workflow/dispatch/index.ts +++ b/packages/service/core/workflow/dispatch/index.ts @@ -137,6 +137,13 @@ export async function dispatchWorkFlow(data: Props): Promise self.findIndex((t) => t.nodeId === node.nodeId) === index ); - // In the current version, only one interactive node is allowed at the same time - const interactiveResponse = nodeRunResult.result?.[DispatchNodeResponseKeyEnum.interactive]; - if (interactiveResponse) { - chatAssistantResponse.push( - handleInteractiveResult({ - entryNodeIds: [nodeRunResult.node.nodeId], - interactiveResponse - }) - ); - return []; - } - // Run next nodes(先运行 run 的,再运行 skip 的) const nextStepActiveNodesResults = ( await Promise.all(nextStepActiveNodes.map((node) => checkNodeCanRun(node))) @@ -543,6 +548,15 @@ export async function dispatchWorkFlow(data: Props): Promise void; }; -/* - The input is divided into sections - 1. text - 2. img - 3. file - 4. .... -*/ - const ChatBox = ( { feedbackType = FeedbackTypeEnum.hidden, @@ -377,7 +373,13 @@ const ChatBox = ( * user confirm send prompt */ const sendPrompt: SendPromptFnType = useCallback( - ({ text = '', files = [], history = chatHistories, autoTTSResponse = false }) => { + ({ + text = '', + files = [], + history = chatHistories, + autoTTSResponse = false, + isInteractivePrompt = false + }) => { variablesForm.handleSubmit( async (variables) => { if (!onStartChat) return; @@ -444,6 +446,7 @@ const ChatBox = ( ] as UserChatItemValueItemType[], status: 'finish' }, + // 普通 chat 模式,需要增加一个 AI 来接收响应消息 { dataId: responseChatId, obj: ChatRoleEnum.AI, @@ -459,20 +462,26 @@ const ChatBox = ( } ]; - const isInteractive = checkIsInteractiveByHistories(history); // Update histories(Interactive input does not require new session rounds) - setChatHistories(isInteractive ? newChatList.slice(0, -2) : newChatList); + setChatHistories( + isInteractivePrompt + ? 
// 把交互的结果存储到对话记录中,交互模式下,不需要新的会话轮次 + setUserSelectResultToHistories(newChatList.slice(0, -2), text) + : newChatList + ); // 清空输入内容 resetInputVal({}); setQuestionGuide([]); scrollToBottom('smooth', 100); + try { // create abort obj const abortSignal = new AbortController(); chatController.current = abortSignal; - // Last empty ai message will be removed + // 最后一条 AI 消息是空的,会被过滤掉,这里得到的 messages,不会包含最后一条 AI 消息,所以不需要 slice 了。 + // 这里,无论是否为交互模式,最后都是 Human 的消息。 const messages = chats2GPTMessages({ messages: newChatList, reserveId: true }); const { @@ -480,7 +489,7 @@ const ChatBox = ( responseText, isNewChat = false } = await onStartChat({ - messages: messages, + messages, // 保证最后一条是 Human 的消息 responseChatItemId: responseChatId, controller: abortSignal, generatingMessage: (e) => generatingMessage({ ...e, autoTTSResponse }), @@ -847,12 +856,6 @@ const ChatBox = ( abortRequest(); setValue('chatStarted', false); scrollToBottom('smooth', 500); - }, - scrollToBottom, - sendPrompt: (question: string) => { - sendPrompt({ - text: question - }); } })); diff --git a/projects/app/src/components/core/chat/ChatContainer/ChatBox/type.d.ts b/projects/app/src/components/core/chat/ChatContainer/ChatBox/type.d.ts index d1be5df52f88..3f7398c5f328 100644 --- a/projects/app/src/components/core/chat/ChatContainer/ChatBox/type.d.ts +++ b/projects/app/src/components/core/chat/ChatContainer/ChatBox/type.d.ts @@ -27,20 +27,16 @@ export type ChatBoxInputFormType = { export type ChatBoxInputType = { text?: string; files?: UserInputFileItemType[]; + isInteractivePrompt?: boolean; }; -export type SendPromptFnType = ({ - text, - files, - history, - autoTTSResponse -}: ChatBoxInputType & { - autoTTSResponse?: boolean; - history?: ChatSiteItemType[]; -}) => void; +export type SendPromptFnType = ( + e: ChatBoxInputType & { + autoTTSResponse?: boolean; + history?: ChatSiteItemType[]; + } +) => void; export type ComponentRef = { restartChat: () => void; - scrollToBottom: (behavior?: 'smooth' | 'auto') => void; - sendPrompt: (question: string) => void; }; diff --git a/projects/app/src/components/core/chat/ChatContainer/ChatBox/utils.ts b/projects/app/src/components/core/chat/ChatContainer/ChatBox/utils.ts index b7e6160c5486..88f12932b8b8 100644 --- a/projects/app/src/components/core/chat/ChatContainer/ChatBox/utils.ts +++ b/projects/app/src/components/core/chat/ChatContainer/ChatBox/utils.ts @@ -52,7 +52,9 @@ export const checkIsInteractiveByHistories = (chatHistories: ChatSiteItemType[]) return ( lastMessageValue.type === ChatItemValueTypeEnum.interactive && - !!lastMessageValue?.interactive?.params + !!lastMessageValue?.interactive?.params && + // 如果用户选择了,则不认为是交互模式(可能是上一轮以交互结尾,发起的新的一轮对话) + !lastMessageValue?.interactive?.params?.userSelectedVal ); }; diff --git a/projects/app/src/components/core/chat/ChatContainer/useChat.ts b/projects/app/src/components/core/chat/ChatContainer/useChat.ts index a7631937355a..124aebc56153 100644 --- a/projects/app/src/components/core/chat/ChatContainer/useChat.ts +++ b/projects/app/src/components/core/chat/ChatContainer/useChat.ts @@ -49,6 +49,7 @@ export const useChat = () => { ChatBoxRef.current?.restartChat?.(); }, [variablesForm]); + return { ChatBoxRef, chatRecords, diff --git a/projects/app/src/components/core/chat/components/AIResponseBox.tsx b/projects/app/src/components/core/chat/components/AIResponseBox.tsx index 4f59b232c9af..f4a08ea67a6e 100644 --- a/projects/app/src/components/core/chat/components/AIResponseBox.tsx +++ b/projects/app/src/components/core/chat/components/AIResponseBox.tsx 
@@ -22,7 +22,6 @@ import Avatar from '@fastgpt/web/components/common/Avatar'; import { SendPromptFnType } from '../ChatContainer/ChatBox/type'; import { useContextSelector } from 'use-context-selector'; import { ChatBoxContext } from '../ChatContainer/ChatBox/Provider'; -import { setUserSelectResultToHistories } from '../ChatContainer/ChatBox/utils'; import { InteractiveNodeResponseItemType } from '@fastgpt/global/core/workflow/template/system/userSelect/type'; import { isEqual } from 'lodash'; @@ -167,7 +166,7 @@ const RenderInteractive = React.memo( onClick={() => { onSendMessage?.({ text: option.value, - history: setUserSelectResultToHistories(chatHistories, option.value) + isInteractivePrompt: true }); }} > diff --git a/projects/app/src/pages/api/core/chat/chatTest.ts b/projects/app/src/pages/api/core/chat/chatTest.ts index 59092c8c030f..b57566376b08 100644 --- a/projects/app/src/pages/api/core/chat/chatTest.ts +++ b/projects/app/src/pages/api/core/chat/chatTest.ts @@ -57,12 +57,17 @@ async function handler(req: NextApiRequest, res: NextApiResponse) { chatConfig } = req.body as Props; try { + if (!Array.isArray(nodes)) { + throw new Error('Nodes is not array'); + } + if (!Array.isArray(edges)) { + throw new Error('Edges is not array'); + } const chatMessages = GPTMessages2Chats(messages); + const userInput = chatMessages.pop()?.value as UserChatItemValueItemType[] | undefined; // console.log(JSON.stringify(chatMessages, null, 2), '====', chatMessages.length); - const userInput = chatMessages.pop()?.value as UserChatItemValueItemType[] | undefined; - /* user auth */ const [{ app }, { teamId, tmbId }] = await Promise.all([ authApp({ req, authToken: true, appId, per: ReadPermissionVal }), @@ -76,13 +81,6 @@ async function handler(req: NextApiRequest, res: NextApiResponse) { const isPlugin = app.type === AppTypeEnum.plugin; - if (!Array.isArray(nodes)) { - throw new Error('Nodes is not array'); - } - if (!Array.isArray(edges)) { - throw new Error('Edges is not array'); - } - let runtimeNodes = storeNodes2RuntimeNodes(nodes, getWorkflowEntryNodeIds(nodes, chatMessages)); // Plugin need to replace inputs diff --git a/projects/app/src/pages/app/detail/components/useChatTest.tsx b/projects/app/src/pages/app/detail/components/useChatTest.tsx index 7506ceeb2829..d7ca47d5bcab 100644 --- a/projects/app/src/pages/app/detail/components/useChatTest.tsx +++ b/projects/app/src/pages/app/detail/components/useChatTest.tsx @@ -1,5 +1,5 @@ import { useUserStore } from '@/web/support/user/useUserStore'; -import React from 'react'; +import React, { useMemo } from 'react'; import type { StartChatFnProps } from '@/components/core/chat/ChatContainer/type'; import { streamFetch } from '@/web/common/api/fetch'; import { getMaxHistoryLimitFromNodes } from '@fastgpt/global/core/workflow/runtime/utils'; @@ -14,9 +14,9 @@ import dynamic from 'next/dynamic'; import { useChat } from '@/components/core/chat/ChatContainer/useChat'; import { Box } from '@chakra-ui/react'; import { AppChatConfigType } from '@fastgpt/global/core/app/type'; +import ChatBox from '@/components/core/chat/ChatContainer/ChatBox'; const PluginRunBox = dynamic(() => import('@/components/core/chat/ChatContainer/PluginRunBox')); -const ChatBox = dynamic(() => import('@/components/core/chat/ChatContainer/ChatBox')); export const useChatTest = ({ nodes, @@ -56,8 +56,10 @@ export const useChatTest = ({ } ); - const pluginInputs = - nodes.find((node) => node.flowNodeType === FlowNodeTypeEnum.pluginInput)?.inputs || []; + const pluginInputs = useMemo(() 
=> { + return nodes.find((node) => node.flowNodeType === FlowNodeTypeEnum.pluginInput)?.inputs || []; + }, [nodes]); + const { ChatBoxRef, chatRecords, diff --git a/projects/app/src/pages/login/provider.tsx b/projects/app/src/pages/login/provider.tsx index eeca4618d410..20871c9c784f 100644 --- a/projects/app/src/pages/login/provider.tsx +++ b/projects/app/src/pages/login/provider.tsx @@ -11,7 +11,6 @@ import Loading from '@fastgpt/web/components/common/MyLoading'; import { serviceSideProps } from '@/web/common/utils/i18n'; import { getErrText } from '@fastgpt/global/common/error/utils'; import { useTranslation } from 'next-i18next'; -import { useMount } from 'ahooks'; const provider = () => { const { t } = useTranslation(); @@ -104,9 +103,15 @@ const provider = () => { } else { authCode(code); } - }, [code, error, loginStore, state]); + }, []); return ; }; export default provider; + +export async function getServerSideProps(context: any) { + return { + props: { ...(await serviceSideProps(context)) } + }; +} diff --git a/projects/app/src/pages/login/sso.tsx b/projects/app/src/pages/login/sso.tsx index 50b5eef770f9..3107b514dbea 100644 --- a/projects/app/src/pages/login/sso.tsx +++ b/projects/app/src/pages/login/sso.tsx @@ -8,6 +8,7 @@ import { ssoLogin } from '@/web/support/user/api'; import Loading from '@fastgpt/web/components/common/MyLoading'; import { useTranslation } from 'next-i18next'; import { useRequest2 } from '@fastgpt/web/hooks/useRequest'; +import { serviceSideProps } from '@/web/common/utils/i18n'; const provider = () => { const { t } = useTranslation(); @@ -39,9 +40,15 @@ const provider = () => { clearToken(); handleSSO(); } - }, [handleSSO, query]); + }, []); return ; }; export default provider; + +export async function getServerSideProps(context: any) { + return { + props: { ...(await serviceSideProps(context)) } + }; +}
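
The most involved change above is in streamResponse (toolChoice.ts): streamed tool-call deltas are now buffered in a callingTool accumulator until the function name is complete and resolves to a known tool node, instead of being matched on the first chunk. Below is a minimal standalone sketch of that accumulation pattern, not part of the patch; the types, the match-by-name set, and the call_N id scheme are simplified assumptions for illustration (the patch matches on the tool node's nodeId, uses getNanoid() for ids, and the SDK's ChatCompletionMessageToolCall type).

type ToolCallDelta = {
  id?: string;
  function?: { name?: string; arguments?: string };
};

type AccumulatedCall = { id: string; name: string; arguments: string };

function createToolCallAccumulator(knownToolNames: Set<string>) {
  // Partially received call whose name has not resolved to a known tool yet.
  let pending: { name: string; arguments: string } | null = null;
  const calls: AccumulatedCall[] = [];

  return {
    push(delta: ToolCallDelta) {
      if (delta.id || pending) {
        if (delta.id) {
          // A delta with an id starts a new call; later deltas may omit the
          // id and carry only fragments of the name / arguments.
          pending = {
            name: delta.function?.name || '',
            arguments: delta.function?.arguments || ''
          };
        } else if (pending) {
          pending.name += delta.function?.name || '';
          pending.arguments += delta.function?.arguments || '';
        }
        // Promote the pending call only once its accumulated name matches a
        // tool we actually know about.
        if (pending && knownToolNames.has(pending.name)) {
          calls.push({ id: `call_${calls.length}`, ...pending });
          pending = null;
        }
      } else if (calls.length > 0 && delta.function?.arguments) {
        // No id and nothing pending: the fragment belongs to the arguments
        // of the last promoted call.
        calls[calls.length - 1].arguments += delta.function.arguments;
      }
    },
    result: () => calls
  };
}

// Usage: a tool name split across two stream chunks, then an argument chunk.
const acc = createToolCallAccumulator(new Set(['getWeather']));
acc.push({ id: 'srv_1', function: { name: 'getWea', arguments: '' } });
acc.push({ function: { name: 'ther', arguments: '{"city":' } });
acc.push({ function: { arguments: '"Paris"}' } });
console.log(acc.result());
// [{ id: 'call_0', name: 'getWeather', arguments: '{"city":"Paris"}' }]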