fix: userselect chatId unrefresh
c121914yu committed Sep 11, 2024
1 parent ec1ccfc commit 12f3dba
Showing 7 changed files with 57 additions and 105 deletions.
1 change: 1 addition & 0 deletions docSite/content/zh-cn/docs/development/upgrading/4811.md
@@ -25,3 +25,4 @@ weight: 813
8. Optimization - Limit workflow nesting to 20 levels to avoid infinite loops caused by poorly designed orchestration.
9. Optimization - Workflow handler performance improvements.
10. Fix - Knowledge base selection permission issue.
11. Fix - Starting a conversation with an empty chatId threw an error when the first round carried a user selection.
@@ -9,13 +9,11 @@ import { formatChatValue2InputType } from '../utils';
import { ChatRoleEnum } from '@fastgpt/global/core/chat/constants';
import { ChatBoxContext } from '../Provider';
import { useContextSelector } from 'use-context-selector';
import { SendPromptFnType } from '../type';

export type ChatControllerProps = {
isLastChild: boolean;
chat: ChatSiteItemType;
showVoiceIcon?: boolean;
onSendMessage: SendPromptFnType;
onRetry?: () => void;
onDelete?: () => void;
onMark?: () => void;
@@ -19,7 +19,6 @@ import { useCopyData } from '@/web/common/hooks/useCopyData';
import MyIcon from '@fastgpt/web/components/common/Icon';
import MyTooltip from '@fastgpt/web/components/common/MyTooltip';
import { useTranslation } from 'next-i18next';
import { SendPromptFnType } from '../type';
import { AIChatItemValueItemType, ChatItemValueItemType } from '@fastgpt/global/core/chat/type';
import { CodeClassNameEnum } from '@/components/Markdown/utils';
import { isEqual } from 'lodash';
@@ -51,7 +50,6 @@ type BasicProps = {

type Props = BasicProps & {
type: ChatRoleEnum.Human | ChatRoleEnum.AI;
onSendMessage: SendPromptFnType;
};

const RenderQuestionGuide = ({ questionGuides }: { questionGuides: string[] }) => {
@@ -80,14 +78,12 @@ const AIContentCard = React.memo(function AIContentCard({
dataId,
isLastChild,
isChatting,
onSendMessage,
questionGuides
}: {
dataId: string;
chatValue: ChatItemValueItemType[];
isLastChild: boolean;
isChatting: boolean;
onSendMessage: SendPromptFnType;
questionGuides: string[];
}) {
return (
@@ -101,7 +97,6 @@ const AIContentCard = React.memo(function AIContentCard({
value={value}
isLastChild={isLastChild && i === chatValue.length - 1}
isChatting={isChatting}
onSendMessage={onSendMessage}
/>
);
})}
@@ -113,16 +108,7 @@ const AIContentCard = React.memo(function AIContentCard({
});

const ChatItem = (props: Props) => {
const {
type,
avatar,
statusBoxData,
children,
isLastChild,
questionGuides = [],
onSendMessage,
chat
} = props;
const { type, avatar, statusBoxData, children, isLastChild, questionGuides = [], chat } = props;

const styleMap: BoxProps =
type === ChatRoleEnum.Human
@@ -270,7 +256,6 @@ const ChatItem = (props: Props) => {
dataId={chat.dataId}
isLastChild={isLastChild && i === splitAiResponseResults.length - 1}
isChatting={isChatting}
onSendMessage={onSendMessage}
questionGuides={questionGuides}
/>
)}
@@ -60,7 +60,7 @@ import dynamic from 'next/dynamic';
import type { StreamResponseType } from '@/web/common/api/fetch';
import { useContextSelector } from 'use-context-selector';
import { useSystem } from '@fastgpt/web/hooks/useSystem';
import { useCreation, useMemoizedFn, useThrottleFn, useTrackedEffect } from 'ahooks';
import { useCreation, useMemoizedFn, useThrottleFn } from 'ahooks';
import MyIcon from '@fastgpt/web/components/common/Icon';

const ResponseTags = dynamic(() => import('./components/ResponseTags'));
@@ -832,12 +832,10 @@ const ChatBox = (
};
window.addEventListener('message', windowMessage);

eventBus.on(EventNameEnum.sendQuestion, ({ text }: { text: string }) => {
if (!text) return;
sendPrompt({
text
});
});
const fn: SendPromptFnType = (e) => {
sendPrompt(e);
};
eventBus.on(EventNameEnum.sendQuestion, fn);
eventBus.on(EventNameEnum.editQuestion, ({ text }: { text: string }) => {
if (!text) return;
resetInputVal({ text });
@@ -881,7 +879,6 @@
onRetry={retryInput(item.dataId)}
onDelete={delOneMessage(item.dataId)}
isLastChild={index === chatHistories.length - 1}
onSendMessage={sendPrompt}
/>
)}
{item.obj === ChatRoleEnum.AI && (
@@ -891,7 +888,6 @@
avatar={appAvatar}
chat={item}
isLastChild={index === chatHistories.length - 1}
onSendMessage={sendPrompt}
{...{
showVoiceIcon,
shareId,
@@ -977,7 +973,6 @@
outLinkUid,
questionGuides,
retryInput,
sendPrompt,
shareId,
showEmpty,
showMarkIcon,
@@ -2,7 +2,8 @@ import { ChatSiteItemType } from '@fastgpt/global/core/chat/type';
import { useCallback, useRef, useState } from 'react';
import { useForm } from 'react-hook-form';
import { PluginRunBoxTabEnum } from './PluginRunBox/constants';
import { ComponentRef as ChatComponentRef } from './ChatBox/type';
import { ComponentRef as ChatComponentRef, SendPromptFnType } from './ChatBox/type';
import { eventBus, EventNameEnum } from '@/web/common/utils/eventbus';

export const useChat = () => {
const ChatBoxRef = useRef<ChatComponentRef>(null);
@@ -61,3 +62,5 @@ export const useChat = () => {
resetChatRecords
};
};

export const onSendPrompt: SendPromptFnType = (e) => eventBus.emit(EventNameEnum.sendQuestion, e);
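
Taken together with the ChatBox hunk above, the new `onSendPrompt` export replaces prop-drilling of `sendPrompt` with an event-bus hand-off: any component emits `sendQuestion`, and ChatBox alone turns it into a real `sendPrompt` call. Below is a self-contained sketch of that pattern; the inline `eventBus` is only a stand-in for the repo's `@/web/common/utils/eventbus`, and the payload type is simplified.

```ts
// Stand-in event bus (the real one lives in @/web/common/utils/eventbus).
type SendPromptPayload = { text: string; isInteractivePrompt?: boolean };

const listeners = new Map<string, (e: SendPromptPayload) => void>();
const eventBus = {
  on: (name: string, fn: (e: SendPromptPayload) => void) => listeners.set(name, fn),
  off: (name: string) => listeners.delete(name),
  emit: (name: string, e: SendPromptPayload) => listeners.get(name)?.(e)
};

// Emitter side (what useChat now exports): deeply nested components such as the
// userSelect buttons can fire a prompt without receiving sendPrompt as a prop.
const onSendPrompt = (e: SendPromptPayload) => eventBus.emit('sendQuestion', e);

// Listener side (what ChatBox registers on mount): forward the whole payload so
// interactive clicks keep their isInteractivePrompt flag.
const sendPrompt = (e: SendPromptPayload) => console.log('sendPrompt called with', e);
eventBus.on('sendQuestion', (e) => sendPrompt(e));

// Example: a userSelect option click.
onSendPrompt({ text: 'Option A', isInteractivePrompt: true });

// Cleanup on unmount.
eventBus.off('sendQuestion');
```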
115 changes: 44 additions & 71 deletions projects/app/src/components/core/chat/components/AIResponseBox.tsx
@@ -12,24 +12,20 @@ import {
import { ChatItemValueTypeEnum } from '@fastgpt/global/core/chat/constants';
import {
AIChatItemValueItemType,
ChatSiteItemType,
ToolModuleResponseItemType,
UserChatItemValueItemType
} from '@fastgpt/global/core/chat/type';
import React from 'react';
import MyIcon from '@fastgpt/web/components/common/Icon';
import Avatar from '@fastgpt/web/components/common/Avatar';
import { SendPromptFnType } from '../ChatContainer/ChatBox/type';
import { useContextSelector } from 'use-context-selector';
import { ChatBoxContext } from '../ChatContainer/ChatBox/Provider';
import { InteractiveNodeResponseItemType } from '@fastgpt/global/core/workflow/template/system/userSelect/type';
import { isEqual } from 'lodash';
import { onSendPrompt } from '../ChatContainer/useChat';

type props = {
value: UserChatItemValueItemType | AIChatItemValueItemType;
isLastChild: boolean;
isChatting: boolean;
onSendMessage?: SendPromptFnType;
};

const RenderText = React.memo(function RenderText({
@@ -128,67 +124,51 @@ ${toolResponse}`}
},
(prevProps, nextProps) => isEqual(prevProps, nextProps)
);
const RenderInteractive = React.memo(
function RenderInteractive({
isChatting,
interactive,
onSendMessage,
chatHistories
}: {
isChatting: boolean;
interactive: InteractiveNodeResponseItemType;
onSendMessage?: SendPromptFnType;
chatHistories: ChatSiteItemType[];
}) {
return (
<>
{interactive?.params?.description && <Markdown source={interactive.params.description} />}
<Flex flexDirection={'column'} gap={2} w={'250px'}>
{interactive.params.userSelectOptions?.map((option) => {
const selected = option.value === interactive?.params?.userSelectedVal;
const RenderInteractive = React.memo(function RenderInteractive({
interactive
}: {
interactive: InteractiveNodeResponseItemType;
}) {
return (
<>
{interactive?.params?.description && <Markdown source={interactive.params.description} />}
<Flex flexDirection={'column'} gap={2} w={'250px'}>
{interactive.params.userSelectOptions?.map((option) => {
const selected = option.value === interactive?.params?.userSelectedVal;

return (
<Button
key={option.key}
variant={'whitePrimary'}
whiteSpace={'pre-wrap'}
isDisabled={interactive?.params?.userSelectedVal !== undefined}
{...(selected
? {
_disabled: {
cursor: 'default',
borderColor: 'primary.300',
bg: 'primary.50 !important',
color: 'primary.600'
}
return (
<Button
key={option.key}
variant={'whitePrimary'}
whiteSpace={'pre-wrap'}
isDisabled={interactive?.params?.userSelectedVal !== undefined}
{...(selected
? {
_disabled: {
cursor: 'default',
borderColor: 'primary.300',
bg: 'primary.50 !important',
color: 'primary.600'
}
: {})}
onClick={() => {
onSendMessage?.({
text: option.value,
isInteractivePrompt: true
});
}}
>
{option.value}
</Button>
);
})}
</Flex>
</>
);
},
(
prevProps,
nextProps // when isChatting updates, onSendMessage and chatHistories will certainly have updated as well, so no extra refresh is needed here
) =>
prevProps.isChatting === nextProps.isChatting &&
isEqual(prevProps.interactive, nextProps.interactive)
);

const AIResponseBox = ({ value, isLastChild, isChatting, onSendMessage }: props) => {
const chatHistories = useContextSelector(ChatBoxContext, (v) => v.chatHistories);
}
: {})}
onClick={() => {
onSendPrompt({
text: option.value,
isInteractivePrompt: true
});
}}
>
{option.value}
</Button>
);
})}
</Flex>
</>
);
});

const AIResponseBox = ({ value, isLastChild, isChatting }: props) => {
if (value.type === ChatItemValueTypeEnum.text && value.text)
return <RenderText showAnimation={isChatting && isLastChild} text={value.text.content} />;
if (value.type === ChatItemValueTypeEnum.tool && value.tools)
@@ -198,14 +178,7 @@ const AIResponseBox = ({ value, isLastChild, isChatting, onSendMessage }: props)
value.interactive &&
value.interactive.type === 'userSelect'
)
return (
<RenderInteractive
isChatting={isChatting}
interactive={value.interactive}
onSendMessage={onSendMessage}
chatHistories={chatHistories}
/>
);
return <RenderInteractive interactive={value.interactive} />;
};

export default React.memo(AIResponseBox);
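
The custom comparator removed above assumed `onSendMessage` never needed to trigger a re-render on its own; when surrounding state such as an initially empty chatId changes without `isChatting` changing, a memoized child can keep calling a stale callback. The following is a self-contained sketch of that general pitfall, using placeholder names rather than the real ChatBox state, and is offered as one plausible reading of the bug rather than a definitive account.

```tsx
import React, { useCallback, useState } from 'react';

// A memoized child whose comparator ignores the callback prop: once mounted,
// it keeps the first onSend it received, and that closure still sees chatId = ''.
const Child = React.memo(
  function Child({ label, onSend }: { label: string; onSend: () => void }) {
    return <button onClick={onSend}>{label}</button>;
  },
  (prev, next) => prev.label === next.label // onSend deliberately not compared
);

function Parent() {
  const [chatId, setChatId] = useState(''); // starts empty, filled in later
  const onSend = useCallback(() => {
    console.log('sending with chatId:', chatId); // may still log '' inside Child
  }, [chatId]);

  return (
    <>
      <Child label="Ask" onSend={onSend} />
      <button onClick={() => setChatId('real-chat-id')}>assign chatId</button>
    </>
  );
}

export default Parent;
```

Routing the click through `onSendPrompt` sidesteps the comparator entirely, since nothing about sending is carried in the memoized props anymore.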
7 changes: 2 additions & 5 deletions projects/app/src/pages/chat/index.tsx
@@ -130,7 +130,6 @@ const Chat = ({
const completionChatId = chatId || getNanoid();
// Just send a user prompt
const histories = messages.slice(-1);

const { responseText, responseData } = await streamFetch({
data: {
messages: histories,
@@ -146,10 +145,8 @@ const Chat = ({
const newTitle = getChatTitleFromChatMessage(GPTMessages2Chats(histories)[0]);

// new chat
if (completionChatId !== chatId) {
if (controller.signal.reason !== 'leave') {
onChangeChatId(completionChatId, true);
}
if (completionChatId !== chatId && controller.signal.reason !== 'leave') {
onChangeChatId(completionChatId, true);
}
loadHistories();
