fix: chat thinking render (#3544)
Ricbet authored Apr 16, 2024
1 parent 182e936 commit fcb8182
Showing 4 changed files with 23 additions and 16 deletions.
9 changes: 7 additions & 2 deletions packages/ai-native/src/browser/chat/chat.view.tsx
@@ -464,14 +464,19 @@ export const AIChatView = observer(() => {
 
   const handleReply = React.useCallback(
     (userInput: { type: AISerivceType; message: string }, replayCommandProps: IReplayComponentParam) => {
+      const { relationId } = replayCommandProps;
+
       if (chatRenderRegistry.chatAIRoleRender) {
         replayCommandProps.renderContent = (content: string, status: EMsgStreamStatus) =>
           chatRenderRegistry.chatAIRoleRender!({ content, status });
       }
 
+      aiChatService.setLatestSessionId(relationId);
+      aiChatService.messageWithStream(userInput.message, {}, relationId);
+
       const aiMessage = createMessageByAI({
         id: uuid(6),
-        relationId: replayCommandProps.relationId,
+        relationId,
         text: <StreamReplyRender prompt={userInput.message} params={replayCommandProps} />,
         className: styles.chat_with_more_actions,
       });
@@ -484,7 +489,7 @@
       }
       setLoading(false);
     },
-    [messageListData, chatRenderRegistry, chatRenderRegistry.chatAIRoleRender],
+    [messageListData, aiChatService, chatRenderRegistry, chatRenderRegistry.chatAIRoleRender],
   );
 
   const handleClear = React.useCallback(() => {
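In short, the chat view now starts the stream itself (setLatestSessionId followed by messageWithStream) before it mounts the reply message, and the injected aiChatService joins the useCallback dependency list. A minimal sketch of that shape, assuming a plain React setup; fakeStream, ReplyView, and ChatDemo are hypothetical stand-ins, not OpenSumi APIs:

import React, { useCallback, useState } from 'react';

// Hypothetical stand-in for a streaming chat service: it pushes a few chunks
// to the callback on a timer.
const fakeStream = (prompt: string, onChunk: (chunk: string) => void) => {
  ['Thinking', ' about: ', prompt].forEach((chunk, i) => setTimeout(() => onChunk(chunk), i * 300));
};

// Display-only child: it renders whatever text it is given and starts nothing itself.
const ReplyView = ({ text }: { text: string }) => <pre>{text || '...'}</pre>;

export const ChatDemo = () => {
  const [reply, setReply] = useState('');

  // The request is started here, in the memoized handler, rather than inside a
  // child effect; the state setter is stable and fakeStream is a module
  // constant, so the dependency list can stay empty.
  const handleReply = useCallback((prompt: string) => {
    setReply('');
    fakeStream(prompt, (chunk) => setReply((prev) => prev + chunk));
  }, []);

  return (
    <div>
      <button onClick={() => handleReply('hello')}>Send</button>
      <ReplyView text={reply} />
    </div>
  );
};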
2 changes: 1 addition & 1 deletion packages/ai-native/src/browser/components/ChatThinking.tsx
@@ -123,7 +123,7 @@ export const ChatThinkingResult = ({
     }
 
     return children;
-  }, [status, message, children]);
+  }, [status, message, hasMessage, children]);
 
   const isRenderRegenerate = useMemo(() => {
     if (isUndefined(showRegenerate)) {
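The fix here is one line: hasMessage joins the useMemo dependency list, so the memoized result recomputes when it changes instead of holding the value from an earlier render. A reduced illustration of that failure mode, assuming a hypothetical useThinkingLabel hook; the names and status strings are placeholders:

import { useMemo } from 'react';

// Hypothetical hook computing what a finished "thinking" block should display.
export const useThinkingLabel = (status: string, message: string, hasMessage: boolean) => {
  // If hasMessage were missing from the array below, toggling it would not
  // recompute the value, and the UI would keep showing the stale branch.
  return useMemo(() => {
    if (status === 'DONE' && !hasMessage) {
      return 'No answer was produced';
    }
    return message;
  }, [status, message, hasMessage]);
};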
19 changes: 13 additions & 6 deletions packages/ai-native/src/browser/components/StreamMsg.tsx
@@ -1,5 +1,5 @@
 import hljs from 'highlight.js';
-import React, { useCallback, useEffect } from 'react';
+import React, { useCallback, useEffect, useMemo } from 'react';
 
 import { DisposableCollection, useInjectable } from '@opensumi/ide-core-browser';
 import { IAIReporter, localize } from '@opensumi/ide-core-common';
@@ -20,6 +20,7 @@ interface IStreamMsgWrapperProps {
 
 export const StreamMsgWrapper = (props: IStreamMsgWrapperProps) => {
   const { sessionId, prompt, startTime = 0, onRegenerate, renderContent } = props;
+
   const [chunk, setChunk] = React.useState('');
   const [content, setContent] = React.useState<string>('');
   const [isError, setIsError] = React.useState<boolean>(false);
@@ -112,11 +113,17 @@ export const StreamMsgWrapper = (props: IStreamMsgWrapperProps) => {
     props.onStop?.();
   };
 
-  return status === EMsgStreamStatus.THINKING && msgStreamManager.currentSessionId === sessionId ? (
-    <ChatThinking status={status} message={content} onStop={onStop}>
-      {renderMsgList()}
-    </ChatThinking>
-  ) : (
+  const isThinking = useMemo(() => status === EMsgStreamStatus.THINKING && msgStreamManager.currentSessionId === sessionId, [status, sessionId, msgStreamManager.currentSessionId]);
+
+  if (isThinking) {
+    return (
+      <ChatThinking status={status} message={content} onStop={onStop}>
+        {renderMsgList()}
+      </ChatThinking>
+    );
+  }
+
+  return (
     <ChatThinkingResult status={status} message={content} onRegenerate={handleRegenerate} sessionId={sessionId}>
       {renderMsgList()}
     </ChatThinkingResult>
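Structurally, the render path moves from a single ternary to a memoized isThinking flag plus an early return, which keeps the two JSX branches flat. A small sketch of the same restructuring on a simplified component; Spinner, Result, StreamView, and the status strings are placeholders, not the real OpenSumi components:

import React, { useMemo } from 'react';

// Placeholder components for the two branches.
const Spinner = () => <span>thinking...</span>;
const Result = ({ text }: { text: string }) => <p>{text}</p>;

export const StreamView = ({ status, text }: { status: 'THINKING' | 'DONE'; text: string }) => {
  // Memoize the branch condition once, then branch with an early return
  // instead of nesting both JSX trees inside a ternary.
  const isThinking = useMemo(() => status === 'THINKING', [status]);

  if (isThinking) {
    return <Spinner />;
  }

  return <Result text={text} />;
};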
9 changes: 2 additions & 7 deletions
@@ -17,23 +17,18 @@ export interface IReplayComponentParam {
 // 流式输出渲染组件
 export const StreamReplyRender = (props: { prompt: string; params: IReplayComponentParam }) => {
   const { prompt, params } = props;
+  const { relationId, renderContent } = params;
 
   const aiChatService = useInjectable<ChatInternalService>(IChatInternalService);
 
-  const { relationId, renderContent } = params;
-
   const send = React.useCallback(
     (isRetry = false) => {
       aiChatService.setLatestSessionId(relationId);
       aiChatService.messageWithStream(prompt, isRetry ? { enableGptCache: false } : {}, relationId);
     },
-    [aiChatService],
+    [aiChatService, relationId, prompt],
   );
 
-  React.useEffect(() => {
-    send();
-  }, []);
-
   return (
     <StreamMsgWrapper
       sessionId={relationId}
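The mount-time useEffect that fired send() is removed (the initial send now happens in handleReply in chat.view.tsx above), and the retry callback's dependency list grows from [aiChatService] to [aiChatService, relationId, prompt], since both values are read inside it. A reduced sketch of that dependency rule; sendMessage and RetryButton are hypothetical stand-ins for the injected chat service and the component:

import React, { useCallback } from 'react';

// Hypothetical transport standing in for the injected chat service.
const sendMessage = (prompt: string, relationId: string, retry: boolean) =>
  console.log(`send "${prompt}" to ${relationId} (retry=${retry})`);

export const RetryButton = ({ prompt, relationId }: { prompt: string; relationId: string }) => {
  // prompt and relationId are read inside the callback, so they belong in the
  // dependency array; without them, a re-render with new props would keep
  // resending the old prompt to the old relation id.
  const send = useCallback((isRetry = false) => sendMessage(prompt, relationId, isRetry), [prompt, relationId]);

  return <button onClick={() => send(true)}>Regenerate</button>;
};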
