Mirror of https://github.com/CherryHQ/cherry-studio.git
fix: smartblock update not persisted to db (#8046)
* chore(version): 1.4.10

* feat: enhance ThinkingTagExtractionMiddleware and update smartBlockUpdate function

  - Added support for THINKING_START and TEXT_START chunk types in ThinkingTagExtractionMiddleware.
  - Updated the smartBlockUpdate function to include an isComplete parameter for better block state management.
  - Ensured proper handling of block updates based on completion status across various message types.

* fix: refine block update logic in messageThunk

  - Adjusted conditions for canceling throttled block updates based on block type changes and completion status.
  - Improved handling of block updates to ensure accurate state management during message processing.

* chore: add comment

* fix: update message block status handling

  - Changed the status of image blocks from STREAMING to PENDING to better reflect the processing state.
  - Refined logic in OpenAIResponseAPIClient to ensure user messages are correctly handled based on assistant message content.
  - Improved rendering conditions in the ImageBlock component for a better user experience during image loading.

---------

Co-authored-by: kangfenmao <kangfenmao@qq.com>
commit 8340922263 (parent a93cab6b43)
@@ -367,16 +367,15 @@ export class OpenAIResponseAPIClient extends OpenAIBaseClient<
       (m) => (m as OpenAI.Responses.EasyInputMessage).role === 'assistant'
     ) as OpenAI.Responses.EasyInputMessage
     const finalUserMessage = userMessage.pop() as OpenAI.Responses.EasyInputMessage
-    if (
-      finalAssistantMessage &&
-      Array.isArray(finalAssistantMessage.content) &&
-      finalUserMessage &&
-      Array.isArray(finalUserMessage.content)
-    ) {
-      finalAssistantMessage.content = [...finalAssistantMessage.content, ...finalUserMessage.content]
-      // The previous assistant message's content (including images and files) is intentionally sent as a user message
-      userMessage = [{ ...finalAssistantMessage, role: 'user' } as OpenAI.Responses.EasyInputMessage]
+    if (finalUserMessage && Array.isArray(finalUserMessage.content)) {
+      if (finalAssistantMessage && Array.isArray(finalAssistantMessage.content)) {
+        finalAssistantMessage.content = [...finalAssistantMessage.content, ...finalUserMessage.content]
+        // The previous assistant message's content (including images and files) is intentionally sent as a user message
+        userMessage = [{ ...finalAssistantMessage, role: 'user' } as OpenAI.Responses.EasyInputMessage]
+      } else {
+        userMessage.push(finalUserMessage)
+      }
     }

     // 4. Final request messages
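The key behavioral change in this hunk is that the trailing user message is no longer dropped when there is no assistant message with array content to merge into. Below is a minimal standalone sketch of the restructured guard; the message type is simplified and the helper name mergeTrailingUserContent is illustrative, not part of the client.

```ts
// Minimal sketch of the restructured guard (not the actual client method).
// Only the role/content fields used by the guard are modeled.
type SketchMessage = { role: 'user' | 'assistant'; content: string | unknown[] }

function mergeTrailingUserContent(
  userMessage: SketchMessage[],
  finalAssistantMessage: SketchMessage | undefined
): SketchMessage[] {
  const finalUserMessage = userMessage.pop()
  if (finalUserMessage && Array.isArray(finalUserMessage.content)) {
    if (finalAssistantMessage && Array.isArray(finalAssistantMessage.content)) {
      // Fold the trailing user parts into the assistant parts and resend everything as a user message
      finalAssistantMessage.content = [...finalAssistantMessage.content, ...finalUserMessage.content]
      return [{ ...finalAssistantMessage, role: 'user' }]
    }
    // No assistant message to merge into: keep the user message instead of silently dropping it
    userMessage.push(finalUserMessage)
  }
  return userMessage
}
```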
@@ -1,5 +1,11 @@
 import { Model } from '@renderer/types'
-import { ChunkType, TextDeltaChunk, ThinkingCompleteChunk, ThinkingDeltaChunk } from '@renderer/types/chunk'
+import {
+  ChunkType,
+  TextDeltaChunk,
+  ThinkingCompleteChunk,
+  ThinkingDeltaChunk,
+  ThinkingStartChunk
+} from '@renderer/types/chunk'
 import { TagConfig, TagExtractor } from '@renderer/utils/tagExtraction'
 import Logger from 'electron-log/renderer'
@@ -59,6 +65,8 @@ export const ThinkingTagExtractionMiddleware: CompletionsMiddleware =
     let hasThinkingContent = false
     let thinkingStartTime = 0

+    let isFirstTextChunk = true
+
     const processedStream = resultFromUpstream.pipeThrough(
       new TransformStream<GenericChunk, GenericChunk>({
         transform(chunk: GenericChunk, controller) {
@@ -87,6 +95,9 @@ export const ThinkingTagExtractionMiddleware: CompletionsMiddleware =
             if (!hasThinkingContent) {
               hasThinkingContent = true
               thinkingStartTime = Date.now()
+              controller.enqueue({
+                type: ChunkType.THINKING_START
+              } as ThinkingStartChunk)
             }

             if (extractionResult.content?.trim()) {
@@ -98,6 +109,12 @@ export const ThinkingTagExtractionMiddleware: CompletionsMiddleware =
                 controller.enqueue(thinkingDeltaChunk)
               }
             } else {
+              if (isFirstTextChunk) {
+                controller.enqueue({
+                  type: ChunkType.TEXT_START
+                })
+                isFirstTextChunk = false
+              }
               // Emit the cleaned-up text content
               const cleanTextChunk: TextDeltaChunk = {
                 ...textChunk,
@@ -107,7 +124,7 @@ export const ThinkingTagExtractionMiddleware: CompletionsMiddleware =
               }
             }
           }
-        } else {
+        } else if (chunk.type !== ChunkType.TEXT_START) {
           // Other chunk types are passed through as-is (including THINKING_DELTA, THINKING_COMPLETE, etc.)
           controller.enqueue(chunk)
         }
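Taken together, the middleware changes make the extractor announce each phase exactly once: a THINKING_START chunk the first time tag content appears, and a TEXT_START chunk before the first cleaned text delta, while upstream TEXT_START chunks are swallowed so the marker is not duplicated. A simplified sketch of that pattern follows; the chunk type is pared down and splitThinking stands in for the TagExtractor logic.

```ts
// Simplified sketch of the "emit START chunks once" pattern; not the actual middleware.
type Chunk = { type: 'THINKING_START' | 'THINKING_DELTA' | 'TEXT_START' | 'TEXT_DELTA'; text?: string }

function makeStartAwareTransform(splitThinking: (text: string) => { thinking?: string; text?: string }) {
  let hasThinkingContent = false
  let isFirstTextChunk = true
  return new TransformStream<Chunk, Chunk>({
    transform(chunk, controller) {
      if (chunk.type === 'TEXT_DELTA' && chunk.text) {
        const { thinking, text } = splitThinking(chunk.text)
        if (thinking) {
          if (!hasThinkingContent) {
            hasThinkingContent = true
            controller.enqueue({ type: 'THINKING_START' })
          }
          controller.enqueue({ type: 'THINKING_DELTA', text: thinking })
        }
        if (text) {
          if (isFirstTextChunk) {
            controller.enqueue({ type: 'TEXT_START' })
            isFirstTextChunk = false
          }
          controller.enqueue({ type: 'TEXT_DELTA', text })
        }
      } else if (chunk.type !== 'TEXT_START') {
        // Upstream TEXT_START chunks are dropped so the single TEXT_START above is not duplicated
        controller.enqueue(chunk)
      }
    }
  })
}
```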
@@ -9,9 +9,8 @@ interface Props {
 }

 const ImageBlock: React.FC<Props> = ({ block }) => {
-  if (block.status === MessageBlockStatus.STREAMING || block.status === MessageBlockStatus.PROCESSING)
-    return <Skeleton.Image active style={{ width: 200, height: 200 }} />
-  if (block.status === MessageBlockStatus.SUCCESS) {
+  if (block.status === MessageBlockStatus.PENDING) return <Skeleton.Image active style={{ width: 200, height: 200 }} />
+  if (block.status === MessageBlockStatus.STREAMING || block.status === MessageBlockStatus.SUCCESS) {
     const images = block.metadata?.generateImageResponse?.images?.length
       ? block.metadata?.generateImageResponse?.images
       : block?.file?.path
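With the new status mapping, a PENDING image block renders a skeleton while a STREAMING or SUCCESS block renders whatever image data is already available. The sketch below mirrors that branch structure with simplified props and string-literal statuses; it is not the real component.

```tsx
// Simplified sketch of the status-to-render mapping; the block shape and <img> usage are illustrative.
import React from 'react'
import { Skeleton } from 'antd'

type SketchImageBlock = {
  status: 'pending' | 'streaming' | 'success' | 'error'
  src?: string
}

const ImageBlockSketch: React.FC<{ block: SketchImageBlock }> = ({ block }) => {
  // While the block is only a placeholder, show a skeleton
  if (block.status === 'pending') return <Skeleton.Image active style={{ width: 200, height: 200 }} />
  // Once data starts streaming (or is complete), render whatever image data is available
  if ((block.status === 'streaming' || block.status === 'success') && block.src) {
    return <img src={block.src} alt="generated" style={{ maxWidth: 500 }} />
  }
  return null
}

export default ImageBlockSketch
```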
@@ -344,14 +344,27 @@ const fetchAndProcessAssistantResponseImpl = async (
    * Smart update strategy: based on block-type continuity, decide between throttled and immediate updates
    * - Consecutive blocks of the same type: throttle (fewer re-renders)
    * - Block type switch: update immediately (keeps state correct)
    * @param blockId block ID
    * @param changes block changes
    * @param blockType block type
+   * @param isComplete whether the block is complete; if so, the block update must be saved to redux
    */
-  const smartBlockUpdate = (blockId: string, changes: Partial<MessageBlock>, blockType: MessageBlockType) => {
+  const smartBlockUpdate = (
+    blockId: string,
+    changes: Partial<MessageBlock>,
+    blockType: MessageBlockType,
+    isComplete: boolean = false
+  ) => {
     const isBlockTypeChanged = currentActiveBlockType !== null && currentActiveBlockType !== blockType

-    if (isBlockTypeChanged) {
-      if (lastBlockId && lastBlockId !== blockId) {
+    if (isBlockTypeChanged || isComplete) {
+      // If the block type changed, cancel the previous block's throttled update and save the update to redux
+      // (it may already have been cancelled by that block's own onComplete handler)
+      if (isBlockTypeChanged && lastBlockId) {
         cancelThrottledBlockUpdate(lastBlockId)
       }
+      // If the current block is complete, cancel its throttled update and save the update to redux,
+      // so that a pending streaming update cannot overwrite the completed state
+      if (isComplete) {
+        cancelThrottledBlockUpdate(blockId)
+      }
       dispatch(updateOneBlock({ id: blockId, changes }))
       saveUpdatedBlockToDB(blockId, assistantMsgId, topicId, getState)
     } else {
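The essence of the fix is visible in this hunk: a completed block's final write must skip and cancel the throttled path, otherwise a throttled update still in flight can land afterwards and leave the streaming state as the one that gets persisted. Below is a minimal sketch of that flush-on-complete pattern, using one lodash throttle per block; dispatchUpdate and persistBlock are placeholders for the redux dispatch and DB save that messageThunk actually uses, and the 150 ms wait is illustrative.

```ts
// Minimal sketch of the flush-on-complete pattern (not the messageThunk implementation).
import { throttle } from 'lodash'

type BlockChanges = Record<string, unknown>
type DispatchUpdate = (blockId: string, changes: BlockChanges) => void

const throttledUpdaters = new Map<string, ReturnType<typeof throttle>>()

function throttledBlockUpdate(blockId: string, changes: BlockChanges, dispatchUpdate: DispatchUpdate): void {
  let updater = throttledUpdaters.get(blockId)
  if (!updater) {
    updater = throttle((c: BlockChanges) => dispatchUpdate(blockId, c), 150)
    throttledUpdaters.set(blockId, updater)
  }
  updater(changes)
}

function cancelThrottledBlockUpdate(blockId: string): void {
  throttledUpdaters.get(blockId)?.cancel()
  throttledUpdaters.delete(blockId)
}

function smartBlockUpdateSketch(
  blockId: string,
  changes: BlockChanges,
  isComplete: boolean,
  dispatchUpdate: DispatchUpdate,
  persistBlock: (blockId: string) => void
): void {
  if (isComplete) {
    // Drop any queued throttled update so it cannot overwrite the final state,
    // then write the final state immediately and persist it.
    cancelThrottledBlockUpdate(blockId)
    dispatchUpdate(blockId, changes)
    persistBlock(blockId)
  } else {
    // Streaming deltas take the throttled path to limit re-renders.
    throttledBlockUpdate(blockId, changes, dispatchUpdate)
  }
}
```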
@@ -464,7 +477,7 @@ const fetchAndProcessAssistantResponseImpl = async (
           content: finalText,
           status: MessageBlockStatus.SUCCESS
         }
-        smartBlockUpdate(mainTextBlockId, changes, MessageBlockType.MAIN_TEXT)
+        smartBlockUpdate(mainTextBlockId, changes, MessageBlockType.MAIN_TEXT, true)
         mainTextBlockId = null
       } else {
         console.warn(
@@ -512,7 +525,7 @@ const fetchAndProcessAssistantResponseImpl = async (
           status: MessageBlockStatus.SUCCESS,
           thinking_millsec: final_thinking_millsec
         }
-        smartBlockUpdate(thinkingBlockId, changes, MessageBlockType.THINKING)
+        smartBlockUpdate(thinkingBlockId, changes, MessageBlockType.THINKING, true)
       } else {
         console.warn(
           `[onThinkingComplete] Received thinking.complete but last block was not THINKING (was ${lastBlockType}) or lastBlockId is null.`
@@ -591,7 +604,7 @@ const fetchAndProcessAssistantResponseImpl = async (
         if (finalStatus === MessageBlockStatus.ERROR) {
           changes.error = { message: `Tool execution failed/error`, details: toolResponse.response }
         }
-        smartBlockUpdate(existingBlockId, changes, MessageBlockType.TOOL)
+        smartBlockUpdate(existingBlockId, changes, MessageBlockType.TOOL, true)
       } else {
         console.warn(
           `[onToolCallComplete] Received unhandled tool status: ${toolResponse.status} for ID: ${toolResponse.id}`
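Each completion handler above now passes true so its final SUCCESS write is flushed immediately. The standalone snippet below (illustrative timings, not project code) shows why the trailing throttled call has to be cancelled: with lodash's default trailing invocation, a queued streaming update would otherwise fire after the final write.

```ts
// Tiny demonstration of the overwrite the fix prevents; timings are illustrative.
import { throttle } from 'lodash'

let lastWrite = ''
const write = (s: string) => {
  lastWrite = s
}
const throttledWrite = throttle(write, 100)

throttledWrite('status: streaming (delta 1)') // leading edge: runs immediately
throttledWrite('status: streaming (delta 2)') // queued for the trailing edge (~100 ms later)

write('status: success') // final, immediate write on completion
// throttledWrite.cancel() // <- the fix: without this, the queued 'streaming' write fires after 'success'

setTimeout(() => {
  console.log(lastWrite) // 'status: streaming (delta 2)' unless cancel() above is uncommented
}, 200)
```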
@@ -612,7 +625,7 @@ const fetchAndProcessAssistantResponseImpl = async (
           knowledge: externalToolResult.knowledge,
           status: MessageBlockStatus.SUCCESS
         }
-        smartBlockUpdate(citationBlockId, changes, MessageBlockType.CITATION)
+        smartBlockUpdate(citationBlockId, changes, MessageBlockType.CITATION, true)
       } else {
         console.error('[onExternalToolComplete] citationBlockId is null. Cannot update.')
       }
@@ -652,7 +665,7 @@ const fetchAndProcessAssistantResponseImpl = async (
           const mainTextChanges = {
             citationReferences: [...currentRefs, { blockId, citationBlockSource: llmWebSearchResult.source }]
           }
-          smartBlockUpdate(existingMainTextBlock.id, mainTextChanges, MessageBlockType.MAIN_TEXT)
+          smartBlockUpdate(existingMainTextBlock.id, mainTextChanges, MessageBlockType.MAIN_TEXT, true)
         }

         if (initialPlaceholderBlockId) {
@@ -678,7 +691,7 @@ const fetchAndProcessAssistantResponseImpl = async (
           const mainTextChanges = {
             citationReferences: [...currentRefs, { citationBlockId, citationBlockSource: llmWebSearchResult.source }]
           }
-          smartBlockUpdate(existingMainTextBlock.id, mainTextChanges, MessageBlockType.MAIN_TEXT)
+          smartBlockUpdate(existingMainTextBlock.id, mainTextChanges, MessageBlockType.MAIN_TEXT, true)
         }
         await handleBlockTransition(citationBlock, MessageBlockType.CITATION)
       }
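Both citation hunks append a reference to the main-text block immutably and mark the write complete so it is persisted right away. A small sketch of the append follows; the reference shape is reduced to the fields visible in the diff and is illustrative.

```ts
// Sketch of building the citationReferences changes object without mutating the existing array.
type CitationReference = { citationBlockId: string; citationBlockSource: string }

function withCitationReference(
  currentRefs: CitationReference[],
  citationBlockId: string,
  source: string
): { citationReferences: CitationReference[] } {
  // Copy the current references and append the new one, so redux receives a fresh array
  return { citationReferences: [...currentRefs, { citationBlockId, citationBlockSource: source }] }
}

// Usage, roughly mirroring the diff: the returned object is what gets passed to smartBlockUpdate(..., true)
const mainTextChanges = withCitationReference([], 'citation-block-1', 'websearch')
console.log(mainTextChanges)
```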
@@ -688,7 +701,7 @@ const fetchAndProcessAssistantResponseImpl = async (
           lastBlockType = MessageBlockType.IMAGE
           const initialChanges: Partial<MessageBlock> = {
             type: MessageBlockType.IMAGE,
-            status: MessageBlockStatus.STREAMING
+            status: MessageBlockStatus.PENDING
           }
           lastBlockType = MessageBlockType.IMAGE
           imageBlockId = initialPlaceholderBlockId
@@ -696,7 +709,7 @@ const fetchAndProcessAssistantResponseImpl = async (
           smartBlockUpdate(imageBlockId, initialChanges, MessageBlockType.IMAGE)
         } else if (!imageBlockId) {
           const imageBlock = createImageBlock(assistantMsgId, {
-            status: MessageBlockStatus.STREAMING
+            status: MessageBlockStatus.PENDING
           })
           imageBlockId = imageBlock.id
           await handleBlockTransition(imageBlock, MessageBlockType.IMAGE)
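These two hunks move the image placeholder from STREAMING to PENDING, so the block only reports STREAMING once partial image data actually arrives. The sketch below writes out the status flow as it appears in this diff; it is an illustration, not the app's full state machine.

```ts
// Illustrative status flow for an image block, inferred from this diff only.
enum SketchBlockStatus {
  PENDING = 'pending', // placeholder created before any image data (renders a skeleton)
  STREAMING = 'streaming', // partial image data received
  SUCCESS = 'success', // final image data received
  ERROR = 'error'
}

const allowedNext: Record<SketchBlockStatus, SketchBlockStatus[]> = {
  [SketchBlockStatus.PENDING]: [SketchBlockStatus.STREAMING, SketchBlockStatus.SUCCESS, SketchBlockStatus.ERROR],
  [SketchBlockStatus.STREAMING]: [SketchBlockStatus.SUCCESS, SketchBlockStatus.ERROR],
  [SketchBlockStatus.SUCCESS]: [],
  [SketchBlockStatus.ERROR]: []
}

function canTransition(from: SketchBlockStatus, to: SketchBlockStatus): boolean {
  return allowedNext[from].includes(to)
}

console.log(canTransition(SketchBlockStatus.PENDING, SketchBlockStatus.STREAMING)) // true
```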
@@ -710,7 +723,7 @@ const fetchAndProcessAssistantResponseImpl = async (
             metadata: { generateImageResponse: imageData },
             status: MessageBlockStatus.STREAMING
           }
-          smartBlockUpdate(imageBlockId, changes, MessageBlockType.IMAGE)
+          smartBlockUpdate(imageBlockId, changes, MessageBlockType.IMAGE, true)
         }
       },
       onImageGenerated: (imageData) => {
@@ -727,7 +740,7 @@ const fetchAndProcessAssistantResponseImpl = async (
             metadata: { generateImageResponse: imageData },
             status: MessageBlockStatus.SUCCESS
           }
-          smartBlockUpdate(imageBlockId, changes, MessageBlockType.IMAGE)
+          smartBlockUpdate(imageBlockId, changes, MessageBlockType.IMAGE, true)
         }
       } else {
         console.error('[onImageGenerated] Last block was not an Image block or ID is missing.')
@@ -775,7 +788,7 @@ const fetchAndProcessAssistantResponseImpl = async (
         const changes: Partial<MessageBlock> = {
           status: isErrorTypeAbort ? MessageBlockStatus.PAUSED : MessageBlockStatus.ERROR
         }
-        smartBlockUpdate(possibleBlockId, changes, MessageBlockType.MAIN_TEXT)
+        smartBlockUpdate(possibleBlockId, changes, lastBlockType!, true)
       }

       const errorBlock = createErrorBlock(assistantMsgId, serializableError, { status: MessageBlockStatus.SUCCESS })
@@ -817,7 +830,7 @@ const fetchAndProcessAssistantResponseImpl = async (
         const changes: Partial<MessageBlock> = {
           status: MessageBlockStatus.SUCCESS
         }
-        smartBlockUpdate(possibleBlockId, changes, lastBlockType!)
+        smartBlockUpdate(possibleBlockId, changes, lastBlockType!, true)
       }

       const endTime = Date.now()
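The last two hunks close out whatever block was active when the stream ends or fails: the terminal status is written under the block's actual lastBlockType (no longer hard-coded to MAIN_TEXT on the error path) and always with isComplete = true, so it is flushed past the throttle and saved. A rough sketch of that terminal-update pattern, with placeholder names for the messageThunk locals and callbacks, is below.

```ts
// Rough sketch of the terminal-update pattern at stream end / on error; names are placeholders.
type TerminalStatus = 'success' | 'paused' | 'error'

function closeOutLastBlock(
  possibleBlockId: string | null,
  lastBlockType: string | null,
  status: TerminalStatus,
  update: (blockId: string, changes: { status: TerminalStatus }, blockType: string, isComplete: boolean) => void
): void {
  if (!possibleBlockId || !lastBlockType) return
  // Use the actual type of the last active block and mark the write complete,
  // so the terminal status bypasses the throttle and is persisted.
  update(possibleBlockId, { status }, lastBlockType, true)
}
```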