From bfc3b0e54e1d82dda171974c8b162766a6e3dba7 Mon Sep 17 00:00:00 2001
From: Jason Young <44939412+farion1231@users.noreply.github.com>
Date: Mon, 21 Jul 2025 10:17:27 +0800
Subject: [PATCH] fix: persistent "Searching..." indicators for o3 model web search (#8328)

* fix: persistent "Searching..." indicators for o3 model web search

- Add missing LLM_WEB_SEARCH_IN_PROGRESS event in OpenAIAPIClient
- Prevent duplicate CitationBlock creation
- Ensure search status updates correctly after completion

Fixes #8307

* fix: prevent duplicate citation blocks in web search callbacks

- Add checks in onExternalToolInProgress and onLLMWebSearchInProgress
- Return early if citationBlockId already exists
- Fixes persistent "Searching..." indicator for o3 model web search
---
 .../src/aiCore/clients/openai/OpenAIApiClient.ts | 15 +++++++++++++++
 .../callbacks/citationCallbacks.ts               | 10 ++++++++++
 2 files changed, 25 insertions(+)

diff --git a/src/renderer/src/aiCore/clients/openai/OpenAIApiClient.ts b/src/renderer/src/aiCore/clients/openai/OpenAIApiClient.ts
index 199f3b60d3..f1e1e50cfe 100644
--- a/src/renderer/src/aiCore/clients/openai/OpenAIApiClient.ts
+++ b/src/renderer/src/aiCore/clients/openai/OpenAIApiClient.ts
@@ -563,6 +563,7 @@ export class OpenAIAPIClient extends OpenAIBaseClient<
   // Used in RawSdkChunkToGenericChunkMiddleware
   getResponseChunkTransformer(): ResponseChunkTransformer {
     let hasBeenCollectedWebSearch = false
+    let hasEmittedWebSearchInProgress = false
     const collectWebSearchData = (
       chunk: OpenAISdkRawChunk,
       contentSource: OpenAISdkRawContentSource,
@@ -769,6 +770,13 @@ export class OpenAIAPIClient extends OpenAIBaseClient<
           const webSearchData = collectWebSearchData(chunk, contentSource, context)
           if (webSearchData) {
+            // If the search in-progress event has not been emitted yet, emit it first
+            if (!hasEmittedWebSearchInProgress) {
+              controller.enqueue({
+                type: ChunkType.LLM_WEB_SEARCH_IN_PROGRESS
+              })
+              hasEmittedWebSearchInProgress = true
+            }
             controller.enqueue({
               type: ChunkType.LLM_WEB_SEARCH_COMPLETE,
               llm_web_search: webSearchData
@@ -833,6 +841,13 @@
           logger.debug(`Stream finished with reason: ${choice.finish_reason}`)
           const webSearchData = collectWebSearchData(chunk, contentSource, context)
           if (webSearchData) {
+            // If the search in-progress event has not been emitted yet, emit it first
+            if (!hasEmittedWebSearchInProgress) {
+              controller.enqueue({
+                type: ChunkType.LLM_WEB_SEARCH_IN_PROGRESS
+              })
+              hasEmittedWebSearchInProgress = true
+            }
             controller.enqueue({
               type: ChunkType.LLM_WEB_SEARCH_COMPLETE,
               llm_web_search: webSearchData
diff --git a/src/renderer/src/services/messageStreaming/callbacks/citationCallbacks.ts b/src/renderer/src/services/messageStreaming/callbacks/citationCallbacks.ts
index c1ee6f6b05..82a70a315d 100644
--- a/src/renderer/src/services/messageStreaming/callbacks/citationCallbacks.ts
+++ b/src/renderer/src/services/messageStreaming/callbacks/citationCallbacks.ts
@@ -22,6 +22,11 @@ export const createCitationCallbacks = (deps: CitationCallbacksDependencies) =>
   return {
     onExternalToolInProgress: async () => {
+      // Avoid creating a duplicate citation block
+      if (citationBlockId) {
+        logger.warn('[onExternalToolInProgress] Citation block already exists:', citationBlockId)
+        return
+      }
       const citationBlock = createCitationBlock(assistantMsgId, {}, { status: MessageBlockStatus.PROCESSING })
       citationBlockId = citationBlock.id
       await blockManager.handleBlockTransition(citationBlock, MessageBlockType.CITATION)
@@ -41,6 +46,11 @@ export const createCitationCallbacks = (deps: CitationCallbacksDependencies) =>
     },
     onLLMWebSearchInProgress: async () => {
+      // Avoid creating a duplicate citation block
+      if (citationBlockId) {
+        logger.warn('[onLLMWebSearchInProgress] Citation block already exists:', citationBlockId)
+        return
+      }
       if (blockManager.hasInitialPlaceholder) {
         // blockManager.lastBlockType = MessageBlockType.CITATION
         logger.debug('blockManager.initialPlaceholderBlockId', blockManager.initialPlaceholderBlockId)