From a8941326dcfec2ba595b5fbf70fd95e881f4185e Mon Sep 17 00:00:00 2001
From: kangfenmao
Date: Sun, 9 Mar 2025 13:37:37 +0800
Subject: [PATCH] fix: Improve MCP tool response handling and logging

- Add more descriptive console logging for tool call responses
- Use cloneDeep when storing MCP tool responses to prevent reference issues
- Simplify upsertMCPToolResponse method calls
---
 src/renderer/src/providers/OpenAIProvider.ts | 25 ++++++--------------
 src/renderer/src/services/ApiService.ts      |  4 ++--
 2 files changed, 9 insertions(+), 20 deletions(-)

diff --git a/src/renderer/src/providers/OpenAIProvider.ts b/src/renderer/src/providers/OpenAIProvider.ts
index 9c828fa149..0d893305fc 100644
--- a/src/renderer/src/providers/OpenAIProvider.ts
+++ b/src/renderer/src/providers/OpenAIProvider.ts
@@ -385,30 +385,19 @@ export default class OpenAIProvider extends BaseProvider {
         continue
       }

-      upsertMCPToolResponse(
-        toolResponses,
-        {
-          tool: mcpTool,
-          status: 'invoking'
-        },
-        onChunk
-      )
+      upsertMCPToolResponse(toolResponses, { tool: mcpTool, status: 'invoking' }, onChunk)
+
       const toolCallResponse = await callMCPTool(mcpTool)
-      console.log(toolCallResponse)
+
+      console.log('[OpenAIProvider] toolCallResponse', toolCallResponse)
+
       reqMessages.push({
         role: 'tool',
         content: toolCallResponse.content,
         tool_call_id: toolCall.id
       } as ChatCompletionToolMessageParam)
-      upsertMCPToolResponse(
-        toolResponses,
-        {
-          tool: mcpTool,
-          status: 'done',
-          response: toolCallResponse
-        },
-        onChunk
-      )
+
+      upsertMCPToolResponse(toolResponses, { tool: mcpTool, status: 'done', response: toolCallResponse }, onChunk)
     }

     const newStream = await this.sdk.chat.completions
diff --git a/src/renderer/src/services/ApiService.ts b/src/renderer/src/services/ApiService.ts
index 00421d1936..4af7dd4c61 100644
--- a/src/renderer/src/services/ApiService.ts
+++ b/src/renderer/src/services/ApiService.ts
@@ -5,7 +5,7 @@ import { setGenerating } from '@renderer/store/runtime'
 import { Assistant, Message, Model, Provider, Suggestion } from '@renderer/types'
 import { addAbortController } from '@renderer/utils/abortController'
 import { formatMessageError } from '@renderer/utils/error'
-import { findLast, isEmpty } from 'lodash'
+import { cloneDeep, findLast, isEmpty } from 'lodash'

 import AiProvider from '../providers/AiProvider'
 import {
@@ -99,7 +99,7 @@ export async function fetchChatCompletion({
   }

   if (mcpToolResponse) {
-    message.metadata = { ...message.metadata, mcpTools: mcpToolResponse }
+    message.metadata = { ...message.metadata, mcpTools: cloneDeep(mcpToolResponse) }
   }

   // Handle citations from Perplexity API
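
Note (illustration only, not part of the patch): a minimal TypeScript sketch of the
"reference issues" the cloneDeep change guards against, assuming toolResponses is the
same array that upsertMCPToolResponse keeps mutating while the stream is processed.
The MCPToolResponse shape below is simplified for the example.

    import { cloneDeep } from 'lodash'

    interface MCPToolResponse {
      tool: { name: string }
      status: 'invoking' | 'done'
    }

    // The provider mutates this array in place as tool calls progress.
    const toolResponses: MCPToolResponse[] = [{ tool: { name: 'search' }, status: 'invoking' }]

    // Storing the live reference: later mutations leak into the stored metadata,
    // since the spread on message.metadata is only a shallow copy.
    const byReference = { mcpTools: toolResponses }

    // Storing a deep copy: the metadata keeps a snapshot frozen at this point.
    const byClone = { mcpTools: cloneDeep(toolResponses) }

    // Simulate the next upsert while the request is still streaming.
    toolResponses[0].status = 'done'

    console.log(byReference.mcpTools[0].status) // 'done'     -- snapshot changed underneath us
    console.log(byClone.mcpTools[0].status)     // 'invoking' -- snapshot preserved

The same reasoning applies to any store that expects immutable updates: handing it the
array that is still being mutated can surface as stale or unexpectedly changing
message.metadata.mcpTools.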