fix: empty response when using MCP via function calling (#8296)

* feat: 添加日志记录以调试中间件处理流程

在多个中间件中添加日志记录以跟踪chunk处理流程
在AiProvider中添加日志记录以调试中间件移除逻辑

* fix(openai): 修复tool_call chunk被跳过的问题

添加对choice.delta.content为null情况的处理
同时添加日志输出用于调试chunk数据

* fix(openai): 修复流式响应中空内容判断逻辑

* fix(openai): 修复流式响应中tool_calls内容判断逻辑
This commit is contained in:
Phantom 2025-07-19 23:23:53 +08:00 committed by GitHub
parent 2e77792042
commit 0149cfbd21
No known key found for this signature in database
GPG Key ID: B5690EEEBB952194
7 changed files with 18 additions and 0 deletions

View File

@ -692,6 +692,7 @@ export class OpenAIAPIClient extends OpenAIBaseClient<
return (context: ResponseChunkTransformerContext) => ({
async transform(chunk: OpenAISdkRawChunk, controller: TransformStreamDefaultController<GenericChunk>) {
// 持续更新usage信息
logger.silly('chunk', chunk)
if (chunk.usage) {
lastUsageInfo = {
prompt_tokens: chunk.usage.prompt_tokens || 0,
@ -714,6 +715,7 @@ export class OpenAIAPIClient extends OpenAIBaseClient<
choice.delta &&
Object.keys(choice.delta).length > 0 &&
(!('content' in choice.delta) ||
(choice.delta.tool_calls && choice.delta.tool_calls.length > 0) ||
(typeof choice.delta.content === 'string' && choice.delta.content !== '') ||
(typeof (choice.delta as any).reasoning_content === 'string' &&
(choice.delta as any).reasoning_content !== '') ||

View File

@ -77,34 +77,45 @@ export default class AiProvider {
.add(MiddlewareRegistry[ImageGenerationMiddlewareName])
} else {
// Existing logic for other models
logger.silly('Builder Params', params)
if (!params.enableReasoning) {
// 这里注释掉不会影响正常的关闭思考,可忽略不计的性能下降
// builder.remove(ThinkingTagExtractionMiddlewareName)
builder.remove(ThinkChunkMiddlewareName)
logger.silly('ThinkChunkMiddleware is removed')
}
// 注意用client判断会导致typescript类型收窄
if (!(this.apiClient instanceof OpenAIAPIClient) && !(this.apiClient instanceof OpenAIResponseAPIClient)) {
logger.silly('ThinkingTagExtractionMiddleware is removed')
builder.remove(ThinkingTagExtractionMiddlewareName)
}
if (!(this.apiClient instanceof AnthropicAPIClient) && !(this.apiClient instanceof OpenAIResponseAPIClient)) {
logger.silly('RawStreamListenerMiddleware is removed')
builder.remove(RawStreamListenerMiddlewareName)
}
if (!params.enableWebSearch) {
logger.silly('WebSearchMiddleware is removed')
builder.remove(WebSearchMiddlewareName)
}
if (!params.mcpTools?.length) {
builder.remove(ToolUseExtractionMiddlewareName)
logger.silly('ToolUseExtractionMiddleware is removed')
builder.remove(McpToolChunkMiddlewareName)
logger.silly('McpToolChunkMiddleware is removed')
}
if (isEnabledToolUse(params.assistant) && isFunctionCallingModel(model)) {
builder.remove(ToolUseExtractionMiddlewareName)
logger.silly('ToolUseExtractionMiddleware is removed')
}
if (params.callType !== 'chat') {
logger.silly('AbortHandlerMiddleware is removed')
builder.remove(AbortHandlerMiddlewareName)
}
if (params.callType === 'test') {
builder.remove(ErrorHandlerMiddlewareName)
logger.silly('ErrorHandlerMiddleware is removed')
builder.remove(FinalChunkConsumerMiddlewareName)
logger.silly('FinalChunkConsumerMiddleware is removed')
}
}

View File

@ -100,6 +100,7 @@ function createToolHandlingTransform(
async transform(chunk: GenericChunk, controller) {
try {
// 处理MCP工具进展chunk
logger.silly('chunk', chunk)
if (chunk.type === ChunkType.MCP_TOOL_CREATED) {
const createdChunk = chunk as MCPToolCreatedChunk

View File

@ -43,6 +43,7 @@ export const TextChunkMiddleware: CompletionsMiddleware =
const enhancedTextStream = resultFromUpstream.pipeThrough(
new TransformStream<GenericChunk, GenericChunk>({
transform(chunk: GenericChunk, controller) {
logger.silly('chunk', chunk)
if (chunk.type === ChunkType.TEXT_DELTA) {
accumulatedTextContent += chunk.text

View File

@ -72,6 +72,7 @@ export const ThinkingTagExtractionMiddleware: CompletionsMiddleware =
const processedStream = resultFromUpstream.pipeThrough(
new TransformStream<GenericChunk, GenericChunk>({
transform(chunk: GenericChunk, controller) {
logger.silly('chunk', chunk)
if (chunk.type === ChunkType.TEXT_DELTA) {
const textChunk = chunk as TextDeltaChunk

View File

@ -69,6 +69,7 @@ function createToolUseExtractionTransform(
async transform(chunk: GenericChunk, controller) {
try {
// 处理文本内容,检测工具使用标签
logger.silly('chunk', chunk)
if (chunk.type === ChunkType.TEXT_DELTA) {
const textChunk = chunk as TextDeltaChunk

View File

@ -398,6 +398,7 @@ export async function fetchChatCompletion({
filterEmptyMessages(filterContextMessages(takeRight(filteredMessages, contextCount + 2))) // 取原来几个provider的最大值
)
// FIXME: qwen3即使关闭思考仍然会导致enableReasoning的结果为true
const enableReasoning =
((isSupportedThinkingTokenModel(model) || isSupportedReasoningEffortModel(model)) &&
assistant.settings?.reasoning_effort !== undefined) ||