diff --git a/packages/shared/adapters/AiSdkToAnthropicSSE.ts b/packages/shared/adapters/AiSdkToAnthropicSSE.ts
index 08d45a09d7..a9f1508a6a 100644
--- a/packages/shared/adapters/AiSdkToAnthropicSSE.ts
+++ b/packages/shared/adapters/AiSdkToAnthropicSSE.ts
@@ -36,6 +36,8 @@ import type {
   Usage
 } from '@anthropic-ai/sdk/resources/messages'
 import { loggerService } from '@logger'
+import { reasoningCache } from '@main/apiServer/services/cache'
+import type { JSONValue } from 'ai'
 import { type FinishReason, type LanguageModelUsage, type TextStreamPart, type ToolSet } from 'ai'

 const logger = loggerService.withContext('AiSdkToAnthropicSSE')
@@ -74,7 +76,7 @@ export type SSEEventCallback = (event: RawMessageStreamEvent) => void
  * Interface for a simple cache that stores reasoning details
  */
 export interface ReasoningCacheInterface {
-  set(signature: string, details: unknown[]): void
+  set(signature: string, details: JSONValue): void
 }

 export interface AiSdkToAnthropicSSEOptions {
@@ -82,9 +84,6 @@
   messageId?: string
   inputTokens?: number
   onEvent: SSEEventCallback
-  /**
-   * Optional cache for storing reasoning details from providers like OpenRouter
-   */
   reasoningCache?: ReasoningCacheInterface
 }

@@ -186,6 +185,17 @@

       // === Tool Events ===
       case 'tool-call':
+        if (this.reasoningCache && chunk.providerMetadata?.google?.thoughtSignature) {
+          this.reasoningCache.set('google', chunk.providerMetadata?.google?.thoughtSignature)
+        }
+        // FIXME: bind by tool call id
+        if (
+          this.reasoningCache &&
+          chunk.providerMetadata?.openrouter?.reasoning_details &&
+          Array.isArray(chunk.providerMetadata.openrouter.reasoning_details)
+        ) {
+          this.reasoningCache.set('openrouter', chunk.providerMetadata.openrouter.reasoning_details)
+        }
         this.handleToolCall({
           type: 'tool-call',
           toolCallId: chunk.toolCallId,
@@ -205,13 +215,6 @@
         break

       case 'finish-step':
-        if (
-          this.reasoningCache &&
-          chunk.providerMetadata?.openrouter?.reasoning_details &&
-          Array.isArray(chunk.providerMetadata.openrouter.reasoning_details)
-        ) {
-          this.reasoningCache.set('openrouter', chunk.providerMetadata.openrouter.reasoning_details)
-        }
         if (chunk.finishReason === 'tool-calls') {
           this.state.stopReason = 'tool_use'
         }
@@ -552,6 +555,7 @@
     }

     this.onEvent(messageStopEvent)
+    reasoningCache.destroy()
   }

   /**
diff --git a/src/main/apiServer/services/cache.ts b/src/main/apiServer/services/cache.ts
index 765ab1e1b9..39dc5b1544 100644
--- a/src/main/apiServer/services/cache.ts
+++ b/src/main/apiServer/services/cache.ts
@@ -1,11 +1,12 @@
 import { loggerService } from '@logger'
+import type { JSONValue } from 'ai'

 const logger = loggerService.withContext('Cache')

 /**
  * Cache entry with TTL support
  */
 interface CacheEntry<T> {
-  details: T[]
+  details: T
   timestamp: number
 }
@@ -28,24 +29,19 @@
   /**
    * Store reasoning details by signature
    */
-  set(signature: string, details: T[]): void {
-    if (!signature || !details.length) return
+  set(signature: string, details: T): void {
+    if (!signature || !details) return

     this.cache.set(signature, {
       details,
       timestamp: Date.now()
     })
-
-    logger.debug('Cached reasoning details', {
-      signature: signature.substring(0, 20) + '...',
-      detailsCount: details.length
-    })
   }

   /**
    * Retrieve reasoning details by signature
    */
-  get(signature: string): T[] | undefined {
+  get(signature: string): T | undefined {
     const entry = this.cache.get(signature)
     if (!entry) return undefined

@@ -55,11 +51,6 @@ export class ReasoningCache<T> {
       return undefined
     }

-    logger.debug('Retrieved reasoning details from cache', {
-      signature: signature.substring(0, 20) + '...',
-      detailsCount: entry.details.length
-    })
-
     return entry.details
   }

@@ -113,4 +104,4 @@
 }

 // Singleton cache instance
-export const reasoningCache = new ReasoningCache()
+export const reasoningCache = new ReasoningCache<JSONValue>()
diff --git a/src/main/apiServer/services/unified-messages.ts b/src/main/apiServer/services/unified-messages.ts
index 063885d72c..af97941f2b 100644
--- a/src/main/apiServer/services/unified-messages.ts
+++ b/src/main/apiServer/services/unified-messages.ts
@@ -301,11 +301,24 @@ function convertAnthropicToAiMessages(params: MessageCreateParams): ModelMessage
           imageParts.push({ type: 'image', image: source.url })
         }
       } else if (block.type === 'tool_use') {
+        const options: ProviderOptions = {}
+        if (isGemini3ModelId(params.model)) {
+          if (reasoningCache.get('google')) {
+            options.google = {
+              thoughtSignature: MAGIC_STRING
+            }
+          } else if (reasoningCache.get('openrouter')) {
+            options.openrouter = {
+              reasoning_details: (reasoningCache.get('openrouter') as JSONValue[]) || []
+            }
+          }
+        }
         toolCallParts.push({
           type: 'tool-call',
           toolName: block.name,
           toolCallId: block.id,
-          input: block.input
+          input: block.input,
+          providerOptions: options
         })
       } else if (block.type === 'tool_result') {
         // Look up toolName from the pre-built map (covers cross-message references)