From 95c18d192a948297258d9e868140c44c3e157300 Mon Sep 17 00:00:00 2001
From: suyao
Date: Fri, 28 Nov 2025 13:42:33 +0800
Subject: [PATCH] feat: add reasoning cache support to AiSdkToAnthropicSSE and update unified-messages integration

---
 packages/shared/adapters/AiSdkToAnthropicSSE.ts | 17 +++++++++++++++--
 src/main/apiServer/services/unified-messages.ts |  3 ++-
 src/renderer/src/aiCore/provider/constants.ts   |  2 +-
 3 files changed, 18 insertions(+), 4 deletions(-)

diff --git a/packages/shared/adapters/AiSdkToAnthropicSSE.ts b/packages/shared/adapters/AiSdkToAnthropicSSE.ts
index 9b23638f48..08d45a09d7 100644
--- a/packages/shared/adapters/AiSdkToAnthropicSSE.ts
+++ b/packages/shared/adapters/AiSdkToAnthropicSSE.ts
@@ -36,7 +36,6 @@ import type {
   Usage
 } from '@anthropic-ai/sdk/resources/messages'
 import { loggerService } from '@logger'
-import { reasoningCache } from '@main/apiServer/services/cache'
 import { type FinishReason, type LanguageModelUsage, type TextStreamPart, type ToolSet } from 'ai'
 
 const logger = loggerService.withContext('AiSdkToAnthropicSSE')
@@ -71,11 +70,22 @@ interface AdapterState {
 
 export type SSEEventCallback = (event: RawMessageStreamEvent) => void
 
+/**
+ * Interface for a simple cache that stores reasoning details
+ */
+export interface ReasoningCacheInterface {
+  set(signature: string, details: unknown[]): void
+}
+
 export interface AiSdkToAnthropicSSEOptions {
   model: string
   messageId?: string
   inputTokens?: number
   onEvent: SSEEventCallback
+  /**
+   * Optional cache for storing reasoning details from providers like OpenRouter
+   */
+  reasoningCache?: ReasoningCacheInterface
 }
 
 /**
@@ -84,9 +94,11 @@ export interface AiSdkToAnthropicSSEOptions {
 export class AiSdkToAnthropicSSE {
   private state: AdapterState
   private onEvent: SSEEventCallback
+  private reasoningCache?: ReasoningCacheInterface
 
   constructor(options: AiSdkToAnthropicSSEOptions) {
     this.onEvent = options.onEvent
+    this.reasoningCache = options.reasoningCache
    this.state = {
      messageId: options.messageId || `msg_${Date.now()}_${Math.random().toString(36).substring(2, 11)}`,
      model: options.model,
@@ -194,10 +206,11 @@ export class AiSdkToAnthropicSSE {
 
       case 'finish-step':
         if (
+          this.reasoningCache &&
           chunk.providerMetadata?.openrouter?.reasoning_details &&
           Array.isArray(chunk.providerMetadata.openrouter.reasoning_details)
         ) {
-          reasoningCache.set('openrouter', chunk.providerMetadata?.openrouter?.reasoning_details)
+          this.reasoningCache.set('openrouter', chunk.providerMetadata.openrouter.reasoning_details)
         }
         if (chunk.finishReason === 'tool-calls') {
           this.state.stopReason = 'tool_use'
diff --git a/src/main/apiServer/services/unified-messages.ts b/src/main/apiServer/services/unified-messages.ts
index 51751202dd..298131460f 100644
--- a/src/main/apiServer/services/unified-messages.ts
+++ b/src/main/apiServer/services/unified-messages.ts
@@ -387,7 +387,8 @@ async function executeStream(config: ExecuteStreamConfig): Promise
-      onEvent: onEvent || (() => {})
+      onEvent: onEvent || (() => {}),
+      reasoningCache
     })
 
     // Execute stream - pass model object instead of string
diff --git a/src/renderer/src/aiCore/provider/constants.ts b/src/renderer/src/aiCore/provider/constants.ts
index 67cde7894d..57dad9fbc0 100644
--- a/src/renderer/src/aiCore/provider/constants.ts
+++ b/src/renderer/src/aiCore/provider/constants.ts
@@ -1 +1 @@
-export { COPILOT_DEFAULT_HEADERS, isCopilotResponsesModel } from '@shared/provider/constant'
+export { COPILOT_DEFAULT_HEADERS, COPILOT_EDITOR_VERSION, isCopilotResponsesModel } from '@shared/provider/constant'
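
Usage note (not part of the patch itself): the adapter no longer imports the main-process cache singleton; callers inject anything satisfying ReasoningCacheInterface through the new reasoningCache option, and the finish-step handler skips caching entirely when it is omitted. A minimal sketch, assuming an '@shared/adapters/AiSdkToAnthropicSSE' import path and a Map-backed stand-in cache (both are assumptions, not taken from the patch):

    import { AiSdkToAnthropicSSE, type ReasoningCacheInterface } from '@shared/adapters/AiSdkToAnthropicSSE'

    // Hypothetical in-memory cache; the real caller (unified-messages.ts) passes
    // its own reasoningCache service from the apiServer instead.
    const store = new Map<string, unknown[]>()
    const cache: ReasoningCacheInterface = {
      set(signature, details) {
        store.set(signature, details)
      }
    }

    const adapter = new AiSdkToAnthropicSSE({
      model: 'example-model', // placeholder model id
      onEvent: (event) => console.log(event.type),
      reasoningCache: cache // optional: leave it out and reasoning details are simply not cached
    })

Injecting the cache this way keeps the shared adapter free of main-process dependencies, which is presumably why the direct import of '@main/apiServer/services/cache' was removed.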