feat: enhance reasoning cache integration and update provider options in unified messages

This commit is contained in:
suyao 2025-11-28 16:37:58 +08:00
parent ed769ac4f7
commit e8dccf51fe
No known key found for this signature in database
3 changed files with 35 additions and 27 deletions

View File

@ -36,6 +36,8 @@ import type {
Usage Usage
} from '@anthropic-ai/sdk/resources/messages' } from '@anthropic-ai/sdk/resources/messages'
import { loggerService } from '@logger' import { loggerService } from '@logger'
import { reasoningCache } from '@main/apiServer/services/cache'
import type { JSONValue } from 'ai'
import { type FinishReason, type LanguageModelUsage, type TextStreamPart, type ToolSet } from 'ai' import { type FinishReason, type LanguageModelUsage, type TextStreamPart, type ToolSet } from 'ai'
const logger = loggerService.withContext('AiSdkToAnthropicSSE') const logger = loggerService.withContext('AiSdkToAnthropicSSE')
@ -74,7 +76,7 @@ export type SSEEventCallback = (event: RawMessageStreamEvent) => void
* Interface for a simple cache that stores reasoning details * Interface for a simple cache that stores reasoning details
*/ */
export interface ReasoningCacheInterface { export interface ReasoningCacheInterface {
set(signature: string, details: unknown[]): void set(signature: string, details: JSONValue): void
} }
export interface AiSdkToAnthropicSSEOptions { export interface AiSdkToAnthropicSSEOptions {
@ -82,9 +84,6 @@ export interface AiSdkToAnthropicSSEOptions {
messageId?: string messageId?: string
inputTokens?: number inputTokens?: number
onEvent: SSEEventCallback onEvent: SSEEventCallback
/**
* Optional cache for storing reasoning details from providers like OpenRouter
*/
reasoningCache?: ReasoningCacheInterface reasoningCache?: ReasoningCacheInterface
} }
@ -186,6 +185,17 @@ export class AiSdkToAnthropicSSE {
// === Tool Events === // === Tool Events ===
case 'tool-call': case 'tool-call':
if (this.reasoningCache && chunk.providerMetadata?.google?.thoughtSignature) {
this.reasoningCache.set('google', chunk.providerMetadata?.google?.thoughtSignature)
}
// FIXME: bind the cached reasoning details to the specific tool call ID instead of a provider-wide key
if (
this.reasoningCache &&
chunk.providerMetadata?.openrouter?.reasoning_details &&
Array.isArray(chunk.providerMetadata.openrouter.reasoning_details)
) {
this.reasoningCache.set('openrouter', chunk.providerMetadata.openrouter.reasoning_details)
}
this.handleToolCall({ this.handleToolCall({
type: 'tool-call', type: 'tool-call',
toolCallId: chunk.toolCallId, toolCallId: chunk.toolCallId,
@ -205,13 +215,6 @@ export class AiSdkToAnthropicSSE {
break break
case 'finish-step': case 'finish-step':
if (
this.reasoningCache &&
chunk.providerMetadata?.openrouter?.reasoning_details &&
Array.isArray(chunk.providerMetadata.openrouter.reasoning_details)
) {
this.reasoningCache.set('openrouter', chunk.providerMetadata.openrouter.reasoning_details)
}
if (chunk.finishReason === 'tool-calls') { if (chunk.finishReason === 'tool-calls') {
this.state.stopReason = 'tool_use' this.state.stopReason = 'tool_use'
} }
@ -552,6 +555,7 @@ export class AiSdkToAnthropicSSE {
} }
this.onEvent(messageStopEvent) this.onEvent(messageStopEvent)
reasoningCache.destroy()
} }
/** /**

View File

@ -1,11 +1,12 @@
import { loggerService } from '@logger' import { loggerService } from '@logger'
import type { JSONValue } from 'ai'
const logger = loggerService.withContext('Cache') const logger = loggerService.withContext('Cache')
/** /**
* Cache entry with TTL support * Cache entry with TTL support
*/ */
interface CacheEntry<T> { interface CacheEntry<T> {
details: T[] details: T
timestamp: number timestamp: number
} }
@ -28,24 +29,19 @@ export class ReasoningCache<T> {
/** /**
* Store reasoning details by signature * Store reasoning details by signature
*/ */
set(signature: string, details: T[]): void { set(signature: string, details: T): void {
if (!signature || !details.length) return if (!signature || !details) return
this.cache.set(signature, { this.cache.set(signature, {
details, details,
timestamp: Date.now() timestamp: Date.now()
}) })
logger.debug('Cached reasoning details', {
signature: signature.substring(0, 20) + '...',
detailsCount: details.length
})
} }
/** /**
* Retrieve reasoning details by signature * Retrieve reasoning details by signature
*/ */
get(signature: string): T[] | undefined { get(signature: string): T | undefined {
const entry = this.cache.get(signature) const entry = this.cache.get(signature)
if (!entry) return undefined if (!entry) return undefined
@ -55,11 +51,6 @@ export class ReasoningCache<T> {
return undefined return undefined
} }
logger.debug('Retrieved reasoning details from cache', {
signature: signature.substring(0, 20) + '...',
detailsCount: entry.details.length
})
return entry.details return entry.details
} }
@ -113,4 +104,4 @@ export class ReasoningCache<T> {
} }
// Singleton cache instance // Singleton cache instance
export const reasoningCache = new ReasoningCache() export const reasoningCache = new ReasoningCache<JSONValue>()

View File

@ -301,11 +301,24 @@ function convertAnthropicToAiMessages(params: MessageCreateParams): ModelMessage
imageParts.push({ type: 'image', image: source.url }) imageParts.push({ type: 'image', image: source.url })
} }
} else if (block.type === 'tool_use') { } else if (block.type === 'tool_use') {
const options: ProviderOptions = {}
if (isGemini3ModelId(params.model)) {
if (reasoningCache.get('google')) {
options.google = {
thoughtSignature: MAGIC_STRING
}
} else if (reasoningCache.get('openrouter')) {
options.openrouter = {
reasoning_details: (reasoningCache.get('openrouter') as JSONValue[]) || []
}
}
}
toolCallParts.push({ toolCallParts.push({
type: 'tool-call', type: 'tool-call',
toolName: block.name, toolName: block.name,
toolCallId: block.id, toolCallId: block.id,
input: block.input input: block.input,
providerOptions: options
}) })
} else if (block.type === 'tool_result') { } else if (block.type === 'tool_result') {
// Look up toolName from the pre-built map (covers cross-message references) // Look up toolName from the pre-built map (covers cross-message references)