diff --git a/src/renderer/src/aiCore/prepareParams/header.ts b/src/renderer/src/aiCore/prepareParams/header.ts
new file mode 100644
index 0000000000..8c53cbce53
--- /dev/null
+++ b/src/renderer/src/aiCore/prepareParams/header.ts
@@ -0,0 +1,13 @@
+import { isClaude45ReasoningModel } from '@renderer/config/models'
+import type { Assistant, Model } from '@renderer/types'
+import { isToolUseModeFunction } from '@renderer/utils/assistant'
+
+const INTERLEAVED_THINKING_HEADER = 'interleaved-thinking-2025-05-14'
+
+export function addAnthropicHeaders(assistant: Assistant, model: Model): string[] {
+  const anthropicHeaders: string[] = []
+  if (isClaude45ReasoningModel(model) && isToolUseModeFunction(assistant)) {
+    anthropicHeaders.push(INTERLEAVED_THINKING_HEADER)
+  }
+  return anthropicHeaders
+}
diff --git a/src/renderer/src/aiCore/prepareParams/parameterBuilder.ts b/src/renderer/src/aiCore/prepareParams/parameterBuilder.ts
index 397c481cf3..e865f9f15f 100644
--- a/src/renderer/src/aiCore/prepareParams/parameterBuilder.ts
+++ b/src/renderer/src/aiCore/prepareParams/parameterBuilder.ts
@@ -7,10 +7,12 @@ import { anthropic } from '@ai-sdk/anthropic'
 import { google } from '@ai-sdk/google'
 import { vertexAnthropic } from '@ai-sdk/google-vertex/anthropic/edge'
 import { vertex } from '@ai-sdk/google-vertex/edge'
+import { combineHeaders } from '@ai-sdk/provider-utils'
 import type { WebSearchPluginConfig } from '@cherrystudio/ai-core/built-in/plugins'
 import { isBaseProvider } from '@cherrystudio/ai-core/core/providers/schemas'
 import { loggerService } from '@logger'
 import {
+  isAnthropicModel,
   isGenerateImageModel,
   isOpenRouterBuiltInWebSearchModel,
   isReasoningModel,
@@ -19,6 +21,8 @@ import {
   isSupportedThinkingTokenModel,
   isWebSearchModel
 } from '@renderer/config/models'
+import { isAwsBedrockProvider } from '@renderer/config/providers'
+import { isVertexProvider } from '@renderer/hooks/useVertexAI'
 import { getAssistantSettings, getDefaultModel } from '@renderer/services/AssistantService'
 import store from '@renderer/store'
 import type { CherryWebSearchConfig } from '@renderer/store/websearch'
@@ -34,6 +38,7 @@ import { setupToolsConfig } from '../utils/mcp'
 import { buildProviderOptions } from '../utils/options'
 import { getAnthropicThinkingBudget } from '../utils/reasoning'
 import { buildProviderBuiltinWebSearchConfig } from '../utils/websearch'
+import { addAnthropicHeaders } from './header'
 import { supportsTopP } from './modelCapabilities'
 import { getTemperature, getTopP } from './modelParameters'
 
@@ -172,13 +177,21 @@
     }
   }
 
+  let headers: Record<string, string | undefined> = options.requestOptions?.headers ?? {}
+
+  // https://docs.claude.com/en/docs/build-with-claude/extended-thinking#interleaved-thinking
+  if (!isVertexProvider(provider) && !isAwsBedrockProvider(provider) && isAnthropicModel(model)) {
+    const newBetaHeaders = { 'anthropic-beta': addAnthropicHeaders(assistant, model).join(',') }
+    headers = combineHeaders(headers, newBetaHeaders)
+  }
+
   // 构建基础参数
   const params: StreamTextParams = {
     messages: sdkMessages,
     maxOutputTokens: maxTokens,
     temperature: getTemperature(assistant, model),
     abortSignal: options.requestOptions?.signal,
-    headers: options.requestOptions?.headers,
+    headers,
     providerOptions,
     stopWhen: stepCountIs(20),
     maxRetries: 0
diff --git a/src/renderer/src/config/models/websearch.ts b/src/renderer/src/config/models/websearch.ts
index 418c81133d..f012be7cfa 100644
--- a/src/renderer/src/config/models/websearch.ts
+++ b/src/renderer/src/config/models/websearch.ts
@@ -70,7 +70,7 @@ export function isWebSearchModel(model: Model): boolean {
   // bedrock和vertex不支持
   if (
     isAnthropicModel(model) &&
-    (provider.id === SystemProviderIds['aws-bedrock'] || provider.id === SystemProviderIds.vertexai)
+    !(provider.id === SystemProviderIds['aws-bedrock'] || provider.id === SystemProviderIds.vertexai)
   ) {
     return CLAUDE_SUPPORTED_WEBSEARCH_REGEX.test(modelId)
   }
diff --git a/src/renderer/src/config/providers.ts b/src/renderer/src/config/providers.ts
index 0f2b6cfadd..1e25a550f1 100644
--- a/src/renderer/src/config/providers.ts
+++ b/src/renderer/src/config/providers.ts
@@ -1573,6 +1573,10 @@ export function isAIGatewayProvider(provider: Provider): boolean {
   return provider.type === 'ai-gateway'
 }
 
+export function isAwsBedrockProvider(provider: Provider): boolean {
+  return provider.type === 'aws-bedrock'
+}
+
 const NOT_SUPPORT_API_VERSION_PROVIDERS = ['github', 'copilot', 'perplexity'] as const satisfies SystemProviderId[]
 
 export const isSupportAPIVersionProvider = (provider) => {
diff --git a/src/renderer/src/pages/home/Inputbar/tools/urlContextTool.tsx b/src/renderer/src/pages/home/Inputbar/tools/urlContextTool.tsx
index 037d43e19f..bb38e67b0e 100644
--- a/src/renderer/src/pages/home/Inputbar/tools/urlContextTool.tsx
+++ b/src/renderer/src/pages/home/Inputbar/tools/urlContextTool.tsx
@@ -1,4 +1,4 @@
-import { isGeminiModel } from '@renderer/config/models'
+import { isAnthropicModel, isGeminiModel } from '@renderer/config/models'
 import { isSupportUrlContextProvider } from '@renderer/config/providers'
 import { defineTool, registerTool, TopicType } from '@renderer/pages/home/Inputbar/types'
 import { getProviderByModel } from '@renderer/services/AssistantService'
@@ -10,9 +10,8 @@ const urlContextTool = defineTool({
   label: (t) => t('chat.input.url_context'),
   visibleInScopes: [TopicType.Chat],
   condition: ({ model }) => {
-    if (!isGeminiModel(model)) return false
     const provider = getProviderByModel(model)
-    return !!provider && isSupportUrlContextProvider(provider)
+    return !!provider && isSupportUrlContextProvider(provider) && (isGeminiModel(model) || isAnthropicModel(model))
   },
   render: ({ assistant }) => 
 })
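
Note (not part of the patch): a minimal TypeScript sketch of how the new 'anthropic-beta' value is expected to merge with any caller-supplied headers via combineHeaders from '@ai-sdk/provider-utils'. The callerHeaders object and its 'x-request-id' key are made up for illustration; in the versions of the helper I have seen, later arguments win on duplicate keys, so a pre-existing 'anthropic-beta' entry would be replaced by the one built here.

import { combineHeaders } from '@ai-sdk/provider-utils'

// Hypothetical caller-supplied request headers (illustrative only).
const callerHeaders: Record<string, string | undefined> = { 'x-request-id': 'req-123' }

// The value addAnthropicHeaders(assistant, model).join(',') would produce for a
// Claude 4.5 reasoning model running in function tool-use mode.
const betaHeaders = { 'anthropic-beta': 'interleaved-thinking-2025-05-14' }

// Later arguments take precedence on duplicate keys, so betaHeaders wins here.
const headers = combineHeaders(callerHeaders, betaHeaders)
// headers => { 'x-request-id': 'req-123', 'anthropic-beta': 'interleaved-thinking-2025-05-14' }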