diff --git a/src/renderer/src/aiCore/clients/openai/OpenAIApiClient.ts b/src/renderer/src/aiCore/clients/openai/OpenAIApiClient.ts
index e39d3834bd..1faff88983 100644
--- a/src/renderer/src/aiCore/clients/openai/OpenAIApiClient.ts
+++ b/src/renderer/src/aiCore/clients/openai/OpenAIApiClient.ts
@@ -25,6 +25,7 @@ import {
 import {
   isSupportArrayContentProvider,
   isSupportDeveloperRoleProvider,
+  isSupportQwen3EnableThinkingProvider,
   isSupportStreamOptionsProvider
 } from '@renderer/config/providers'
 import { processPostsuffixQwen3Model, processReqMessages } from '@renderer/services/ModelMessageService'
@@ -526,7 +527,11 @@ export class OpenAIAPIClient extends OpenAIBaseClient<
     }
 
     const lastUserMsg = userMessages.findLast((m) => m.role === 'user')
-    if (lastUserMsg && isSupportedThinkingTokenQwenModel(model) && model.provider !== 'dashscope') {
+    if (
+      lastUserMsg &&
+      isSupportedThinkingTokenQwenModel(model) &&
+      !isSupportQwen3EnableThinkingProvider(this.provider)
+    ) {
       const postsuffix = '/no_think'
       const qwenThinkModeEnabled = assistant.settings?.qwenThinkMode === true
       const currentContent = lastUserMsg.content
diff --git a/src/renderer/src/config/providers.ts b/src/renderer/src/config/providers.ts
index 806a7ff303..536a7d5524 100644
--- a/src/renderer/src/config/providers.ts
+++ b/src/renderer/src/config/providers.ts
@@ -722,22 +722,40 @@ export const PROVIDER_CONFIG = {
 
 const NOT_SUPPORT_ARRAY_CONTENT_PROVIDERS = ['deepseek', 'baichuan', 'minimax', 'xirang']
 
+/**
+ * Determines whether the provider supports array-typed message content. Only for the OpenAI Chat Completions API.
+ */
 export const isSupportArrayContentProvider = (provider: Provider) => {
   return provider.isNotSupportArrayContent !== true || !NOT_SUPPORT_ARRAY_CONTENT_PROVIDERS.includes(provider.id)
 }
 
 const NOT_SUPPORT_DEVELOPER_ROLE_PROVIDERS = ['poe']
 
+/**
+ * Determines whether the provider supports `developer` as a message role. Only for the OpenAI API.
+ */
 export const isSupportDeveloperRoleProvider = (provider: Provider) => {
   return provider.isNotSupportDeveloperRole !== true || !NOT_SUPPORT_DEVELOPER_ROLE_PROVIDERS.includes(provider.id)
 }
 
 const NOT_SUPPORT_STREAM_OPTIONS_PROVIDERS = ['mistral']
 
+/**
+ * Determines whether the provider supports the `stream_options` parameter. Only for the OpenAI API.
+ */
 export const isSupportStreamOptionsProvider = (provider: Provider) => {
   return provider.isNotSupportStreamOptions !== true || !NOT_SUPPORT_STREAM_OPTIONS_PROVIDERS.includes(provider.id)
 }
 
+const SUPPORT_QWEN3_ENABLE_THINKING_PROVIDER = ['dashscope', 'modelscope']
+
+/**
+ * Determines whether the provider supports the `enable_thinking` parameter to control thinking for Qwen3-series models. Only for the OpenAI Chat Completions API.
+ */
+export const isSupportQwen3EnableThinkingProvider = (provider: Provider) => {
+  return SUPPORT_QWEN3_ENABLE_THINKING_PROVIDER.includes(provider.id)
+}
+
 /**
  * Determines whether the provider is a built-in system provider. This is better than reading `provider.isSystem` directly, because that data field does not change as versions are updated.
  * @param provider - The Provider object containing the provider's information
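
For reviewers, a minimal sketch of the behavior change this patch introduces: the `/no_think` prompt-suffix handling used to be skipped only when `model.provider !== 'dashscope'` failed, and is now skipped for every provider reported by the new `isSupportQwen3EnableThinkingProvider` helper (DashScope and ModelScope, which accept `enable_thinking` as a request parameter). The sketch below is not the actual Cherry Studio code; the `Provider` type is simplified, the `needsNoThinkSuffix*` helpers are hypothetical names, and the real condition also checks `isSupportedThinkingTokenQwenModel(model)` and the last user message.

```ts
// Simplified Provider shape assumed for this sketch; the real type carries more fields.
type Provider = { id: string }

// Mirrors the new constant and helper added to providers.ts in this diff.
const SUPPORT_QWEN3_ENABLE_THINKING_PROVIDER = ['dashscope', 'modelscope']

const isSupportQwen3EnableThinkingProvider = (provider: Provider): boolean =>
  SUPPORT_QWEN3_ENABLE_THINKING_PROVIDER.includes(provider.id)

// Before this patch: the suffix path was gated only on the provider not being DashScope.
const needsNoThinkSuffixBefore = (provider: Provider): boolean => provider.id !== 'dashscope'

// After this patch: any provider that supports `enable_thinking` skips the suffix path.
const needsNoThinkSuffixAfter = (provider: Provider): boolean =>
  !isSupportQwen3EnableThinkingProvider(provider)

console.log(needsNoThinkSuffixBefore({ id: 'modelscope' })) // true  -> suffix logic used to run
console.log(needsNoThinkSuffixAfter({ id: 'modelscope' })) // false -> suffix logic is now skipped
console.log(needsNoThinkSuffixAfter({ id: 'openrouter' })) // true  -> unchanged for other providers
```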