fix: support DeepSeek v3.1 for ppio & openrouter free (#9697)

* fix: 修复deepseek-chat-v3.1模型判断逻辑

使用includes替代严格相等判断,以兼容更多可能的provider模型id格式

* feat: 添加对PPIO提供商的支持并优化DeepSeek思考令牌逻辑

为DeepSeek V3.1添加PPIO提供商支持,同时统一硅基流动(silicon)和PPIO提供商的思考令牌配置
将未知提供商的默认行为改为启用思考令牌,并更新警告日志信息

* fix(openrouter): 处理总是思考模型的特殊情况

当模型为总是思考类型且不支持思考标记时,返回空对象以避免隐藏思考内容
This commit is contained in:
Phantom 2025-08-31 00:48:20 +08:00 committed by GitHub
parent dfb3322b28
commit 0b7543a59b
No known key found for this signature in database
GPG Key ID: B5690EEEBB952194
2 changed files with 15 additions and 7 deletions

View File

@ -137,6 +137,7 @@ export class OpenAIAPIClient extends OpenAIBaseClient<
// }
// openrouter: use reasoning
// openrouter 如果关闭思考,会隐藏思考内容,所以对于总是思考的模型需要特别处理
if (model.provider === SystemProviderIds.openrouter) {
// Don't disable reasoning for Gemini models that support thinking tokens
if (isSupportedThinkingTokenGeminiModel(model) && !GEMINI_FLASH_MODEL_REGEX.test(model.id)) {
@ -146,6 +147,9 @@ export class OpenAIAPIClient extends OpenAIBaseClient<
if (isGrokReasoningModel(model) || isOpenAIReasoningModel(model)) {
return {}
}
if (isReasoningModel(model) && !isSupportedThinkingTokenModel(model)) {
return {}
}
return { reasoning: { enabled: false, exclude: true } }
}
@ -203,10 +207,6 @@ export class OpenAIAPIClient extends OpenAIBaseClient<
enable_thinking: true,
incremental_output: true
}
case SystemProviderIds.silicon:
return {
enable_thinking: true
}
case SystemProviderIds.doubao:
return {
thinking: {
@ -225,10 +225,18 @@ export class OpenAIAPIClient extends OpenAIBaseClient<
thinking: true
}
}
case SystemProviderIds.silicon:
case SystemProviderIds.ppio:
return {
enable_thinking: true
}
default:
logger.warn(
`Skipping thinking options for provider ${this.provider.name} as DeepSeek v3.1 thinking control method is unknown`
`Use enable_thinking option as fallback for provider ${this.provider.name} since DeepSeek v3.1 thinking control method is unknown`
)
return {
enable_thinking: true
}
}
}
}

View File

@ -2584,7 +2584,7 @@ export function isSupportedThinkingTokenModel(model?: Model): boolean {
// Specifically for DeepSeek V3.1. White list for now
if (isDeepSeekHybridInferenceModel(model)) {
return (['openrouter', 'dashscope', 'doubao', 'silicon', 'nvidia'] satisfies SystemProviderId[]).some(
return (['openrouter', 'dashscope', 'doubao', 'silicon', 'nvidia', 'ppio'] satisfies SystemProviderId[]).some(
(id) => id === model.provider
)
}
@ -2813,7 +2813,7 @@ export const isDeepSeekHybridInferenceModel = (model: Model) => {
const modelId = getLowerBaseModelName(model.id)
// deepseek官方使用chat和reasoner做推理控制;其他provider需要单独判断,id可能会有所差别
// openrouter: deepseek/deepseek-chat-v3.1。不知道会不会有其他provider仿照ds官方,分出一个同id的作为非思考模式的模型,这里有风险
return /deepseek-v3(?:\.1|-1-\d+)?/.test(modelId) || modelId === 'deepseek-chat-v3.1'
return /deepseek-v3(?:\.1|-1-\d+)?/.test(modelId) || modelId.includes('deepseek-chat-v3.1')
}
export const isSupportedThinkingTokenDeepSeekModel = isDeepSeekHybridInferenceModel