Hotfix/gemini openrouter (#6080)

feat: add thinking-configuration support for Gemini models used through OpenAI-compatible providers
SuYao 2025-05-17 00:45:26 +08:00 committed by GitHub
parent 04cbeab1d9
commit 119125038d

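In short, this change teaches the OpenAI-compatible provider to emit Gemini thinking parameters. When reasoning is switched off, a Gemini model served through OpenRouter gets a zero reasoning-token budget with the thoughts excluded, while other OpenAI-compatible Gemini endpoints get an explicit zero thinking budget; when a reasoning effort is selected, the computed token budget is forwarded as a thinkingConfig. A minimal standalone sketch of the disabled case follows; the helper name and its providerId parameter are illustrative only, not part of the provider class changed below.

// Illustrative sketch of the "thinking disabled" branch added in this commit.
function disabledGeminiThinkingParams(providerId: string): Record<string, unknown> {
  if (providerId === 'openrouter') {
    // OpenRouter has no explicit "no reasoning" switch, so request zero
    // reasoning tokens and exclude the thoughts from the response.
    return { reasoning: { maxTokens: 0, exclude: true } }
  }
  // Other OpenAI-compatible Gemini endpoints get an explicit zero budget.
  return { thinkingConfig: { includeThoughts: false, thinkingBudget: 0 } }
}

console.log(disabledGeminiThinkingParams('openrouter')) // -> { reasoning: { maxTokens: 0, exclude: true } }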

@@ -10,6 +10,7 @@ import {
  isSupportedReasoningEffortModel,
  isSupportedReasoningEffortOpenAIModel,
  isSupportedThinkingTokenClaudeModel,
  isSupportedThinkingTokenGeminiModel,
  isSupportedThinkingTokenModel,
  isSupportedThinkingTokenQwenModel,
  isVisionModel,
@@ -258,6 +259,19 @@ export default class OpenAIProvider extends BaseOpenAIProvider {
        return { thinking: { type: 'disabled' } }
      }
      if (isSupportedThinkingTokenGeminiModel(model)) {
        // OpenRouter does not provide a way to fully disable reasoning, so hide the thoughts for now
        if (this.provider.id === 'openrouter') {
          return { reasoning: { maxTokens: 0, exclude: true } }
        }
        return {
          thinkingConfig: {
            includeThoughts: false,
            thinkingBudget: 0
          }
        }
      }
      return {}
    }
    const effortRatio = EFFORT_RATIO[reasoningEffort]
@@ -313,6 +327,16 @@ export default class OpenAIProvider extends BaseOpenAIProvider {
      }
    }
    // Gemini models
    if (isSupportedThinkingTokenGeminiModel(model)) {
      return {
        thinkingConfig: {
          thinkingBudget: budgetTokens,
          includeThoughts: true
        }
      }
    }
    // Default case: no special thinking settings
    return {}
  }
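For the enabled case, a sketch of how the resulting thinkingConfig could ride along with an ordinary OpenAI-compatible chat request. The geminiThinkingParams helper, the base URL, and the model id are assumptions for illustration; how the provider actually serializes these extra fields is outside this diff.

import OpenAI from 'openai'

// Illustrative sketch of the "thinking enabled" branch, where budgetTokens has
// already been derived from EFFORT_RATIO[reasoningEffort] as in the code above.
function geminiThinkingParams(budgetTokens: number): Record<string, unknown> {
  return { thinkingConfig: { thinkingBudget: budgetTokens, includeThoughts: true } }
}

const client = new OpenAI({
  baseURL: 'https://openrouter.ai/api/v1', // hypothetical endpoint for this example
  apiKey: process.env.OPENROUTER_API_KEY
})

// The extra field is spread into the request body next to the standard parameters.
const body = {
  model: 'google/gemini-2.5-flash-preview', // hypothetical model id
  messages: [{ role: 'user' as const, content: 'Why is the sky blue?' }],
  ...geminiThinkingParams(1024)
}

// The OpenAI SDK's types do not know about thinkingConfig, so an actual call needs a
// cast or the provider's own request layer:
// await client.chat.completions.create(body as any)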