diff --git a/src/renderer/src/providers/OpenAIProvider.ts b/src/renderer/src/providers/OpenAIProvider.ts
index dbe39c4dce..b78492815f 100644
--- a/src/renderer/src/providers/OpenAIProvider.ts
+++ b/src/renderer/src/providers/OpenAIProvider.ts
@@ -166,7 +166,7 @@ export default class OpenAIProvider extends BaseProvider {
       }
     }
 
-    if (this.isOpenAIo1(model)) {
+    if (this.isOpenAIReasoning(model)) {
       return {
         max_tokens: undefined,
         max_completion_tokens: maxTokens
@@ -232,8 +232,8 @@ export default class OpenAIProvider extends BaseProvider {
     return {}
   }
 
-  private isOpenAIo1(model: Model) {
-    return model.id.startsWith('o1')
+  private isOpenAIReasoning(model: Model) {
+    return model.id.startsWith('o1') || model.id.startsWith('o3')
   }
 
   async completions({ messages, assistant, onChunk, onFilterMessages, mcpTools }: CompletionsParams): Promise<void> {
@@ -262,10 +262,10 @@ export default class OpenAIProvider extends BaseProvider {
       userMessages.push(await this.getMessageParam(message, model))
     }
 
-    const isOpenAIo1 = this.isOpenAIo1(model)
+    const isOpenAIReasoning = this.isOpenAIReasoning(model)
 
     const isSupportStreamOutput = () => {
-      if (isOpenAIo1) {
+      if (isOpenAIReasoning) {
         return false
       }
       return streamOutput
@@ -482,13 +482,13 @@ export default class OpenAIProvider extends BaseProvider {
           ]
         : [{ role: 'user', content: assistant.prompt }]
 
-    const isOpenAIo1 = this.isOpenAIo1(model)
+    const isOpenAIReasoning = this.isOpenAIReasoning(model)
 
     const isSupportedStreamOutput = () => {
      if (!onResponse) {
        return false
      }
-      if (isOpenAIo1) {
+      if (isOpenAIReasoning) {
        return false
      }
      return true