diff --git a/package.json b/package.json
index badb8c1cc6..b8efda1d7c 100644
--- a/package.json
+++ b/package.json
@@ -1,6 +1,6 @@
 {
   "name": "CherryStudio",
-  "version": "0.7.4",
+  "version": "0.7.5",
   "private": true,
   "description": "A powerful AI assistant for producer.",
   "main": "./out/main/index.js",
diff --git a/src/renderer/src/providers/OpenAIProvider.ts b/src/renderer/src/providers/OpenAIProvider.ts
index 66538ed549..c1cb3aeb2a 100644
--- a/src/renderer/src/providers/OpenAIProvider.ts
+++ b/src/renderer/src/providers/OpenAIProvider.ts
@@ -124,16 +124,25 @@ export default class OpenAIProvider extends BaseProvider {
       userMessages.push(await this.getMessageParam(message, model))
     }
 
+    const isSupportStreamOutput = this.isSupportStreamOutput(model.id)
+
     // @ts-ignore key is not typed
     const stream = await this.sdk.chat.completions.create({
       model: model.id,
       messages: [systemMessage, ...userMessages].filter(Boolean) as ChatCompletionMessageParam[],
-      stream: this.isSupportStreamOutput(model.id),
+      stream: isSupportStreamOutput,
       temperature: assistant?.settings?.temperature,
       max_tokens: maxTokens,
       keep_alive: this.keepAliveTime
     })
 
+    if (!isSupportStreamOutput) {
+      return onChunk({
+        text: stream.choices[0].message?.content || '',
+        usage: stream.usage
+      })
+    }
+
     for await (const chunk of stream) {
       if (window.keyv.get(EVENT_NAMES.CHAT_COMPLETION_PAUSED)) {
         break
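For readers skimming the second hunk: the new `isSupportStreamOutput` guard returns early with the complete message text and usage when the model cannot stream, instead of handing a plain completion object to `for await`. Below is a minimal standalone sketch of the same pattern against the official `openai` Node SDK; `supportsStream`, `complete`, and `onChunk` are hypothetical stand-ins for CherryStudio's `isSupportStreamOutput` and its chunk callback, not code from this PR.

```ts
import OpenAI from 'openai'
import type { ChatCompletionMessageParam } from 'openai/resources/chat/completions'

type ChunkCallback = (chunk: { text: string; usage?: unknown }) => void

// Hypothetical stand-in for OpenAIProvider.isSupportStreamOutput: some
// backends only return complete responses, so streaming is disabled for them.
function supportsStream(modelId: string): boolean {
  const nonStreamingModels: string[] = [] // placeholder list, not from the PR
  return !nonStreamingModels.includes(modelId)
}

async function complete(
  client: OpenAI,
  modelId: string,
  messages: ChatCompletionMessageParam[],
  onChunk: ChunkCallback
): Promise<void> {
  if (!supportsStream(modelId)) {
    // Non-streaming path: a single ChatCompletion carrying the full message.
    const completion = await client.chat.completions.create({
      model: modelId,
      messages,
      stream: false
    })
    onChunk({
      text: completion.choices[0]?.message?.content ?? '',
      usage: completion.usage
    })
    return
  }

  // Streaming path: an async iterable of ChatCompletionChunk deltas.
  const stream = await client.chat.completions.create({
    model: modelId,
    messages,
    stream: true
  })
  for await (const chunk of stream) {
    onChunk({ text: chunk.choices[0]?.delta?.content ?? '' })
  }
}
```

Unlike the diff, the sketch makes two `create` calls with literal `stream` values, which should let the SDK's overloads narrow the return type without a cast; the PR keeps a single call with a dynamic flag, which is why its non-streaming branch reads `stream.choices[0]` off a variable named `stream`.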