fix: enable stream output in assistant settings for chat completion (#7240)

Authored by SuYao on 2025-06-16 12:51:09 +08:00, committed by GitHub
parent 34fcf73a95
commit 629f19be32
2 changed files with 2 additions and 2 deletions


@@ -187,7 +187,7 @@ const HomeWindow: FC = () => {
fetchChatCompletion({
messages: [userMessage],
- assistant: { ...assistant, model: quickAssistantModel || getDefaultModel() },
+ assistant: { ...assistant, model: quickAssistantModel || getDefaultModel(), settings: { streamOutput: true } },
onChunkReceived: (chunk: Chunk) => {
if (chunk.type === ChunkType.TEXT_DELTA) {
blockContent += chunk.text

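To make the intent of this hunk concrete, here is a minimal TypeScript sketch of the two pieces it touches: forcing stream output on a per-call copy of the assistant, and accumulating TEXT_DELTA chunks in an onChunkReceived handler. The type and helper names below are illustrative stand-ins, not the repository's actual definitions.

```ts
// Illustrative stand-ins for the shapes referenced above; the real
// Chunk, ChunkType, and assistant types are defined in the repository.
enum ChunkType {
  TEXT_DELTA = 'text.delta',
  THINKING_DELTA = 'thinking.delta'
}

interface Chunk {
  type: ChunkType
  text?: string
}

interface AssistantLike {
  model?: unknown
  settings?: { streamOutput?: boolean }
}

// What the hunk does inline: build a per-call copy of the assistant with
// stream output switched on, leaving the stored assistant object untouched.
function withStreamOutput(assistant: AssistantLike): AssistantLike {
  return { ...assistant, settings: { streamOutput: true } }
}

// In the style of the onChunkReceived handler above: append each TEXT_DELTA
// chunk's text to a running buffer.
function makeTextAccumulator() {
  let blockContent = ''
  return {
    onChunkReceived(chunk: Chunk): void {
      if (chunk.type === ChunkType.TEXT_DELTA) {
        blockContent += chunk.text ?? ''
      }
    },
    current: () => blockContent
  }
}
```

Note that, as in the hunk, the override replaces the settings object wholesale with { streamOutput: true } rather than merging it into any existing assistant settings.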

@@ -51,7 +51,7 @@ export const processMessages = async (
await fetchChatCompletion({
messages: [userMessage],
- assistant,
+ assistant: { ...assistant, settings: { streamOutput: true } },
onChunkReceived: (chunk: Chunk) => {
switch (chunk.type) {
case ChunkType.THINKING_DELTA:
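Similarly, a hedged sketch of the handler style this second hunk feeds: with streamOutput forced on, the switch over chunk types can route thinking and answer deltas into separate buffers. The enum values, the text field on the chunk, and the helper are assumptions for illustration; the hunk above is truncated and the real cases live in the repository.

```ts
// Stand-in chunk shapes again (assumed, not the repository's definitions).
enum ChunkType {
  TEXT_DELTA = 'text.delta',
  THINKING_DELTA = 'thinking.delta'
}

interface Chunk {
  type: ChunkType
  text?: string
}

// Hypothetical handler in the style of the hunk above: with streamOutput
// forced on, route thinking deltas and answer deltas into separate buffers.
function makeChunkRouter() {
  let thinking = ''
  let answer = ''
  return {
    onChunkReceived(chunk: Chunk): void {
      switch (chunk.type) {
        case ChunkType.THINKING_DELTA:
          thinking += chunk.text ?? ''
          break
        case ChunkType.TEXT_DELTA:
          answer += chunk.text ?? ''
          break
      }
    },
    result: () => ({ thinking, answer })
  }
}
```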