feat: Update message handling to include optional uiMessages parameter

- Modify BaseParams type to make uiMessages optional.
- Refactor message preparation in HomeWindow and ActionUtils to handle modelMessages and uiMessages separately.
- Ensure compatibility with updated message structures in fetchChatCompletion calls (see the sketch below).
MyPrototypeWhat 2025-09-04 13:05:56 +08:00
parent 10301a1f79
commit f8a22b5e55
3 changed files with 10 additions and 5 deletions
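
A minimal TypeScript sketch of the resulting call pattern (names are taken from the diff below; the handleChunk callback and surrounding context are hypothetical placeholders):

const { modelMessages, uiMessages } = await ConversationService.prepareMessagesForModel(
  messagesForContext,
  newAssistant
)

await fetchChatCompletion({
  messages: modelMessages, // the payload actually sent to the model
  assistant: newAssistant,
  options: {},
  topicId,
  uiMessages, // original UI-facing messages; the field is optional after this change
  onChunkReceived: handleChunk // hypothetical chunk handler
})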


@@ -1307,7 +1307,7 @@ type BaseParams = {
options?: FetchChatCompletionOptions
onChunkReceived: (chunk: Chunk) => void
topicId?: string // add the topicId parameter
-uiMessages: Message[]
+uiMessages?: Message[]
}
type MessagesParams = BaseParams & {
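
Making uiMessages optional means call sites that have no UI-facing messages to attach (presumably the motivation for the change) can omit the field and still satisfy BaseParams. A minimal sketch, with hypothetical values for the other fields:

await fetchChatCompletion({
  messages: modelMessages,
  assistant: newAssistant,
  options: {},
  // uiMessages omitted: allowed now that the field is optional
  onChunkReceived: (chunk: Chunk) => console.debug(chunk.type)
})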


@@ -266,13 +266,17 @@ const HomeWindow: FC<{ draggable?: boolean }> = ({ draggable = true }) => {
newAssistant.webSearchProviderId = undefined
newAssistant.mcpServers = undefined
newAssistant.knowledge_bases = undefined
-const llmMessages = await ConversationService.prepareMessagesForModel(messagesForContext, newAssistant)
+const { modelMessages, uiMessages } = await ConversationService.prepareMessagesForModel(
+  messagesForContext,
+  newAssistant
+)
await fetchChatCompletion({
-messages: llmMessages,
+messages: modelMessages,
assistant: newAssistant,
options: {},
topicId,
+uiMessages: uiMessages,
onChunkReceived: (chunk: Chunk) => {
switch (chunk.type) {
case ChunkType.THINKING_START:


@@ -64,12 +64,13 @@ export const processMessages = async (
newAssistant.webSearchProviderId = undefined
newAssistant.mcpServers = undefined
newAssistant.knowledge_bases = undefined
-const llmMessages = await ConversationService.prepareMessagesForModel([userMessage], newAssistant)
+const { modelMessages, uiMessages } = await ConversationService.prepareMessagesForModel([userMessage], newAssistant)
await fetchChatCompletion({
-messages: llmMessages,
+messages: modelMessages,
assistant: newAssistant,
options: {},
+uiMessages: uiMessages,
onChunkReceived: (chunk: Chunk) => {
if (finished) {
return
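
Taken together, the two call sites imply that ConversationService.prepareMessagesForModel now resolves to an object carrying both arrays. A rough signature sketch; only the property names modelMessages / uiMessages come from the diff above, while the SdkMessage element type and the declaration form are assumptions:

// Assumed shape; Message and Assistant are the app's own types.
type PreparedMessages = {
  modelMessages: SdkMessage[] // messages converted for the provider API (element type assumed)
  uiMessages: Message[] // the original UI messages, handed back for the optional uiMessages param
}

declare const ConversationService: {
  prepareMessagesForModel(messages: Message[], assistant: Assistant): Promise<PreparedMessages>
}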