From 5d0841899b1332a1451d9ff26a65f88711ccc269 Mon Sep 17 00:00:00 2001 From: kangfenmao Date: Tue, 18 Nov 2025 13:52:57 +0800 Subject: [PATCH] fix(conversation): set context length to 1 for gemini-2.5-flash-image model Set context count to 1 for gemini-2.5-flash-image (Nano Banana) model to fix continuous conversation failures. This ensures the model only receives the most recent message, preventing context-related errors. Changes: - Add isGeminiGenerateImageModel utility to identify Nano Banana models - Override contextCount to 1 when using gemini-2.5-flash-image - Move the previous +2 offset from the takeRight call into the contextCount variable (effective context size is unchanged for other models) --- src/renderer/src/aiCore/utils/image.ts | 4 ++++ src/renderer/src/services/ConversationService.ts | 12 ++++++++++-- 2 files changed, 14 insertions(+), 2 deletions(-) diff --git a/src/renderer/src/aiCore/utils/image.ts b/src/renderer/src/aiCore/utils/image.ts index 37dbe76a2c..a2a755e87f 100644 --- a/src/renderer/src/aiCore/utils/image.ts +++ b/src/renderer/src/aiCore/utils/image.ts @@ -14,3 +14,7 @@ export function isOpenRouterGeminiGenerateImageModel(model: Model, provider: Pro provider.id === SystemProviderIds.openrouter ) } + +export function isGeminiGenerateImageModel(model: Model): boolean { + return model.id.includes('gemini-2.5-flash-image') +} diff --git a/src/renderer/src/services/ConversationService.ts b/src/renderer/src/services/ConversationService.ts index f9e3f4dea5..576dd99c3a 100644 --- a/src/renderer/src/services/ConversationService.ts +++ b/src/renderer/src/services/ConversationService.ts @@ -1,4 +1,5 @@ import { convertMessagesToSdkMessages } from '@renderer/aiCore/prepareParams' +import { isGeminiGenerateImageModel } from '@renderer/aiCore/utils/image' import type { Assistant, Message } from '@renderer/types' import { filterAdjacentUserMessaegs, filterLastAssistantMessage } from '@renderer/utils/messageUtils/filters' import type { ModelMessage } from 'ai' @@ -17,7 +18,14 @@ export class ConversationService { messages: 
Message[], assistant: Assistant ): Promise<{ modelMessages: ModelMessage[]; uiMessages: Message[] }> { - const { contextCount } = getAssistantSettings(assistant) + let { contextCount } = getAssistantSettings(assistant) + + contextCount = contextCount + 2 + + if (assistant.model && isGeminiGenerateImageModel(assistant.model)) { + contextCount = 1 + } + // This logic is extracted from the original ApiService.fetchChatCompletion // const contextMessages = filterContextMessages(messages) const lastUserMessage = findLast(messages, (m) => m.role === 'user') @@ -37,7 +45,7 @@ export class ConversationService { const filteredMessages4 = filterAdjacentUserMessaegs(filteredMessages3) let uiMessages = filterUserRoleStartMessages( - filterEmptyMessages(filterAfterContextClearMessages(takeRight(filteredMessages4, contextCount + 2))) // 取原来几个provider的最大值 + filterEmptyMessages(filterAfterContextClearMessages(takeRight(filteredMessages4, contextCount))) // 取原来几个provider的最大值 ) // Fallback: ensure at least the last user message is present to avoid empty payloads