diff --git a/src/renderer/src/services/MemoryProcessor.ts b/src/renderer/src/services/MemoryProcessor.ts
index ef7833494b..2c8ab8348b 100644
--- a/src/renderer/src/services/MemoryProcessor.ts
+++ b/src/renderer/src/services/MemoryProcessor.ts
@@ -238,12 +238,10 @@ export class MemoryProcessor {
         limit
       })
 
-      logger.debug(
-        `Searching memories with query: ${query} for user: ${userId} and assistant: ${assistantId} result: ${result}`
-      )
+      logger.debug('Searching memories successful', { query, userId, assistantId, result })
       return result.results
     } catch (error) {
-      logger.error('Error searching memories:', error as Error)
+      logger.error('Searching memories error:', { error })
       return []
     }
   }
diff --git a/src/renderer/src/services/MemoryService.ts b/src/renderer/src/services/MemoryService.ts
index cf6cbbc41a..b584302ba4 100644
--- a/src/renderer/src/services/MemoryService.ts
+++ b/src/renderer/src/services/MemoryService.ts
@@ -12,6 +12,8 @@ import {
   MemorySearchResult
 } from '@types'
 
+import { getAssistantById } from './AssistantService'
+
 const logger = loggerService.withContext('MemoryService')
 
 // Main process SearchResult type (matches what the IPC actually returns)
@@ -186,6 +188,16 @@ class MemoryService {
       userId: this.currentUserId
     }
 
+    // If agentId is provided, resolve userId from assistant's memoryUserId
+    if (optionsWithUser.agentId) {
+      const assistant = getAssistantById(optionsWithUser.agentId)
+      if (assistant) {
+        optionsWithUser.userId = assistant.memoryUserId || this.currentUserId
+      }
+    }
+
+    logger.debug('Searching memories start with options', { query: query, options: optionsWithUser })
+
     try {
       const result: SearchResult = await window.api.memory.search(query, optionsWithUser)
 
diff --git a/src/renderer/src/services/StreamProcessingService.ts b/src/renderer/src/services/StreamProcessingService.ts
index 4861e5b11a..6075ddde69 100644
--- a/src/renderer/src/services/StreamProcessingService.ts
+++ b/src/renderer/src/services/StreamProcessingService.ts
@@ -51,7 +51,7 @@ export function createStreamProcessor(callbacks: StreamProcessorCallbacks = {})
   return (chunk: Chunk) => {
     try {
       const data = chunk
-      logger.debug('data: ', data)
+      logger.silly('data: ', data)
       switch (data.type) {
         case ChunkType.BLOCK_COMPLETE: {
           if (callbacks.onComplete) callbacks.onComplete(AssistantMessageStatus.SUCCESS, data?.response)
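
The core behavioral change in `MemoryService.ts` is the userId resolution added before the search call: when `agentId` is present, the search is scoped to the assistant's `memoryUserId`, falling back to the service's current user id. A minimal sketch of that fallback logic is below; `resolveMemoryUserId` and `AssistantLike` are hypothetical names introduced only for illustration and are not part of this PR.

```ts
// Illustrative sketch only; mirrors the fallback added in MemoryService:
// prefer the assistant's memoryUserId, otherwise keep the current user id.
interface AssistantLike {
  id: string
  memoryUserId?: string
}

function resolveMemoryUserId(assistant: AssistantLike | undefined, currentUserId: string): string {
  // No assistant found for the given agentId: keep the default user scope.
  if (!assistant) return currentUserId
  // Assistant found: use its memory user id when set, else fall back.
  return assistant.memoryUserId || currentUserId
}

// An assistant without memoryUserId resolves to the current user.
console.log(resolveMemoryUserId({ id: 'a1' }, 'user-1')) // -> 'user-1'
// An assistant with memoryUserId overrides the default scope.
console.log(resolveMemoryUserId({ id: 'a2', memoryUserId: 'u-42' }, 'user-1')) // -> 'u-42'
```

The remaining hunks only adjust logging: structured objects instead of template strings in `MemoryProcessor.ts`, and the per-chunk dump in `StreamProcessingService.ts` demoted from `debug` to `silly`.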