refactor: replace console.log with Logger for improved logging consistency across the application

This commit is contained in:
kangfenmao 2025-05-11 19:01:12 +08:00
parent a64c8ded73
commit 3a0cd738ce
47 changed files with 143 additions and 119 deletions

View File

@ -1,5 +1,6 @@
import { BaseEmbeddings } from '@cherrystudio/embedjs-interfaces'
import { VoyageEmbeddings as _VoyageEmbeddings } from '@langchain/community/embeddings/voyage'
import Logger from 'electron-log'
export default class VoyageEmbeddings extends BaseEmbeddings {
private model: _VoyageEmbeddings
@ -11,7 +12,7 @@ export default class VoyageEmbeddings extends BaseEmbeddings {
if (!this.configuration.outputDimension) {
throw new Error('You need to pass in the optional dimensions parameter for this model')
}
console.log('VoyageEmbeddings', this.configuration)
Logger.log('VoyageEmbeddings', this.configuration)
this.model = new _VoyageEmbeddings(this.configuration)
}
override async getDimensions(): Promise<number> {

View File

@ -19,6 +19,8 @@ import { TrayService } from './services/TrayService'
import { windowService } from './services/WindowService'
import { setUserDataDir } from './utils/file'
Logger.initialize()
// Check for single instance lock
if (!app.requestSingleInstanceLock()) {
app.quit()

View File

@ -2,6 +2,7 @@ import { getConfigDir } from '@main/utils/file'
import { Server } from '@modelcontextprotocol/sdk/server/index.js'
import { CallToolRequestSchema, ErrorCode, ListToolsRequestSchema, McpError } from '@modelcontextprotocol/sdk/types.js'
import { Mutex } from 'async-mutex' // 引入 Mutex
import Logger from 'electron-log'
import { promises as fs } from 'fs'
import path from 'path'
@ -355,9 +356,9 @@ class MemoryServer {
private async _initializeManager(memoryPath: string): Promise<void> {
try {
this.knowledgeGraphManager = await KnowledgeGraphManager.create(memoryPath)
console.log('KnowledgeGraphManager initialized successfully.')
Logger.log('KnowledgeGraphManager initialized successfully.')
} catch (error) {
console.error('Failed to initialize KnowledgeGraphManager:', error)
Logger.error('Failed to initialize KnowledgeGraphManager:', error)
// Server might be unusable, consider how to handle this state
// Maybe set a flag and return errors for all tool calls?
this.knowledgeGraphManager = null // Ensure it's null if init fails

View File

@ -1,5 +1,6 @@
import { AxiosRequestConfig } from 'axios'
import { app, safeStorage } from 'electron'
import Logger from 'electron-log'
import fs from 'fs/promises'
import path from 'path'
@ -227,10 +228,10 @@ class CopilotService {
try {
await fs.access(this.tokenFilePath)
await fs.unlink(this.tokenFilePath)
console.log('Successfully logged out from Copilot')
Logger.log('Successfully logged out from Copilot')
} catch (error) {
// 文件不存在不是错误,只是记录一下
console.log('Token file not found, nothing to delete')
Logger.log('Token file not found, nothing to delete')
}
} catch (error) {
console.error('Failed to logout:', error)

View File

@ -459,7 +459,7 @@ class KnowledgeService {
{ uniqueId, uniqueIds, base }: { uniqueId: string; uniqueIds: string[]; base: KnowledgeBaseParams }
): Promise<void> => {
const ragApplication = await this.getRagApplication(base)
console.log(`[ KnowledgeService Remove Item UniqueId: ${uniqueId}]`)
Logger.log(`[ KnowledgeService Remove Item UniqueId: ${uniqueId}]`)
for (const id of uniqueIds) {
await ragApplication.deleteLoader(id)
}

View File

@ -593,7 +593,7 @@ class McpService {
const pathSeparator = process.platform === 'win32' ? ';' : ':'
const cherryBinPath = path.join(os.homedir(), '.cherrystudio', 'bin')
loginEnv.PATH = `${loginEnv.PATH}${pathSeparator}${cherryBinPath}`
Logger.info('[MCP] Successfully fetched login shell environment variables:', loginEnv)
Logger.info('[MCP] Successfully fetched login shell environment variables:')
return loginEnv
} catch (error) {
Logger.error('[MCP] Failed to fetch login shell environment variables:', error)

View File

@ -1,4 +1,5 @@
import { app } from 'electron'
import Logger from 'electron-log'
import fs from 'fs'
import path from 'path'
@ -155,7 +156,7 @@ class ObsidianVaultService {
return []
}
console.log('获取Vault文件结构:', vault.name, vault.path)
Logger.log('获取Vault文件结构:', vault.name, vault.path)
return this.getVaultStructure(vault.path)
} catch (error) {
console.error('获取Vault文件结构时发生错误:', error)

View File

@ -191,7 +191,7 @@ export const reduxService = new ReduxService()
try {
// 读取状态
const settings = await reduxService.select('state.settings')
console.log('settings', settings)
Logger.log('settings', settings)
// 派发 action
await reduxService.dispatch({
@ -201,7 +201,7 @@ export const reduxService = new ReduxService()
// 订阅状态变化
const unsubscribe = await reduxService.subscribe('state.settings.apiKey', (newValue) => {
console.log('API key changed:', newValue)
Logger.log('API key changed:', newValue)
})
// 批量执行 actions
@ -212,16 +212,16 @@ export const reduxService = new ReduxService()
// 同步方法虽然可能不是最新的数据,但响应更快
const apiKey = reduxService.selectSync('state.settings.apiKey')
console.log('apiKey', apiKey)
Logger.log('apiKey', apiKey)
// 处理保证是最新的数据
const apiKey1 = await reduxService.select('state.settings.apiKey')
console.log('apiKey1', apiKey1)
Logger.log('apiKey1', apiKey1)
// 取消订阅
unsubscribe()
} catch (error) {
console.error('Error:', error)
Logger.error('Error:', error)
}
}
*/

View File

@ -1,3 +1,4 @@
import Logger from '@renderer/config/logger'
import { useEffect, useState } from 'react'
import styled from 'styled-components'
@ -120,7 +121,7 @@ const FallbackFavicon: React.FC<FallbackFaviconProps> = ({ hostname, alt }) => {
setFaviconState({ status: 'loaded', src: url })
})
.catch((error) => {
console.log('All favicon requests failed:', error)
Logger.log('All favicon requests failed:', error)
setFaviconState({ status: 'loaded', src: faviconUrls[0] })
})

View File

@ -35,7 +35,6 @@ const TranslateButton: FC<Props> = ({ text, onTranslated, disabled, style, isLoa
}
const handleTranslate = async () => {
console.log('handleTranslate', text)
if (!text?.trim()) return
if (!(await translateConfirm())) {

View File

@ -0,0 +1,6 @@
// Renderer-process logger configuration (electron-log).
// Presumably this is the module imported elsewhere as '@renderer/config/logger' — confirm path mapping.
import Logger from 'electron-log/renderer'
// Set the renderer process's log level (console transport only; other transports keep their defaults)
Logger.transports.console.level = 'info'
export default Logger

View File

@ -469,8 +469,4 @@ function updateDefaultMinApps(param) {
DEFAULT_MIN_APPS = param
}
if (process.env.NODE_ENV === 'development') {
console.log('DEFAULT_MIN_APPS', DEFAULT_MIN_APPS)
}
export { DEFAULT_MIN_APPS, loadCustomMiniApp, ORIGIN_DEFAULT_MIN_APPS, updateDefaultMinApps }

View File

@ -1,3 +1,4 @@
import Logger from '@renderer/config/logger'
import type { LegacyMessage as OldMessage, Topic } from '@renderer/types'
import { FileTypes } from '@renderer/types' // Import FileTypes enum
import { WebSearchSource } from '@renderer/types'
@ -90,7 +91,7 @@ function mapOldStatusToNewMessageStatus(oldStatus: OldMessage['status']): NewMes
// --- UPDATED UPGRADE FUNCTION for Version 7 ---
export async function upgradeToV7(tx: Transaction): Promise<void> {
console.log('Starting DB migration to version 7: Normalizing messages and blocks...')
Logger.info('Starting DB migration to version 7: Normalizing messages and blocks...')
const oldTopicsTable = tx.table('topics')
const newBlocksTable = tx.table('message_blocks')
@ -303,8 +304,8 @@ export async function upgradeToV7(tx: Transaction): Promise<void> {
const updateOperations = Object.entries(topicUpdates).map(([id, data]) => ({ key: id, changes: data }))
if (updateOperations.length > 0) {
await oldTopicsTable.bulkUpdate(updateOperations)
console.log(`Updated message references for ${updateOperations.length} topics.`)
Logger.log(`Updated message references for ${updateOperations.length} topics.`)
}
console.log('DB migration to version 7 finished successfully.')
Logger.log('DB migration to version 7 finished successfully.')
}

View File

@ -1,4 +1,5 @@
import { createSelector } from '@reduxjs/toolkit'
import Logger from '@renderer/config/logger'
import { EVENT_NAMES, EventEmitter } from '@renderer/services/EventService'
import { estimateUserPromptUsage } from '@renderer/services/TokenService'
import store, { type RootState, useAppDispatch, useAppSelector } from '@renderer/store'
@ -301,7 +302,7 @@ export function useMessageOperations(topic: Topic) {
*/
const createTopicBranch = useCallback(
(sourceTopicId: string, branchPointIndex: number, newTopic: Topic) => {
console.log(`Cloning messages from topic ${sourceTopicId} to new topic ${newTopic.id}`)
Logger.log(`Cloning messages from topic ${sourceTopicId} to new topic ${newTopic.id}`)
return dispatch(cloneMessagesToNewTopicThunk(sourceTopicId, branchPointIndex, newTopic))
},
[dispatch]

View File

@ -84,9 +84,7 @@ const App: FC<Props> = ({ app, onClick, size = 60, isLast }) => {
}
const handleFileChange = async (info: any) => {
console.log(info)
const file = info.fileList[info.fileList.length - 1]?.originFileObj
console.log(file)
setFileList(info.fileList.slice(-1))
if (file) {

View File

@ -8,6 +8,7 @@ import {
import { Navbar, NavbarCenter } from '@renderer/components/app/Navbar'
import ListItem from '@renderer/components/ListItem'
import TextEditPopup from '@renderer/components/Popups/TextEditPopup'
import Logger from '@renderer/config/logger'
import db from '@renderer/databases'
import FileManager from '@renderer/services/FileManager'
import store from '@renderer/store'
@ -104,7 +105,7 @@ const FilesPage: FC = () => {
// This case should ideally not happen if relatedBlocks were found,
// but handle it just in case: only delete blocks.
await db.message_blocks.bulkDelete(blockIdsToDelete)
console.log(
Logger.log(
`Deleted ${blockIdsToDelete.length} blocks related to file ${fileId}. No associated messages found (unexpected).`
)
return
@ -151,9 +152,9 @@ const FilesPage: FC = () => {
await db.message_blocks.bulkDelete(blockIdsToDelete)
})
console.log(`Deleted ${blockIdsToDelete.length} blocks and updated relevant topic messages for file ${fileId}.`)
Logger.log(`Deleted ${blockIdsToDelete.length} blocks and updated relevant topic messages for file ${fileId}.`)
} catch (error) {
console.error(`Error updating topics or deleting blocks for file ${fileId}:`, error)
Logger.error(`Error updating topics or deleting blocks for file ${fileId}:`, error)
window.modal.error({ content: t('files.delete.db_error'), centered: true }) // 提示数据库操作失败
// Consider whether to attempt to restore the physical file (usually difficult)
}

View File

@ -1,6 +1,7 @@
import { HolderOutlined } from '@ant-design/icons'
import { QuickPanelListItem, QuickPanelView, useQuickPanel } from '@renderer/components/QuickPanel'
import TranslateButton from '@renderer/components/TranslateButton'
import Logger from '@renderer/config/logger'
import {
isGenerateImageModel,
isSupportedReasoningEffortModel,
@ -36,7 +37,6 @@ import { documentExts, imageExts, textExts } from '@shared/config/constant'
import { Button, Tooltip } from 'antd'
import TextArea, { TextAreaRef } from 'antd/es/input/TextArea'
import dayjs from 'dayjs'
import Logger from 'electron-log/renderer'
import { debounce, isEmpty } from 'lodash'
import {
AtSign,
@ -184,7 +184,7 @@ const Inputbar: FC<Props> = ({ assistant: _assistant, setActiveTopic, topic }) =
return
}
console.log('[DEBUG] Starting to send message')
Logger.log('[DEBUG] Starting to send message')
EventEmitter.emit(EVENT_NAMES.SEND_MESSAGE)
@ -193,7 +193,7 @@ const Inputbar: FC<Props> = ({ assistant: _assistant, setActiveTopic, topic }) =
const uploadedFiles = await FileManager.uploadFiles(files)
const baseUserMessage: MessageInputBaseParams = { assistant, topic, content: text }
console.log('baseUserMessage', baseUserMessage)
Logger.log('baseUserMessage', baseUserMessage)
// getUserMessage()
if (uploadedFiles) {
@ -220,10 +220,10 @@ const Inputbar: FC<Props> = ({ assistant: _assistant, setActiveTopic, topic }) =
const { message, blocks } = getUserMessage(baseUserMessage)
currentMessageId.current = message.id
console.log('[DEBUG] Created message and blocks:', message, blocks)
console.log('[DEBUG] Dispatching _sendMessage')
Logger.log('[DEBUG] Created message and blocks:', message, blocks)
Logger.log('[DEBUG] Dispatching _sendMessage')
dispatch(_sendMessage(message, blocks, assistant, topic.id))
console.log('[DEBUG] _sendMessage dispatched')
Logger.log('[DEBUG] _sendMessage dispatched')
// Clear input
setText('')
@ -459,7 +459,7 @@ const Inputbar: FC<Props> = ({ assistant: _assistant, setActiveTopic, topic }) =
}, 200)
if (spaceClickCount === 2) {
console.log('Triple space detected - trigger translation')
Logger.log('Triple space detected - trigger translation')
setSpaceClickCount(0)
setIsTranslating(true)
translate()

View File

@ -12,7 +12,6 @@ interface Props {
}
const MessageTools: FC<Props> = ({ blocks }) => {
console.log('blocks', blocks)
const [activeKeys, setActiveKeys] = useState<string[]>([])
const [copiedMap, setCopiedMap] = useState<Record<string, boolean>>({})
const [expandedResponse, setExpandedResponse] = useState<{ content: string; title: string } | null>(null)

View File

@ -5,6 +5,7 @@ import { HStack } from '@renderer/components/Layout'
import PromptPopup from '@renderer/components/Popups/PromptPopup'
import TextEditPopup from '@renderer/components/Popups/TextEditPopup'
import Scrollbar from '@renderer/components/Scrollbar'
import Logger from '@renderer/config/logger'
import { useKnowledge } from '@renderer/hooks/useKnowledge'
import FileManager from '@renderer/services/FileManager'
import { getProviderName } from '@renderer/services/ProviderService'
@ -194,7 +195,7 @@ const KnowledgeContent: FC<KnowledgeContentProps> = ({ selectedBase }) => {
}
const path = await window.api.file.selectFolder()
console.log('[KnowledgeContent] Selected directory:', path)
Logger.log('[KnowledgeContent] Selected directory:', path)
path && addDirectory(path)
}

View File

@ -57,7 +57,6 @@ const PopupContainer: React.FC<Props> = ({ title, resolve }) => {
const nameInputRef = useRef<any>(null)
const embeddingSelectOptions = useMemo(() => {
console.log(providers)
return providers
.filter((p) => p.models.length > 0)
.map((p) => ({

View File

@ -1,4 +1,5 @@
import { CheckOutlined, InfoCircleOutlined, LoadingOutlined } from '@ant-design/icons'
import Logger from '@renderer/config/logger'
import { useTheme } from '@renderer/context/ThemeProvider'
import { useBlacklist } from '@renderer/hooks/useWebSearchProviders'
import { useAppDispatch, useAppSelector } from '@renderer/store'
@ -55,7 +56,7 @@ const BlacklistSettings: FC = () => {
name: source.name
}))
)
console.log('subscribeSources', websearch.subscribeSources)
Logger.log('subscribeSources', websearch.subscribeSources)
}, [websearch.subscribeSources])
useEffect(() => {
@ -89,7 +90,7 @@ const BlacklistSettings: FC = () => {
})
}
const onSelectChange = (newSelectedRowKeys: React.Key[]) => {
console.log('selectedRowKeys changed: ', newSelectedRowKeys)
Logger.log('selectedRowKeys changed: ', newSelectedRowKeys)
setSelectedRowKeys(newSelectedRowKeys)
}

View File

@ -1,3 +1,4 @@
import Logger from '@renderer/config/logger'
import { isFunctionCallingModel } from '@renderer/config/models'
import { REFERENCE_PROMPT } from '@renderer/config/prompts'
import { getLMStudioKeepAliveTime } from '@renderer/hooks/useLMStudio'
@ -129,7 +130,7 @@ export default abstract class BaseProvider {
const allReferences = [...webSearchReferences, ...reindexedKnowledgeReferences]
console.log(`Found ${allReferences.length} references for ID: ${message.id}`, allReferences)
Logger.log(`Found ${allReferences.length} references for ID: ${message.id}`, allReferences)
if (!isEmpty(allReferences)) {
const referenceContent = `\`\`\`json\n${JSON.stringify(allReferences, null, 2)}\n\`\`\``
@ -172,10 +173,10 @@ export default abstract class BaseProvider {
const knowledgeReferences: KnowledgeReference[] = window.keyv.get(`knowledge-search-${message.id}`)
if (!isEmpty(knowledgeReferences)) {
// console.log(`Found ${knowledgeReferences.length} knowledge base references in cache for ID: ${message.id}`)
// Logger.log(`Found ${knowledgeReferences.length} knowledge base references in cache for ID: ${message.id}`)
return knowledgeReferences
}
// console.log(`No knowledge base references found in cache for ID: ${message.id}`)
// Logger.log(`No knowledge base references found in cache for ID: ${message.id}`)
return []
}

View File

@ -1,3 +1,5 @@
import Logger from '@renderer/config/logger'
import LocalSearchProvider, { SearchItem } from './LocalSearchProvider'
export default class LocalBaiduProvider extends LocalSearchProvider {
@ -22,7 +24,7 @@ export default class LocalBaiduProvider extends LocalSearchProvider {
} catch (error) {
console.error('Failed to parse Baidu search HTML:', error)
}
console.log('Parsed Baidu search results:', results)
Logger.log('Parsed Baidu search results:', results)
return results
}
}

View File

@ -50,11 +50,11 @@ export default class LocalSearchProvider extends BaseWebSearchProvider {
const validItems = searchItems
.filter((item) => item.url.startsWith('http') || item.url.startsWith('https'))
.slice(0, websearch.maxResults)
// console.log('Valid search items:', validItems)
// Logger.log('Valid search items:', validItems)
// Fetch content for each URL concurrently
const fetchPromises = validItems.map(async (item) => {
// console.log(`Fetching content for ${item.url}...`)
// Logger.log(`Fetching content for ${item.url}...`)
const result = await fetchWebContent(item.url, 'markdown', this.provider.usingBrowser, httpOptions)
if (websearch.contentLimit && result.content.length > websearch.contentLimit) {
result.content = result.content.slice(0, websearch.contentLimit) + '...'

View File

@ -1,4 +1,5 @@
import { SearxngClient } from '@agentic/searxng'
import Logger from '@renderer/config/logger'
import { WebSearchState } from '@renderer/store/websearch'
import { WebSearchProvider, WebSearchProviderResponse } from '@renderer/types'
import { fetchWebContent, noContent } from '@renderer/utils/fetch'
@ -44,7 +45,7 @@ export default class SearxngProvider extends BaseWebSearchProvider {
}
private async initEngines(): Promise<void> {
try {
console.log(`Initializing SearxNG with API host: ${this.apiHost}`)
Logger.log(`Initializing SearxNG with API host: ${this.apiHost}`)
const auth = this.basicAuthUsername
? {
username: this.basicAuthUsername,
@ -66,7 +67,7 @@ export default class SearxngProvider extends BaseWebSearchProvider {
}
const allEngines = response.data.engines
console.log(`Found ${allEngines.length} total engines in SearxNG`)
Logger.log(`Found ${allEngines.length} total engines in SearxNG`)
this.engines = allEngines
.filter(
@ -83,11 +84,11 @@ export default class SearxngProvider extends BaseWebSearchProvider {
}
this.isInitialized = true
console.log(`SearxNG initialized successfully with ${this.engines.length} engines: ${this.engines.join(', ')}`)
Logger.log(`SearxNG initialized successfully with ${this.engines.length} engines: ${this.engines.join(', ')}`)
} catch (err) {
this.isInitialized = false
console.error('Failed to fetch SearxNG engine configuration:', err)
Logger.error('Failed to fetch SearxNG engine configuration:', err)
throw new Error(`Failed to initialize SearxNG: ${err}`)
}
}
@ -116,11 +117,11 @@ export default class SearxngProvider extends BaseWebSearchProvider {
const validItems = result.results
.filter((item) => item.url.startsWith('http') || item.url.startsWith('https'))
.slice(0, websearch.maxResults)
// console.log('Valid search items:', validItems)
// Logger.log('Valid search items:', validItems)
// Fetch content for each URL concurrently
const fetchPromises = validItems.map(async (item) => {
// console.log(`Fetching content for ${item.url}...`)
// Logger.log(`Fetching content for ${item.url}...`)
const result = await fetchWebContent(item.url, 'markdown', this.provider.usingBrowser)
if (websearch.contentLimit && result.content.length > websearch.contentLimit) {
result.content = result.content.slice(0, websearch.contentLimit) + '...'
@ -136,7 +137,7 @@ export default class SearxngProvider extends BaseWebSearchProvider {
results: results.filter((result) => result.content != noContent)
}
} catch (error) {
console.error('Searxng search failed:', error)
Logger.error('Searxng search failed:', error)
throw new Error(`Search failed: ${error instanceof Error ? error.message : 'Unknown error'}`)
}
}

View File

@ -1,3 +1,4 @@
import Logger from '@renderer/config/logger'
import db from '@renderer/databases'
import { getKnowledgeBaseParams } from '@renderer/services/KnowledgeService'
import store from '@renderer/store'
@ -37,7 +38,7 @@ class KnowledgeQueue {
async processQueue(baseId: string): Promise<void> {
if (this.processing.get(baseId)) {
console.log(`[KnowledgeQueue] Queue for base ${baseId} is already being processed`)
Logger.log(`[KnowledgeQueue] Queue for base ${baseId} is already being processed`)
return
}
@ -71,7 +72,7 @@ class KnowledgeQueue {
processableItem = findProcessableItem()
}
} finally {
console.log(`[KnowledgeQueue] Finished processing queue for base ${baseId}`)
Logger.log(`[KnowledgeQueue] Finished processing queue for base ${baseId}`)
this.processing.set(baseId, false)
}
}
@ -89,11 +90,11 @@ class KnowledgeQueue {
private async processItem(baseId: string, item: KnowledgeItem): Promise<void> {
try {
if (item.retryCount && item.retryCount >= this.MAX_RETRIES) {
console.log(`[KnowledgeQueue] Item ${item.id} has reached max retries, skipping`)
Logger.log(`[KnowledgeQueue] Item ${item.id} has reached max retries, skipping`)
return
}
console.log(`[KnowledgeQueue] Starting to process item ${item.id} (${item.type})`)
Logger.log(`[KnowledgeQueue] Starting to process item ${item.id} (${item.type})`)
store.dispatch(
updateItemProcessingStatus({
@ -120,7 +121,7 @@ class KnowledgeQueue {
let result: LoaderReturn | null = null
let note, content
console.log(`[KnowledgeQueue] Processing item: ${sourceItem.content}`)
Logger.log(`[KnowledgeQueue] Processing item: ${sourceItem.content}`)
switch (item.type) {
case 'note':
@ -135,7 +136,7 @@ class KnowledgeQueue {
break
}
console.log(`[KnowledgeQueue] Successfully completed processing item ${item.id}`)
Logger.log(`[KnowledgeQueue] Successfully completed processing item ${item.id}`)
store.dispatch(
updateItemProcessingStatus({
@ -155,11 +156,11 @@ class KnowledgeQueue {
})
)
}
console.log(`[KnowledgeQueue] Updated uniqueId for item ${item.id} in base ${baseId} `)
Logger.log(`[KnowledgeQueue] Updated uniqueId for item ${item.id} in base ${baseId} `)
store.dispatch(clearCompletedProcessing({ baseId }))
} catch (error) {
console.error(`[KnowledgeQueue] Error processing item ${item.id}: `, error)
Logger.error(`[KnowledgeQueue] Error processing item ${item.id}: `, error)
store.dispatch(
updateItemProcessingStatus({
baseId,

View File

@ -1,3 +1,4 @@
import Logger from '@renderer/config/logger'
import { getOpenAIWebSearchParams, isOpenAIWebSearch } from '@renderer/config/models'
import {
SEARCH_SUMMARY_PROMPT,
@ -200,7 +201,7 @@ async function fetchExternalTool(
// 根据配置决定是否需要提取
if (shouldWebSearch || hasKnowledgeBase) {
extractResults = await extract()
console.log('Extraction results:', extractResults)
Logger.log('[fetchExternalTool] Extraction results:', extractResults)
}
let webSearchResponseFromSearch: WebSearchResponse | undefined

View File

@ -1,10 +1,10 @@
import Logger from '@renderer/config/logger'
import db from '@renderer/databases'
import { upgradeToV7 } from '@renderer/databases/upgrades'
import i18n from '@renderer/i18n'
import store from '@renderer/store'
import { setWebDAVSyncState } from '@renderer/store/backup'
import dayjs from 'dayjs'
import Logger from 'electron-log'
export async function backup() {
const filename = `cherry-studio.${dayjs().format('YYYYMMDDHHmm')}.zip`
@ -33,7 +33,7 @@ export async function restore() {
await handleData(data)
} catch (error) {
console.error(error)
Logger.error('[Backup] restore: Error restoring backup file:', error)
window.message.error({ content: i18n.t('error.backup.file_format'), key: 'restore' })
}
}
@ -228,7 +228,7 @@ export function startAutoSync(immediate = false) {
const { webdavAutoSync, webdavHost } = store.getState().settings
if (!webdavAutoSync || !webdavHost) {
console.log('[AutoSync] Invalid sync settings, auto sync disabled')
Logger.log('[AutoSync] Invalid sync settings, auto sync disabled')
return
}
@ -254,7 +254,7 @@ export function startAutoSync(immediate = false) {
const { webdavSync } = store.getState().backup
if (webdavSyncInterval <= 0) {
console.log('[AutoSync] Invalid sync interval, auto sync disabled')
Logger.log('[AutoSync] Invalid sync interval, auto sync disabled')
stopAutoSync()
return
}
@ -274,7 +274,7 @@ export function startAutoSync(immediate = false) {
syncTimeout = setTimeout(performAutoBackup, timeUntilNextSync)
console.log(
Logger.log(
`[AutoSync] Next sync scheduled in ${Math.floor(timeUntilNextSync / 1000 / 60)} minutes ${Math.floor(
(timeUntilNextSync / 1000) % 60
)} seconds`
@ -283,7 +283,7 @@ export function startAutoSync(immediate = false) {
async function performAutoBackup() {
if (isAutoBackupRunning || isManualBackupRunning) {
console.log('[AutoSync] Backup already in progress, rescheduling')
Logger.log('[AutoSync] Backup already in progress, rescheduling')
scheduleNextBackup()
return
}
@ -294,7 +294,7 @@ export function startAutoSync(immediate = false) {
while (retryCount < maxRetries) {
try {
console.log(`[AutoSync] Starting auto backup... (attempt ${retryCount + 1}/${maxRetries})`)
Logger.log(`[AutoSync] Starting auto backup... (attempt ${retryCount + 1}/${maxRetries})`)
await backupToWebdav({ autoBackupProcess: true })
@ -313,7 +313,7 @@ export function startAutoSync(immediate = false) {
} catch (error: any) {
retryCount++
if (retryCount === maxRetries) {
console.error('[AutoSync] Auto backup failed after all retries:', error)
Logger.error('[AutoSync] Auto backup failed after all retries:', error)
store.dispatch(
setWebDAVSyncState({
@ -334,13 +334,13 @@ export function startAutoSync(immediate = false) {
} else {
//Exponential Backoff with Base 2 7s、17s、37s
const backoffDelay = Math.pow(2, retryCount - 1) * 10000 - 3000
console.log(`[AutoSync] Failed, retry ${retryCount}/${maxRetries} after ${backoffDelay / 1000}s`)
Logger.log(`[AutoSync] Failed, retry ${retryCount}/${maxRetries} after ${backoffDelay / 1000}s`)
await new Promise((resolve) => setTimeout(resolve, backoffDelay))
//in case auto backup is stopped by user
if (!isAutoBackupRunning) {
console.log('[AutoSync] retry cancelled by user, exit')
Logger.log('[AutoSync] retry cancelled by user, exit')
break
}
}
@ -351,7 +351,7 @@ export function startAutoSync(immediate = false) {
export function stopAutoSync() {
if (syncTimeout) {
console.log('[AutoSync] Stopping auto sync')
Logger.log('[AutoSync] Stopping auto sync')
clearTimeout(syncTimeout)
syncTimeout = null
}

View File

@ -1,3 +1,4 @@
import Logger from '@renderer/config/logger'
import store from '@renderer/store'
import { LRUCache } from 'lru-cache'
@ -135,7 +136,7 @@ export const CodeCacheService = {
// 检查实际配置是否变化
if (maxSize !== newMaxSize || ttl !== newTTLMilliseconds) {
console.log('[CodeCacheService] Cache config changed, recreating cache')
Logger.log('[CodeCacheService] Cache config changed, recreating cache')
highlightCache.clear()
highlightCache = new LRUCache<string, string>({
max: 500,
@ -150,7 +151,7 @@ export const CodeCacheService = {
highlightCache = null
}
} catch (error) {
console.warn('[CodeCacheService] Failed to update cache config', error)
Logger.warn('[CodeCacheService] Failed to update cache config', error)
}
},
@ -181,7 +182,7 @@ export const CodeCacheService = {
return highlightCache?.get(key) || null
} catch (error) {
console.warn('[CodeCacheService] Failed to get cached result', error)
Logger.warn('[CodeCacheService] Failed to get cached result', error)
return null
}
},
@ -205,7 +206,7 @@ export const CodeCacheService = {
highlightCache?.set(key, html)
} catch (error) {
console.warn('[CodeCacheService] Failed to set cached result', error)
Logger.warn('[CodeCacheService] Failed to set cached result', error)
}
},

View File

@ -1,3 +1,4 @@
import Logger from '@renderer/config/logger'
import db from '@renderer/databases'
import i18n from '@renderer/i18n'
import store from '@renderer/store'
@ -39,7 +40,7 @@ class FileManager {
}
static async uploadFile(file: FileType): Promise<FileType> {
console.log(`[FileManager] Uploading file: ${JSON.stringify(file)}`)
Logger.log(`[FileManager] Uploading file: ${JSON.stringify(file)}`)
const uploadFile = await window.api.file.upload(file)
const fileRecord = await db.files.get(uploadFile.id)
@ -72,7 +73,7 @@ class FileManager {
static async deleteFile(id: string, force: boolean = false): Promise<void> {
const file = await this.getFile(id)
console.log('[FileManager] Deleting file:', file)
Logger.log('[FileManager] Deleting file:', file)
if (!file) {
return
@ -90,7 +91,7 @@ class FileManager {
try {
await window.api.file.delete(id + file.ext)
} catch (error) {
console.error('[FileManager] Failed to delete file:', error)
Logger.error('[FileManager] Failed to delete file:', error)
}
}

View File

@ -1,6 +1,7 @@
import type { ExtractChunkData } from '@cherrystudio/embedjs-interfaces'
import { DEFAULT_KNOWLEDGE_DOCUMENT_COUNT, DEFAULT_KNOWLEDGE_THRESHOLD } from '@renderer/config/constant'
import { getEmbeddingMaxContext } from '@renderer/config/embedings'
import Logger from '@renderer/config/logger'
import AiProvider from '@renderer/providers/AiProvider'
import store from '@renderer/store'
import { FileType, KnowledgeBase, KnowledgeBaseParams, KnowledgeReference } from '@renderer/types'
@ -96,7 +97,7 @@ export const processKnowledgeSearch = async (
extractResults.knowledge.question.length === 0 ||
isEmpty(knowledgeBaseIds)
) {
console.log('No valid question found in extractResults.knowledge')
Logger.log('No valid question found in extractResults.knowledge')
return []
}
const questions = extractResults.knowledge.question
@ -104,7 +105,7 @@ export const processKnowledgeSearch = async (
const bases = store.getState().knowledge.bases.filter((kb) => knowledgeBaseIds?.includes(kb.id))
if (!bases || bases.length === 0) {
console.log('Skipping knowledge search: No matching knowledge bases found.')
Logger.log('Skipping knowledge search: No matching knowledge bases found.')
return []
}
@ -133,7 +134,7 @@ export const processKnowledgeSearch = async (
new Map(allSearchResults.flat().map((item) => [item.metadata.uniqueId || item.pageContent, item])).values()
).sort((a, b) => b.score - a.score)
console.log(`Knowledge base ${base.name} search results:`, searchResults)
Logger.log(`Knowledge base ${base.name} search results:`, searchResults)
let rerankResults = searchResults
if (base.rerankModel && searchResults.length > 0) {
@ -167,7 +168,7 @@ export const processKnowledgeSearch = async (
})
)
} catch (error) {
console.error(`Error searching knowledge base ${base.name}:`, error)
Logger.error(`Error searching knowledge base ${base.name}:`, error)
return []
}
})

View File

@ -1,10 +1,10 @@
import Logger from '@renderer/config/logger'
import i18n from '@renderer/i18n'
import store from '@renderer/store'
import { setNutstoreSyncState } from '@renderer/store/nutstore'
import { WebDavConfig } from '@renderer/types'
import { NUTSTORE_HOST } from '@shared/config/nutstore'
import dayjs from 'dayjs'
import Logger from 'electron-log'
import { type CreateDirectoryOptions } from 'webdav'
import { getBackupData, handleData } from './BackupService'
@ -22,7 +22,7 @@ function getNutstoreToken() {
async function createNutstoreConfig(nutstoreToken: string): Promise<WebDavConfig | null> {
const result = await window.api.nutstore.decryptToken(nutstoreToken)
if (!result) {
console.log('Invalid nutstore token')
Logger.log('[createNutstoreConfig] Invalid nutstore token')
return null
}
@ -74,7 +74,7 @@ export async function backupToNutstore({
}
if (isManualBackupRunning) {
console.log('Backup already in progress')
Logger.log('[backupToNutstore] Backup already in progress')
return
}
@ -87,7 +87,7 @@ export async function backupToNutstore({
try {
deviceType = (await window.api.system.getDeviceType()) || 'unknown'
} catch (error) {
Logger.error('[Backup] Failed to get device type:', error)
Logger.error('[backupToNutstore] Failed to get device type:', error)
}
const timestamp = dayjs().format('YYYYMMDDHHmmss')
const backupFileName = customFileName || `cherry-studio.${timestamp}.${deviceType}.zip`
@ -180,7 +180,7 @@ export async function startNutstoreAutoSync() {
const { nutstoreSyncInterval, nutstoreSyncState } = store.getState().nutstore
if (nutstoreSyncInterval <= 0) {
console.log('[Nutstore AutoSync] Invalid sync interval, nutstore auto sync disabled')
Logger.log('[Nutstore AutoSync] Invalid sync interval, nutstore auto sync disabled')
stopNutstoreAutoSync()
return
}
@ -195,7 +195,7 @@ export async function startNutstoreAutoSync() {
syncTimeout = setTimeout(performAutoBackup, timeUntilNextSync)
console.log(
Logger.log(
`[Nutstore AutoSync] Next sync scheduled in ${Math.floor(timeUntilNextSync / 1000 / 60)} minutes ${Math.floor(
(timeUntilNextSync / 1000) % 60
)} seconds`
@ -204,17 +204,17 @@ export async function startNutstoreAutoSync() {
async function performAutoBackup() {
if (isAutoBackupRunning || isManualBackupRunning) {
console.log('[Nutstore AutoSync] Backup already in progress, rescheduling')
Logger.log('[Nutstore AutoSync] Backup already in progress, rescheduling')
scheduleNextBackup()
return
}
isAutoBackupRunning = true
try {
console.log('[Nutstore AutoSync] Starting auto backup...')
Logger.log('[Nutstore AutoSync] Starting auto backup...')
await backupToNutstore({ showMessage: false })
} catch (error) {
console.error('[Nutstore AutoSync] Auto backup failed:', error)
Logger.error('[Nutstore AutoSync] Auto backup failed:', error)
} finally {
isAutoBackupRunning = false
scheduleNextBackup()
@ -224,7 +224,7 @@ export async function startNutstoreAutoSync() {
export function stopNutstoreAutoSync() {
if (syncTimeout) {
console.log('[Nutstore AutoSync] Stopping nutstore auto sync')
Logger.log('[Nutstore AutoSync] Stopping nutstore auto sync')
clearTimeout(syncTimeout)
syncTimeout = null
}

View File

@ -40,7 +40,7 @@ export function createStreamProcessor(callbacks: StreamProcessorCallbacks = {})
// The returned function processes a single chunk or a final signal
return (chunk: Chunk) => {
try {
// console.log(`[${new Date().toLocaleString()}] createStreamProcessor ${chunk.type}`, chunk)
// Logger.log(`[${new Date().toLocaleString()}] createStreamProcessor ${chunk.type}`, chunk)
// 1. Handle the manual final signal first
if (chunk?.type === ChunkType.BLOCK_COMPLETE) {
callbacks.onComplete?.(AssistantMessageStatus.SUCCESS, chunk?.response)

View File

@ -1,3 +1,4 @@
import Logger from '@renderer/config/logger'
import WebSearchEngineProvider from '@renderer/providers/WebSearchProvider'
import store from '@renderer/store'
import { WebSearchState } from '@renderer/store/websearch'
@ -128,7 +129,7 @@ class WebSearchService {
public async checkSearch(provider: WebSearchProvider): Promise<{ valid: boolean; error?: any }> {
try {
const response = await this.search(provider, 'test query')
console.log('Search response:', response)
Logger.log('[checkSearch] Search response:', response)
// 优化的判断条件:检查结果是否有效且没有错误
return { valid: response.results !== undefined, error: undefined }
} catch (error) {
@ -142,7 +143,7 @@ class WebSearchService {
): Promise<WebSearchProviderResponse> {
// 检查 websearch 和 question 是否有效
if (!extractResults.websearch?.question || extractResults.websearch.question.length === 0) {
console.log('No valid question found in extractResults.websearch')
Logger.log('[processWebsearch] No valid question found in extractResults.websearch')
return { results: [] }
}

View File

@ -1,4 +1,5 @@
import { createSlice, nanoid, type PayloadAction } from '@reduxjs/toolkit'
import Logger from '@renderer/config/logger'
import type { MCPConfig, MCPServer } from '@renderer/types'
export const initialState: MCPConfig = {
@ -135,7 +136,7 @@ export const initializeMCPServers = (existingServers: MCPServer[], dispatch: (ac
// Filter out any built-in servers that are already present
const newServers = builtinMCPServers.filter((server) => !serverIds.has(server.name))
console.log('Adding new servers:', newServers)
Logger.log('[initializeMCPServers] Adding new servers:', newServers)
// Add the new built-in servers to the existing servers
newServers.forEach((server) => {
dispatch(addMCPServer(server))

View File

@ -245,7 +245,7 @@ export const selectMessagesForTopic = createSelector(
(state: RootState, topicId: string) => state.messages.messageIdsByTopic[topicId] // Input 2: Get the ordered IDs for the specific topic
],
(messageEntities, topicMessageIds) => {
// console.log(`[Selector selectMessagesForTopic] Running for topicId: ${topicId}`); // Uncomment for debugging selector runs
// Logger.log(`[Selector selectMessagesForTopic] Running for topicId: ${topicId}`); // Uncomment for debugging selector runs
if (!topicMessageIds) {
return [] // Return an empty array if the topic or its IDs don't exist
}

View File

@ -32,7 +32,6 @@ const nutstoreSlice = createSlice({
state.nutstoreToken = action.payload
},
setNutstorePath: (state, action: PayloadAction<string>) => {
console.log(state, action.payload)
state.nutstorePath = action.payload
},
setNutstoreAutoSync: (state, action: PayloadAction<boolean>) => {

View File

@ -91,7 +91,7 @@ const updateExistingMessageAndBlocksInDB = async (
const newMessages = [...topic.messages]
// Apply the updates passed in updatedMessage
Object.assign(newMessages[messageIndex], updatedMessage)
// console.log('updateExistingMessageAndBlocksInDB', updatedMessage)
// Logger.log('updateExistingMessageAndBlocksInDB', updatedMessage)
await db.topics.update(updatedMessage.topicId, { messages: newMessages })
} else {
console.error(`[updateExistingMsg] Message ${updatedMessage.id} not found in topic ${updatedMessage.topicId}`)
@ -1102,7 +1102,7 @@ export const initiateTranslationThunk =
export const updateTranslationBlockThunk =
(blockId: string, accumulatedText: string, isComplete: boolean = false) =>
async (dispatch: AppDispatch) => {
// console.log(`[updateTranslationBlockThunk] 更新翻译块 ${blockId}, isComplete: ${isComplete}`)
// Logger.log(`[updateTranslationBlockThunk] 更新翻译块 ${blockId}, isComplete: ${isComplete}`)
try {
const status = isComplete ? MessageBlockStatus.SUCCESS : MessageBlockStatus.STREAMING
const changes: Partial<MessageBlock> = {
@ -1115,7 +1115,7 @@ export const updateTranslationBlockThunk =
// 更新数据库
await db.message_blocks.update(blockId, changes)
// console.log(`[updateTranslationBlockThunk] Successfully updated translation block ${blockId}.`)
// Logger.log(`[updateTranslationBlockThunk] Successfully updated translation block ${blockId}.`)
} catch (error) {
console.error(`[updateTranslationBlockThunk] Failed to update translation block ${blockId}:`, error)
}

View File

@ -1,3 +1,5 @@
import Logger from '@renderer/config/logger'
export const abortMap = new Map<string, (() => void)[]>()
export const addAbortController = (id: string, abortFn: () => void) => {
@ -29,7 +31,7 @@ export function createAbortPromise(signal: AbortSignal, finallyPromise: Promise<
}
const abortHandler = (e: Event) => {
console.log('abortHandler', e)
Logger.log('[createAbortPromise] abortHandler', e)
reject(new DOMException('Operation aborted', 'AbortError'))
}

View File

@ -1,3 +1,4 @@
import Logger from '@renderer/config/logger'
import { WebSearchState } from '@renderer/store/websearch'
import { WebSearchProviderResponse } from '@renderer/types'
@ -179,7 +180,7 @@ export async function parseSubscribeContent(url: string): Promise<string[]> {
try {
// 获取订阅源内容
const response = await fetch(url)
console.log('response', response)
Logger.log('[parseSubscribeContent] response', response)
if (!response.ok) {
throw new Error('Failed to fetch subscribe content')
}
@ -203,7 +204,7 @@ export async function filterResultWithBlacklist(
response: WebSearchProviderResponse,
websearch: WebSearchState
): Promise<WebSearchProviderResponse> {
console.log('filterResultWithBlacklist', response)
Logger.log('[filterResultWithBlacklist]', response)
// 没有结果或者没有黑名单规则时,直接返回原始结果
if (
@ -269,7 +270,7 @@ export async function filterResultWithBlacklist(
}
})
console.log('filterResultWithBlacklist filtered results:', filteredResults)
Logger.log('[filterResultWithBlacklist] filtered results:', filteredResults)
return {
...response,

View File

@ -21,13 +21,13 @@ export interface KnowledgeExtractResults {
* @throws
*/
export const extractInfoFromXML = (text: string): ExtractResults => {
// console.log('extract text', text)
// Logger.log('extract text', text)
const parser = new XMLParser({
isArray: (name) => {
return name === 'question' || name === 'links'
}
})
const extractResults: ExtractResults = parser.parse(text)
// console.log('Extracted results:', extractResults)
// Logger.log('Extracted results:', extractResults)
return extractResults
}

View File

@ -89,7 +89,7 @@ export async function fetchWebContent(
const parser = new DOMParser()
const doc = parser.parseFromString(html, 'text/html')
const article = new Readability(doc).parse()
// console.log('Parsed article:', article)
// Logger.log('Parsed article:', article)
switch (format) {
case 'markdown': {

View File

@ -1,3 +1,4 @@
import Logger from '@renderer/config/logger'
import { Model } from '@renderer/types'
import { ModalFuncProps } from 'antd/es/modal/interface'
// @ts-ignore next-line`
@ -202,7 +203,7 @@ export function getMcpConfigSampleFromReadme(readme: string) {
}
}
} catch (e) {
console.log('getMcpConfigSampleFromReadme', e)
Logger.log('[getMcpConfigSampleFromReadme]', e)
}
}
return null

View File

@ -1,5 +1,5 @@
import Logger from '@renderer/config/logger'
import { FileType } from '@renderer/types'
import Logger from 'electron-log/renderer'
export const getFilesFromDropEvent = async (e: React.DragEvent<HTMLDivElement>): Promise<FileType[]> => {
if (e.dataTransfer.files.length > 0) {

View File

@ -6,6 +6,7 @@ import {
ToolUseBlock
} from '@anthropic-ai/sdk/resources'
import { Content, FunctionCall, Part, Tool, Type as GeminiSchemaType } from '@google/genai'
import Logger from '@renderer/config/logger'
import { isVisionModel } from '@renderer/config/models'
import store from '@renderer/store'
import { addMCPServer } from '@renderer/store/mcp'
@ -262,7 +263,7 @@ export function openAIToolsToMcpTool(
}
export async function callMCPTool(toolResponse: MCPToolResponse): Promise<MCPCallToolResponse> {
console.log(`[MCP] Calling Tool: ${toolResponse.tool.serverName} ${toolResponse.tool.name}`, toolResponse.tool)
Logger.log(`[MCP] Calling Tool: ${toolResponse.tool.serverName} ${toolResponse.tool.name}`, toolResponse.tool)
try {
const server = getMcpServerByTool(toolResponse.tool)
@ -293,7 +294,7 @@ export async function callMCPTool(toolResponse: MCPToolResponse): Promise<MCPCal
}
}
console.log(`[MCP] Tool called: ${toolResponse.tool.serverName} ${toolResponse.tool.name}`, resp)
Logger.log(`[MCP] Tool called: ${toolResponse.tool.serverName} ${toolResponse.tool.name}`, resp)
return resp
} catch (e) {
console.error(`[MCP] Error calling Tool: ${toolResponse.tool.serverName} ${toolResponse.tool.name}`, e)
@ -458,10 +459,10 @@ export function parseToolUse(content: string, mcpTools: MCPTool[]): ToolUseRespo
// If parsing fails, use the string as is
parsedArgs = toolArgs
}
// console.log(`Parsed arguments for tool "${toolName}":`, parsedArgs)
// Logger.log(`Parsed arguments for tool "${toolName}":`, parsedArgs)
const mcpTool = mcpTools.find((tool) => tool.id === toolName)
if (!mcpTool) {
console.error(`Tool "${toolName}" not found in MCP tools`)
Logger.error(`Tool "${toolName}" not found in MCP tools`)
continue
}

View File

@ -1,3 +1,4 @@
import Logger from '@renderer/config/logger'
import type { Assistant, FileType, Topic } from '@renderer/types'
import { FileTypes } from '@renderer/types'
import type {
@ -231,9 +232,9 @@ export function createToolBlock(
metadata: metadata,
...baseOnlyOverrides
}
console.log('createToolBlock_baseOverrides', baseOverrides.metadata)
Logger.log('createToolBlock_baseOverrides', baseOverrides.metadata)
const baseBlock = createBaseMessageBlock(messageId, MessageBlockType.TOOL, baseOverrides)
console.log('createToolBlock_baseBlock', baseBlock.metadata)
Logger.log('createToolBlock_baseBlock', baseBlock.metadata)
return {
...baseBlock,
toolId,

View File

@ -7,7 +7,6 @@ interface Props {
}
const MessageContent: React.FC<Props> = ({ block }) => {
console.log('block', block)
return (
<>
{/* <Flex gap="8px" wrap style={{ marginBottom: 10 }}>