refactor[Logger]: strict type check for Logger (#8363)

* fix: strict type check of logger

* feat: logger format in renderer

* fix: error type
fullex 2025-07-23 13:24:03 +08:00 committed by GitHub
parent f6f55e0609
commit c2086fdb15
167 changed files with 542 additions and 548 deletions

View File

@ -38,20 +38,25 @@ const logger = loggerService.withContext('moduleName', CONTEXT)
### Logging ### Logging
In your code, you can call `logger` at any time to record logs. The supported methods are: `error`, `warn`, `info`, `verbose`, `debug`, `silly`. In your code, you can call `logger` at any time to record logs. The supported levels are: `error`, `warn`, `info`, `verbose`, `debug`, and `silly`.
For the meaning of each level, please refer to the section below. For the meaning of each level, please refer to the subsequent sections.
The following examples show how to use `logger.info` and `logger.error`. Other levels are used in the same way: The following are the supported parameters for logging (using `logger.LEVEL` as an example, where `LEVEL` represents one of the levels mentioned above):
```typescript ```typescript
logger.info('message', CONTEXT) logger.LEVEL(message)
logger.info('message %s %d', 'hello', 123, CONTEXT) logger.LEVEL(message, CONTEXT)
logger.error('message', new Error('error message'), CONTEXT) logger.LEVEL(message, error)
logger.LEVEL(message, error, CONTEXT)
``` ```
- `message` is a required string. All other options are optional. **Only the four calling methods above are supported**:
- `CONTEXT` as `{ key: value, ... }` is optional and will be recorded in the log file.
- If an `Error` type is passed, the error stack will be automatically recorded. | Parameter | Type | Description |
| --------- | -------- | ----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- |
| `message` | `string` | Required. This is the core field of the log, containing the main content to be recorded. |
| `CONTEXT` | `object` | Optional. Additional information to be recorded in the log file. It is recommended to use the `{ key: value, ...}` format. |
| `error` | `Error` | Optional. The error stack trace will also be printed.<br />Note that the `error` caught by `catch(error)` is of the `unknown` type. According to TypeScript best practices, you should first use `instanceof` for type checking. If you are certain it is an `Error` type, you can also use a type assertion like `as Error`. |
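Taken together, a typical call site could look like the sketch below (the `@logger` import path, the `'ConfigLoader'` module name, and the `loadConfig` helper are illustrative assumptions, not part of this change):

```typescript
// Sketch only: import path and module name are assumptions.
import { loggerService } from '@logger'

const logger = loggerService.withContext('ConfigLoader')

async function loadConfig(filePath: string): Promise<void> {
  logger.info('loading config', { filePath }) // message + CONTEXT
  try {
    // ... read and parse the file ...
  } catch (error) {
    // `error` is `unknown` inside catch: narrow it before passing it to the logger
    if (error instanceof Error) {
      logger.error('failed to load config', error, { filePath }) // message + error + CONTEXT
    } else {
      logger.error(`failed to load config: ${String(error)}`, { filePath })
    }
    throw error
  }
}
```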
### Log Levels ### Log Levels

View File

@ -38,20 +38,24 @@ const logger = loggerService.withContext('moduleName', CONTEXT)
### 记录日志 ### 记录日志
在代码中,可以随时调用 `logger` 来记录日志,支持的方法有:`error`, `warn`, `info`, `verbose`, `debug`, `silly` 在代码中,可以随时调用 `logger` 来记录日志,支持的级别有:`error`, `warn`, `info`, `verbose`, `debug`, `silly`
各级别的含义,请参考下面的章节。 各级别的含义,请参考后面的章节。
以下以 `logger.info` 和 `logger.error` 举例如何使用,其他级别是一样的 以下是支持的记录日志的参数(以 `logger.LEVEL` 举例如何使用,`LEVEL`指代为上述级别)
```typescript ```typescript
logger.info('message', CONTEXT) logger.LEVEL(message)
logger.info('message %s %d', 'hello', 123, CONTEXT) logger.LEVEL(message, CONTEXT)
logger.error('message', new Error('error message'), CONTEXT) logger.LEVEL(message, error)
logger.LEVEL(message, error, CONTEXT)
``` ```
- `message` 是必填的,`string`类型,其他选项都是可选的 **只支持上述四种调用方式**
- `CONTEXT`为`{ key: value, ...}` 是可选的,会在日志文件中记录 | 参数 | 类型 | 说明 |
- 如果传递了`Error`类型,会自动记录错误堆栈 | ----- | ----- | ----- |
| `message` | `string` | 必填项。这是日志的核心字段,记录的重点内容 |
| `CONTEXT` | `object` | 可选。其他需要在日志文件中记录的信息,建议为`{ key: value, ...}`格式 |
| `error` | `Error` | 可选。同时会打印错误堆栈信息。<br />注意`catch(error)`所捕获的`error`是`unknown`类型,按照`TypeScript`最佳实践,请先用`instanceof`进行类型判断,如果确信一定是`Error`类型,也可用断言`as Error`。|
### 记录级别 ### 记录级别

View File

@ -5,6 +5,10 @@ export type LogSourceWithContext = {
context?: Record<string, any> context?: Record<string, any>
} }
type NullableObject = object | undefined | null
export type LogContextData = [] | [Error | NullableObject] | [Error | NullableObject, ...NullableObject[]]
export type LogLevel = 'error' | 'warn' | 'info' | 'debug' | 'verbose' | 'silly' | 'none' export type LogLevel = 'error' | 'warn' | 'info' | 'debug' | 'verbose' | 'silly' | 'none'
export const LEVEL = { export const LEVEL = {
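The `LogContextData` tuple added above is what enforces the four documented call shapes at compile time. Roughly, a sketch of what it accepts and rejects (the standalone `info` declaration is just a stand-in for the logger methods):

```typescript
type NullableObject = object | undefined | null
type LogContextData = [] | [Error | NullableObject] | [Error | NullableObject, ...NullableObject[]]

// Stand-in with the same variadic signature as the logger methods
declare function info(message: string, ...data: LogContextData): void

info('plain message')                                    // []
info('message with context', { userId: 42 })             // [NullableObject]
info('request failed', new Error('boom'))                // [Error]
info('request failed', new Error('boom'), { url: '/x' }) // [Error, ...NullableObject[]]
// info('no longer allowed %s %d', 'hello', 123)         // rejected: primitives are not objects
```

This is also why the call sites changed in this diff either interpolate values into the message string or narrow caught values with `instanceof` / `as Error` before logging.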

View File

@ -71,7 +71,7 @@ app.on('web-contents-created', (_, webContents) => {
// Interrupt execution and collect call stack from unresponsive renderer // Interrupt execution and collect call stack from unresponsive renderer
logger.error('Renderer unresponsive start') logger.error('Renderer unresponsive start')
const callStack = await webContents.mainFrame.collectJavaScriptCallStack() const callStack = await webContents.mainFrame.collectJavaScriptCallStack()
logger.error('Renderer unresponsive js call stack\n', callStack) logger.error(`Renderer unresponsive js call stack\n ${callStack}`)
}) })
}) })
@ -84,7 +84,7 @@ if (!isDev) {
// handle unhandled rejection // handle unhandled rejection
process.on('unhandledRejection', (reason, promise) => { process.on('unhandledRejection', (reason, promise) => {
logger.error('Unhandled Rejection at:', promise, 'reason:', reason) logger.error(`Unhandled Rejection at: ${promise} reason: ${reason}`)
}) })
} }
@ -184,7 +184,7 @@ if (!app.requestSingleInstanceLock()) {
try { try {
await mcpService.cleanup() await mcpService.cleanup()
} catch (error) { } catch (error) {
logger.warn('Error cleaning up MCP service:', error) logger.warn('Error cleaning up MCP service:', error as Error)
} }
// finish the logger // finish the logger
logger.finish() logger.finish()

View File

@ -163,7 +163,7 @@ export function registerIpc(mainWindow: BrowserWindow, app: Electron.App) {
}) })
ipcMain.handle(IpcChannel.App_SetTestPlan, async (_, isActive: boolean) => { ipcMain.handle(IpcChannel.App_SetTestPlan, async (_, isActive: boolean) => {
logger.info('set test plan', isActive) logger.info(`set test plan: ${isActive}`)
if (isActive !== configManager.getTestPlan()) { if (isActive !== configManager.getTestPlan()) {
appUpdater.cancelDownload() appUpdater.cancelDownload()
configManager.setTestPlan(isActive) configManager.setTestPlan(isActive)
@ -171,7 +171,7 @@ export function registerIpc(mainWindow: BrowserWindow, app: Electron.App) {
}) })
ipcMain.handle(IpcChannel.App_SetTestChannel, async (_, channel: UpgradeChannel) => { ipcMain.handle(IpcChannel.App_SetTestChannel, async (_, channel: UpgradeChannel) => {
logger.info('set test channel', channel) logger.info(`set test channel: ${channel}`)
if (channel !== configManager.getTestChannel()) { if (channel !== configManager.getTestChannel()) {
appUpdater.cancelDownload() appUpdater.cancelDownload()
configManager.setTestChannel(channel) configManager.setTestChannel(channel)
@ -342,7 +342,7 @@ export function registerIpc(mainWindow: BrowserWindow, app: Electron.App) {
ipcMain.handle(IpcChannel.App_RelaunchApp, (_, options?: Electron.RelaunchOptions) => { ipcMain.handle(IpcChannel.App_RelaunchApp, (_, options?: Electron.RelaunchOptions) => {
// Fix for .AppImage // Fix for .AppImage
if (isLinux && process.env.APPIMAGE) { if (isLinux && process.env.APPIMAGE) {
logger.info('Relaunching app with options:', process.env.APPIMAGE, options) logger.info(`Relaunching app with options: ${process.env.APPIMAGE}`, options)
// On Linux, we need to use the APPIMAGE environment variable to relaunch // On Linux, we need to use the APPIMAGE environment variable to relaunch
// https://github.com/electron-userland/electron-builder/issues/1727#issuecomment-769896927 // https://github.com/electron-userland/electron-builder/issues/1727#issuecomment-769896927
options = options || {} options = options || {}
@ -580,7 +580,7 @@ export function registerIpc(mainWindow: BrowserWindow, app: Electron.App) {
// Process DXT file using the temporary path // Process DXT file using the temporary path
return await dxtService.uploadDxt(event, tempPath) return await dxtService.uploadDxt(event, tempPath)
} catch (error) { } catch (error) {
logger.error('DXT upload error:', error) logger.error('DXT upload error:', error as Error)
return { return {
success: false, success: false,
error: error instanceof Error ? error.message : 'Failed to upload DXT file' error: error instanceof Error ? error.message : 'Failed to upload DXT file'

View File

@ -185,7 +185,7 @@ export class EpubLoader extends BaseLoader<Record<string, string | number | bool
writeStream.write(text + '\n\n') writeStream.write(text + '\n\n')
} }
} catch (error) { } catch (error) {
logger.error(`[EpubLoader] Error processing chapter ${chapter.id}:`, error) logger.error(`[EpubLoader] Error processing chapter ${chapter.id}:`, error as Error)
} }
} }
@ -207,7 +207,7 @@ export class EpubLoader extends BaseLoader<Record<string, string | number | bool
// 只添加一条完成日志 // 只添加一条完成日志
logger.info(`[EpubLoader] 电子书 ${this.metadata?.title || path.basename(this.filePath)} 处理完成`) logger.info(`[EpubLoader] 电子书 ${this.metadata?.title || path.basename(this.filePath)} 处理完成`)
} catch (error) { } catch (error) {
logger.error('[EpubLoader] Error in extractTextFromEpub:', error) logger.error('[EpubLoader] Error in extractTextFromEpub:', error as Error)
throw error throw error
} }
} }
@ -223,7 +223,7 @@ export class EpubLoader extends BaseLoader<Record<string, string | number | bool
await this.extractTextFromEpub() await this.extractTextFromEpub()
} }
logger.info('[EpubLoader] 书名:', this.metadata?.title || '未知书名', ' 文本大小:', this.extractedText.length) logger.info(`[EpubLoader] 书名:${this.metadata?.title || '未知书名'} 文本大小:${this.extractedText.length}`)
// 创建文本分块器 // 创建文本分块器
const chunker = new RecursiveCharacterTextSplitter({ const chunker = new RecursiveCharacterTextSplitter({

View File

@ -129,7 +129,10 @@ export async function addFileLoader(
jsonObject = JSON.parse(await readTextFileWithAutoEncoding(file.path)) jsonObject = JSON.parse(await readTextFileWithAutoEncoding(file.path))
} catch (error) { } catch (error) {
jsonParsed = false jsonParsed = false
logger.warn('[KnowledgeBase] failed parsing json file, falling back to text processing:', file.path, error) logger.warn(
`[KnowledgeBase] failed parsing json file, falling back to text processing: ${file.path}`,
error as Error
)
} }
if (jsonParsed) { if (jsonParsed) {

View File

@ -45,7 +45,7 @@ export class OdLoader<OdType> extends BaseLoader<{ type: string }> {
try { try {
this.extractedText = await parseOfficeAsync(this.filePath, this.config) this.extractedText = await parseOfficeAsync(this.filePath, this.config)
} catch (err) { } catch (err) {
logger.error('odLoader error', err) logger.error('odLoader error', err as Error)
throw err throw err
} }
} }

View File

@ -23,7 +23,7 @@ export default class MacSysOcrProvider extends BaseOcrProvider {
const module = await import('@cherrystudio/mac-system-ocr') const module = await import('@cherrystudio/mac-system-ocr')
this.MacOCR = module.default this.MacOCR = module.default
} catch (error) { } catch (error) {
logger.error('Failed to load mac-system-ocr:', error) logger.error('Failed to load mac-system-ocr:', error as Error)
throw error throw error
} }
} }
@ -121,7 +121,7 @@ export default class MacSysOcrProvider extends BaseOcrProvider {
} }
} }
} catch (error) { } catch (error) {
logger.error('Error during OCR process:', error) logger.error('Error during OCR process:', error as Error)
throw error throw error
} }
} }

View File

@ -107,7 +107,7 @@ export default class MineruPreprocessProvider extends BasePreprocessProvider {
const response: QuotaResponse = await quota.json() const response: QuotaResponse = await quota.json()
return response.data.user_left_quota return response.data.user_left_quota
} catch (error) { } catch (error) {
logger.error('Error checking quota:', error) logger.error('Error checking quota:', error as Error)
throw error throw error
} }
} }

View File

@ -154,7 +154,7 @@ export default class MistralPreprocessProvider extends BasePreprocessProvider {
counter++ counter++
} catch (error) { } catch (error) {
logger.error(`Failed to save image ${imageFileName}:`, error) logger.error(`Failed to save image ${imageFileName}:`, error as Error)
} }
} }
}) })

View File

@ -166,7 +166,7 @@ class DifyKnowledgeServer {
content: [{ type: 'text', text: formattedText }] content: [{ type: 'text', text: formattedText }]
} }
} catch (error) { } catch (error) {
logger.error('Error fetching knowledge list:', error) logger.error('Error fetching knowledge list:', error as Error)
const errorMessage = error instanceof Error ? error.message : String(error) const errorMessage = error instanceof Error ? error.message : String(error)
// 返回包含错误信息的 MCP 响应 // 返回包含错误信息的 MCP 响应
return { return {
@ -245,7 +245,7 @@ class DifyKnowledgeServer {
content: [{ type: 'text', text: formattedText }] content: [{ type: 'text', text: formattedText }]
} }
} catch (error) { } catch (error) {
logger.error('Error searching knowledge:', error) logger.error('Error searching knowledge:', error as Error)
const errorMessage = error instanceof Error ? error.message : String(error) const errorMessage = error instanceof Error ? error.message : String(error)
return { return {
content: [{ type: 'text', text: `Search Knowledge Error: ${errorMessage}` }], content: [{ type: 'text', text: `Search Knowledge Error: ${errorMessage}` }],

View File

@ -65,7 +65,7 @@ class KnowledgeGraphManager {
await fs.writeFile(this.memoryPath, JSON.stringify({ entities: [], relations: [] }, null, 2)) await fs.writeFile(this.memoryPath, JSON.stringify({ entities: [], relations: [] }, null, 2))
} }
} catch (error) { } catch (error) {
logger.error('Failed to ensure memory path exists:', error) logger.error('Failed to ensure memory path exists:', error as Error)
// Propagate the error or handle it more gracefully depending on requirements // Propagate the error or handle it more gracefully depending on requirements
throw new McpError( throw new McpError(
ErrorCode.InternalError, ErrorCode.InternalError,
@ -104,7 +104,7 @@ class KnowledgeGraphManager {
this.relations = new Set() this.relations = new Set()
await this._persistGraph() await this._persistGraph()
} else { } else {
logger.error('Failed to load knowledge graph from disk:', error) logger.error('Failed to load knowledge graph from disk:', error as Error)
throw new McpError( throw new McpError(
ErrorCode.InternalError, ErrorCode.InternalError,
`Failed to load graph: ${error instanceof Error ? error.message : String(error)}` `Failed to load graph: ${error instanceof Error ? error.message : String(error)}`
@ -123,7 +123,7 @@ class KnowledgeGraphManager {
} }
await fs.writeFile(this.memoryPath, JSON.stringify(graphData, null, 2)) await fs.writeFile(this.memoryPath, JSON.stringify(graphData, null, 2))
} catch (error) { } catch (error) {
logger.error('Failed to save knowledge graph:', error) logger.error('Failed to save knowledge graph:', error as Error)
// Decide how to handle write errors - potentially retry or notify // Decide how to handle write errors - potentially retry or notify
throw new McpError( throw new McpError(
ErrorCode.InternalError, ErrorCode.InternalError,
@ -371,7 +371,7 @@ class MemoryServer {
this.knowledgeGraphManager = await KnowledgeGraphManager.create(memoryPath) this.knowledgeGraphManager = await KnowledgeGraphManager.create(memoryPath)
logger.debug('KnowledgeGraphManager initialized successfully.') logger.debug('KnowledgeGraphManager initialized successfully.')
} catch (error) { } catch (error) {
logger.error('Failed to initialize KnowledgeGraphManager:', error) logger.error('Failed to initialize KnowledgeGraphManager:', error as Error)
// Server might be unusable, consider how to handle this state // Server might be unusable, consider how to handle this state
// Maybe set a flag and return errors for all tool calls? // Maybe set a flag and return errors for all tool calls?
this.knowledgeGraphManager = null // Ensure it's null if init fails this.knowledgeGraphManager = null // Ensure it's null if init fails
@ -398,7 +398,7 @@ class MemoryServer {
await this._getManager() // Wait for initialization before confirming tools are available await this._getManager() // Wait for initialization before confirming tools are available
} catch (error) { } catch (error) {
// If manager failed to init, maybe return an empty tool list or throw? // If manager failed to init, maybe return an empty tool list or throw?
logger.error('Cannot list tools, manager initialization failed:', error) logger.error('Cannot list tools, manager initialization failed:', error as Error)
return { tools: [] } // Return empty list if server is not ready return { tools: [] } // Return empty list if server is not ready
} }
@ -700,7 +700,7 @@ class MemoryServer {
if (error instanceof McpError) { if (error instanceof McpError) {
throw error // Re-throw McpErrors directly throw error // Re-throw McpErrors directly
} }
logger.error(`Error executing tool ${name}:`, error) logger.error(`Error executing tool ${name}:`, error as Error)
// Throw a generic internal error for unexpected issues // Throw a generic internal error for unexpected issues
throw new McpError( throw new McpError(
ErrorCode.InternalError, ErrorCode.InternalError,

View File

@ -104,7 +104,7 @@ print('python code here')`,
} }
} catch (error) { } catch (error) {
const errorMessage = error instanceof Error ? error.message : String(error) const errorMessage = error instanceof Error ? error.message : String(error)
logger.error('Python execution error:', errorMessage) logger.error(`Python execution error: ${errorMessage}`)
throw new McpError(ErrorCode.InternalError, `Python execution failed: ${errorMessage}`) throw new McpError(ErrorCode.InternalError, `Python execution failed: ${errorMessage}`)
} }

View File

@ -73,7 +73,7 @@ export class AppService {
} }
} }
} catch (error) { } catch (error) {
logger.error('Failed to set launch on boot for Linux:', error) logger.error('Failed to set launch on boot for Linux:', error as Error)
} }
} }
} }

View File

@ -6,7 +6,7 @@ import { FeedUrl, UpgradeChannel } from '@shared/config/constant'
import { IpcChannel } from '@shared/IpcChannel' import { IpcChannel } from '@shared/IpcChannel'
import { CancellationToken, UpdateInfo } from 'builder-util-runtime' import { CancellationToken, UpdateInfo } from 'builder-util-runtime'
import { app, BrowserWindow, dialog } from 'electron' import { app, BrowserWindow, dialog } from 'electron'
import { AppUpdater as _AppUpdater, autoUpdater, NsisUpdater, UpdateCheckResult } from 'electron-updater' import { AppUpdater as _AppUpdater, autoUpdater, Logger, NsisUpdater, UpdateCheckResult } from 'electron-updater'
import path from 'path' import path from 'path'
import icon from '../../../build/icon.png?asset' import icon from '../../../build/icon.png?asset'
@ -21,7 +21,7 @@ export default class AppUpdater {
private updateCheckResult: UpdateCheckResult | null = null private updateCheckResult: UpdateCheckResult | null = null
constructor(mainWindow: BrowserWindow) { constructor(mainWindow: BrowserWindow) {
autoUpdater.logger = logger autoUpdater.logger = logger as Logger
autoUpdater.forceDevUpdateConfig = !app.isPackaged autoUpdater.forceDevUpdateConfig = !app.isPackaged
autoUpdater.autoDownload = configManager.getAutoUpdate() autoUpdater.autoDownload = configManager.getAutoUpdate()
autoUpdater.autoInstallOnAppQuit = configManager.getAutoUpdate() autoUpdater.autoInstallOnAppQuit = configManager.getAutoUpdate()
@ -71,7 +71,7 @@ export default class AppUpdater {
private async _getPreReleaseVersionFromGithub(channel: UpgradeChannel) { private async _getPreReleaseVersionFromGithub(channel: UpgradeChannel) {
try { try {
logger.info('get pre release version from github', channel) logger.info(`get pre release version from github: ${channel}`)
const responses = await fetch('https://api.github.com/repos/CherryHQ/cherry-studio/releases?per_page=8', { const responses = await fetch('https://api.github.com/repos/CherryHQ/cherry-studio/releases?per_page=8', {
headers: { headers: {
Accept: 'application/vnd.github+json', Accept: 'application/vnd.github+json',
@ -90,10 +90,9 @@ export default class AppUpdater {
return null return null
} }
logger.info('release info', release.tag_name)
return `https://github.com/CherryHQ/cherry-studio/releases/download/${release.tag_name}` return `https://github.com/CherryHQ/cherry-studio/releases/download/${release.tag_name}`
} catch (error) { } catch (error) {
logger.error('Failed to get latest not draft version from github:', error) logger.error('Failed to get latest not draft version from github:', error as Error)
return null return null
} }
} }
@ -117,7 +116,7 @@ export default class AppUpdater {
const data = await ipinfo.json() const data = await ipinfo.json()
return data.country || 'CN' return data.country || 'CN'
} catch (error) { } catch (error) {
logger.error('Failed to get ipinfo:', error) logger.error('Failed to get ipinfo:', error as Error)
return 'CN' return 'CN'
} }
} }
@ -225,7 +224,7 @@ export default class AppUpdater {
updateInfo: this.updateCheckResult?.updateInfo updateInfo: this.updateCheckResult?.updateInfo
} }
} catch (error) { } catch (error) {
logger.error('Failed to check for update:', error) logger.error('Failed to check for update:', error as Error)
return { return {
currentVersion: app.getVersion(), currentVersion: app.getVersion(),
updateInfo: null updateInfo: null

View File

@ -60,7 +60,7 @@ class BackupManager {
// 确保根目录权限 // 确保根目录权限
await this.forceSetWritable(dirPath) await this.forceSetWritable(dirPath)
} catch (error) { } catch (error) {
logger.error(`权限设置失败:${dirPath}`, error) logger.error(`权限设置失败:${dirPath}`, error as Error)
throw error throw error
} }
} }
@ -83,7 +83,7 @@ class BackupManager {
} }
} catch (error) { } catch (error) {
if ((error as NodeJS.ErrnoException).code !== 'ENOENT') { if ((error as NodeJS.ErrnoException).code !== 'ENOENT') {
logger.warn(`权限设置警告:${targetPath}`, error) logger.warn(`权限设置警告:${targetPath}`, error as Error)
} }
} }
} }
@ -124,7 +124,7 @@ class BackupManager {
onProgress({ stage: 'writing_data', progress: 20, total: 100 }) onProgress({ stage: 'writing_data', progress: 20, total: 100 })
logger.debug('BackupManager IPC', skipBackupFile) logger.debug(`BackupManager IPC, skipBackupFile: ${skipBackupFile}`)
if (!skipBackupFile) { if (!skipBackupFile) {
// 复制 Data 目录到临时目录 // 复制 Data 目录到临时目录
@ -181,7 +181,7 @@ class BackupManager {
} }
} catch (error) { } catch (error) {
// 仅在出错时记录日志 // 仅在出错时记录日志
logger.error('[BackupManager] Error calculating totals:', error) logger.error('[BackupManager] Error calculating totals:', error as Error)
} }
} }
@ -241,7 +241,7 @@ class BackupManager {
logger.debug('Backup completed successfully') logger.debug('Backup completed successfully')
return backupedFilePath return backupedFilePath
} catch (error) { } catch (error) {
logger.error('[BackupManager] Backup failed:', error) logger.error('[BackupManager] Backup failed:', error as Error)
// 确保清理临时目录 // 确保清理临时目录
await fs.remove(this.tempDir).catch(() => {}) await fs.remove(this.tempDir).catch(() => {})
throw error throw error
@ -265,7 +265,7 @@ class BackupManager {
await fs.ensureDir(this.tempDir) await fs.ensureDir(this.tempDir)
onProgress({ stage: 'preparing', progress: 0, total: 100 }) onProgress({ stage: 'preparing', progress: 0, total: 100 })
logger.debug('step 1: unzip backup file', this.tempDir) logger.debug(`step 1: unzip backup file: ${this.tempDir}`)
const zip = new StreamZip.async({ file: backupPath }) const zip = new StreamZip.async({ file: backupPath })
onProgress({ stage: 'extracting', progress: 15, total: 100 }) onProgress({ stage: 'extracting', progress: 15, total: 100 })
@ -314,7 +314,7 @@ class BackupManager {
return data return data
} catch (error) { } catch (error) {
logger.error('Restore failed:', error) logger.error('Restore failed:', error as Error)
await fs.remove(this.tempDir).catch(() => {}) await fs.remove(this.tempDir).catch(() => {})
throw error throw error
} }
@ -509,7 +509,7 @@ class BackupManager {
const backupedFilePath = await this.backup(_, fileName, data, backupDir, localConfig.skipBackupFile) const backupedFilePath = await this.backup(_, fileName, data, backupDir, localConfig.skipBackupFile)
return backupedFilePath return backupedFilePath
} catch (error) { } catch (error) {
logger.error('[BackupManager] Local backup failed:', error) logger.error('[BackupManager] Local backup failed:', error as Error)
throw error throw error
} }
} }
@ -535,7 +535,7 @@ class BackupManager {
logger.debug(`S3 backup completed successfully: ${filename}`) logger.debug(`S3 backup completed successfully: ${filename}`)
return result return result
} catch (error) { } catch (error) {
logger.error(`[BackupManager] S3 backup failed:`, error) logger.error(`[BackupManager] S3 backup failed:`, error as Error)
await fs.remove(backupedFilePath) await fs.remove(backupedFilePath)
throw error throw error
} }
@ -552,7 +552,7 @@ class BackupManager {
return await this.restore(_, backupPath) return await this.restore(_, backupPath)
} catch (error) { } catch (error) {
logger.error('[BackupManager] Local restore failed:', error) logger.error('[BackupManager] Local restore failed:', error as Error)
throw error throw error
} }
} }
@ -578,7 +578,7 @@ class BackupManager {
// Sort by modified time, newest first // Sort by modified time, newest first
return result.sort((a, b) => new Date(b.modifiedTime).getTime() - new Date(a.modifiedTime).getTime()) return result.sort((a, b) => new Date(b.modifiedTime).getTime() - new Date(a.modifiedTime).getTime())
} catch (error) { } catch (error) {
logger.error('[BackupManager] List local backup files failed:', error) logger.error('[BackupManager] List local backup files failed:', error as Error)
throw error throw error
} }
} }
@ -594,7 +594,7 @@ class BackupManager {
await fs.remove(filePath) await fs.remove(filePath)
return true return true
} catch (error) { } catch (error) {
logger.error('[BackupManager] Delete local backup file failed:', error) logger.error('[BackupManager] Delete local backup file failed:', error as Error)
throw error throw error
} }
} }
@ -605,7 +605,7 @@ class BackupManager {
await fs.ensureDir(dirPath) await fs.ensureDir(dirPath)
return true return true
} catch (error) { } catch (error) {
logger.error('[BackupManager] Set local backup directory failed:', error) logger.error('[BackupManager] Set local backup directory failed:', error as Error)
throw error throw error
} }
} }

View File

@ -103,7 +103,7 @@ class CopilotService {
avatar: response.data.avatar_url avatar: response.data.avatar_url
} }
} catch (error) { } catch (error) {
logger.error('Failed to get user information:', error) logger.error('Failed to get user information:', error as Error)
throw new CopilotServiceError('无法获取GitHub用户信息', error) throw new CopilotServiceError('无法获取GitHub用户信息', error)
} }
} }
@ -129,7 +129,7 @@ class CopilotService {
return response.data return response.data
} catch (error) { } catch (error) {
logger.error('Failed to get auth message:', error) logger.error('Failed to get auth message:', error as Error)
throw new CopilotServiceError('无法获取GitHub授权信息', error) throw new CopilotServiceError('无法获取GitHub授权信息', error)
} }
} }
@ -171,7 +171,7 @@ class CopilotService {
// 仅在最后一次尝试失败时记录详细错误 // 仅在最后一次尝试失败时记录详细错误
const isLastAttempt = attempt === CONFIG.POLLING.MAX_ATTEMPTS - 1 const isLastAttempt = attempt === CONFIG.POLLING.MAX_ATTEMPTS - 1
if (isLastAttempt) { if (isLastAttempt) {
logger.error(`Token polling failed after ${CONFIG.POLLING.MAX_ATTEMPTS} attempts:`, error) logger.error(`Token polling failed after ${CONFIG.POLLING.MAX_ATTEMPTS} attempts:`, error as Error)
} }
} }
} }
@ -187,7 +187,7 @@ class CopilotService {
const encryptedToken = safeStorage.encryptString(token) const encryptedToken = safeStorage.encryptString(token)
await fs.writeFile(this.tokenFilePath, encryptedToken) await fs.writeFile(this.tokenFilePath, encryptedToken)
} catch (error) { } catch (error) {
logger.error('Failed to save token:', error) logger.error('Failed to save token:', error as Error)
throw new CopilotServiceError('无法保存访问令牌', error) throw new CopilotServiceError('无法保存访问令牌', error)
} }
} }
@ -216,7 +216,7 @@ class CopilotService {
return response.data return response.data
} catch (error) { } catch (error) {
logger.error('Failed to get Copilot token:', error) logger.error('Failed to get Copilot token:', error as Error)
throw new CopilotServiceError('无法获取Copilot令牌请重新授权', error) throw new CopilotServiceError('无法获取Copilot令牌请重新授权', error)
} }
} }
@ -235,7 +235,7 @@ class CopilotService {
logger.debug('Token file not found, nothing to delete') logger.debug('Token file not found, nothing to delete')
} }
} catch (error) { } catch (error) {
logger.error('Failed to logout:', error) logger.error('Failed to logout:', error as Error)
throw new CopilotServiceError('无法完成退出登录操作', error) throw new CopilotServiceError('无法完成退出登录操作', error)
} }
} }

View File

@ -176,7 +176,7 @@ class DxtService {
fs.mkdirSync(this.mcpDir, { recursive: true }) fs.mkdirSync(this.mcpDir, { recursive: true })
} }
} catch (error) { } catch (error) {
logger.error('Failed to create directories:', error) logger.error('Failed to create directories:', error as Error)
} }
} }
@ -232,7 +232,7 @@ class DxtService {
} }
// Extract the DXT file (which is a ZIP archive) to a temporary directory // Extract the DXT file (which is a ZIP archive) to a temporary directory
logger.debug('Extracting DXT file:', filePath) logger.debug(`Extracting DXT file: ${filePath}`)
const zip = new StreamZip.async({ file: filePath }) const zip = new StreamZip.async({ file: filePath })
await zip.extract(null, tempExtractDir) await zip.extract(null, tempExtractDir)
@ -278,14 +278,14 @@ class DxtService {
// Clean up any existing version of this server // Clean up any existing version of this server
if (fs.existsSync(finalExtractDir)) { if (fs.existsSync(finalExtractDir)) {
logger.debug('Removing existing server directory:', finalExtractDir) logger.debug(`Removing existing server directory: ${finalExtractDir}`)
fs.rmSync(finalExtractDir, { recursive: true, force: true }) fs.rmSync(finalExtractDir, { recursive: true, force: true })
} }
// Move the temporary directory to the final location // Move the temporary directory to the final location
// Use recursive copy + remove instead of rename to handle cross-filesystem moves // Use recursive copy + remove instead of rename to handle cross-filesystem moves
await this.moveDirectory(tempExtractDir, finalExtractDir) await this.moveDirectory(tempExtractDir, finalExtractDir)
logger.debug('DXT server extracted to:', finalExtractDir) logger.debug(`DXT server extracted to: ${finalExtractDir}`)
// Clean up the uploaded DXT file if it's in temp directory // Clean up the uploaded DXT file if it's in temp directory
if (filePath.startsWith(this.tempDir)) { if (filePath.startsWith(this.tempDir)) {
@ -307,7 +307,7 @@ class DxtService {
} }
const errorMessage = error instanceof Error ? error.message : 'Failed to process DXT file' const errorMessage = error instanceof Error ? error.message : 'Failed to process DXT file'
logger.error('DXT upload error:', error) logger.error('DXT upload error:', error as Error)
return { return {
success: false, success: false,
@ -324,7 +324,7 @@ class DxtService {
// Read the manifest from the DXT server directory // Read the manifest from the DXT server directory
const manifestPath = path.join(dxtPath, 'manifest.json') const manifestPath = path.join(dxtPath, 'manifest.json')
if (!fs.existsSync(manifestPath)) { if (!fs.existsSync(manifestPath)) {
logger.error('Manifest not found:', manifestPath) logger.error(`Manifest not found: ${manifestPath}`)
return null return null
} }
@ -347,7 +347,7 @@ class DxtService {
return resolvedConfig return resolvedConfig
} catch (error) { } catch (error) {
logger.error('Failed to resolve MCP config:', error) logger.error('Failed to resolve MCP config:', error as Error)
return null return null
} }
} }
@ -362,7 +362,7 @@ class DxtService {
// First try the sanitized path // First try the sanitized path
if (fs.existsSync(serverDir)) { if (fs.existsSync(serverDir)) {
logger.debug('Removing DXT server directory:', serverDir) logger.debug(`Removing DXT server directory: ${serverDir}`)
fs.rmSync(serverDir, { recursive: true, force: true }) fs.rmSync(serverDir, { recursive: true, force: true })
return true return true
} }
@ -370,15 +370,15 @@ class DxtService {
// Fallback: try with original name in case it was stored differently // Fallback: try with original name in case it was stored differently
const originalServerDir = path.join(this.mcpDir, `server-${serverName}`) const originalServerDir = path.join(this.mcpDir, `server-${serverName}`)
if (fs.existsSync(originalServerDir)) { if (fs.existsSync(originalServerDir)) {
logger.debug('Removing DXT server directory:', originalServerDir) logger.debug(`Removing DXT server directory: ${originalServerDir}`)
fs.rmSync(originalServerDir, { recursive: true, force: true }) fs.rmSync(originalServerDir, { recursive: true, force: true })
return true return true
} }
logger.warn('Server directory not found:', serverDir) logger.warn(`Server directory not found: ${serverDir}`)
return false return false
} catch (error) { } catch (error) {
logger.error('Failed to cleanup DXT server:', error) logger.error('Failed to cleanup DXT server:', error as Error)
return false return false
} }
} }
@ -390,7 +390,7 @@ class DxtService {
fs.rmSync(this.tempDir, { recursive: true, force: true }) fs.rmSync(this.tempDir, { recursive: true, force: true })
} }
} catch (error) { } catch (error) {
logger.error('Cleanup error:', error) logger.error('Cleanup error:', error as Error)
} }
} }
} }

View File

@ -403,7 +403,7 @@ export class ExportService {
logger.debug('Document exported successfully') logger.debug('Document exported successfully')
} }
} catch (error) { } catch (error) {
logger.error('Export to Word failed:', error) logger.error('Export to Word failed:', error as Error)
throw error throw error
} }
} }

View File

@ -40,7 +40,7 @@ class FileStorage {
fs.mkdirSync(this.tempDir, { recursive: true }) fs.mkdirSync(this.tempDir, { recursive: true })
} }
} catch (error) { } catch (error) {
logger.error('Failed to initialize storage directories:', error) logger.error('Failed to initialize storage directories:', error as Error)
throw error throw error
} }
} }
@ -58,7 +58,7 @@ class FileStorage {
findDuplicateFile = async (filePath: string): Promise<FileMetadata | null> => { findDuplicateFile = async (filePath: string): Promise<FileMetadata | null> => {
const stats = fs.statSync(filePath) const stats = fs.statSync(filePath)
logger.debug('stats', stats, filePath) logger.debug(`stats: ${stats}, filePath: ${filePath}`)
const fileSize = stats.size const fileSize = stats.size
const files = await fs.promises.readdir(this.storageDir) const files = await fs.promises.readdir(this.storageDir)
@ -139,9 +139,9 @@ class FileStorage {
if (fileSizeInMB > 1) { if (fileSizeInMB > 1) {
try { try {
await fs.promises.copyFile(sourcePath, destPath) await fs.promises.copyFile(sourcePath, destPath)
logger.debug('Image compressed successfully:', sourcePath) logger.debug(`Image compressed successfully: ${sourcePath}`)
} catch (jimpError) { } catch (jimpError) {
logger.error('Image compression failed:', jimpError) logger.error('Image compression failed:', jimpError as Error)
await fs.promises.copyFile(sourcePath, destPath) await fs.promises.copyFile(sourcePath, destPath)
} }
} else { } else {
@ -149,7 +149,7 @@ class FileStorage {
await fs.promises.copyFile(sourcePath, destPath) await fs.promises.copyFile(sourcePath, destPath)
} }
} catch (error) { } catch (error) {
logger.error('Image handling failed:', error) logger.error('Image handling failed:', error as Error)
// 错误情况下直接复制原文件 // 错误情况下直接复制原文件
await fs.promises.copyFile(sourcePath, destPath) await fs.promises.copyFile(sourcePath, destPath)
} }
@ -167,7 +167,7 @@ class FileStorage {
const ext = path.extname(origin_name).toLowerCase() const ext = path.extname(origin_name).toLowerCase()
const destPath = path.join(this.storageDir, uuid + ext) const destPath = path.join(this.storageDir, uuid + ext)
logger.info('[FileStorage] Uploading file:', file.path) logger.info(`[FileStorage] Uploading file: ${file.path}`)
// 根据文件类型选择处理方式 // 根据文件类型选择处理方式
if (imageExts.includes(ext)) { if (imageExts.includes(ext)) {
@ -191,7 +191,7 @@ class FileStorage {
count: 1 count: 1
} }
logger.debug('File uploaded:', fileMetadata) logger.debug(`File uploaded: ${fileMetadata}`)
return fileMetadata return fileMetadata
} }
@ -261,7 +261,7 @@ class FileStorage {
return data return data
} catch (error) { } catch (error) {
chdir(originalCwd) chdir(originalCwd)
logger.error('Failed to read file:', error) logger.error('Failed to read file:', error as Error)
throw error throw error
} }
} }
@ -273,7 +273,7 @@ class FileStorage {
return fs.readFileSync(filePath, 'utf-8') return fs.readFileSync(filePath, 'utf-8')
} }
} catch (error) { } catch (error) {
logger.error('Failed to read file:', error) logger.error('Failed to read file:', error as Error)
throw new Error(`Failed to read file: ${filePath}.`) throw new Error(`Failed to read file: ${filePath}.`)
} }
} }
@ -350,7 +350,7 @@ class FileStorage {
return fileMetadata return fileMetadata
} catch (error) { } catch (error) {
logger.error('Failed to save base64 image:', error) logger.error('Failed to save base64 image:', error as Error)
throw error throw error
} }
} }
@ -419,7 +419,7 @@ class FileStorage {
return null return null
} catch (err) { } catch (err) {
logger.error('[IPC - Error]', 'An error occurred opening the file:', err) logger.error('[IPC - Error] An error occurred opening the file:', err as Error)
return null return null
} }
} }
@ -437,7 +437,7 @@ class FileStorage {
if (fs.existsSync(filePath)) { if (fs.existsSync(filePath)) {
shell.openPath(filePath).catch((err) => logger.error('[IPC - Error] Failed to open file:', err)) shell.openPath(filePath).catch((err) => logger.error('[IPC - Error] Failed to open file:', err))
} else { } else {
logger.warn('[IPC - Warning] File does not exist:', filePath) logger.warn(`[IPC - Warning] File does not exist: ${filePath}`)
} }
} }
@ -464,7 +464,7 @@ class FileStorage {
return result.filePath return result.filePath
} catch (err: any) { } catch (err: any) {
logger.error('[IPC - Error]', 'An error occurred saving the file:', err) logger.error('[IPC - Error] An error occurred saving the file:', err as Error)
return Promise.reject('An error occurred saving the file: ' + err?.message) return Promise.reject('An error occurred saving the file: ' + err?.message)
} }
} }
@ -481,7 +481,7 @@ class FileStorage {
fs.writeFileSync(filePath, base64Data, 'base64') fs.writeFileSync(filePath, base64Data, 'base64')
} }
} catch (error) { } catch (error) {
logger.error('[IPC - Error]', 'An error occurred saving the image:', error) logger.error('[IPC - Error] An error occurred saving the image:', error as Error)
} }
} }
@ -499,7 +499,7 @@ class FileStorage {
return null return null
} catch (err) { } catch (err) {
logger.error('[IPC - Error]', 'An error occurred selecting the folder:', err) logger.error('[IPC - Error] An error occurred selecting the folder:', err as Error)
return null return null
} }
} }
@ -564,7 +564,7 @@ class FileStorage {
return fileMetadata return fileMetadata
} catch (error) { } catch (error) {
logger.error('Download file error:', error) logger.error('Download file error:', error as Error)
throw error throw error
} }
} }
@ -601,9 +601,9 @@ class FileStorage {
// 复制文件 // 复制文件
await fs.promises.copyFile(sourcePath, destPath) await fs.promises.copyFile(sourcePath, destPath)
logger.debug('File copied successfully:', { from: sourcePath, to: destPath }) logger.debug(`File copied successfully: ${sourcePath} to ${destPath}`)
} catch (error) { } catch (error) {
logger.error('Copy file failed:', error) logger.error('Copy file failed:', error as Error)
throw error throw error
} }
} }
@ -611,18 +611,18 @@ class FileStorage {
public writeFileWithId = async (_: Electron.IpcMainInvokeEvent, id: string, content: string): Promise<void> => { public writeFileWithId = async (_: Electron.IpcMainInvokeEvent, id: string, content: string): Promise<void> => {
try { try {
const filePath = path.join(this.storageDir, id) const filePath = path.join(this.storageDir, id)
logger.debug('Writing file:', filePath) logger.debug(`Writing file: ${filePath}`)
// 确保目录存在 // 确保目录存在
if (!fs.existsSync(this.storageDir)) { if (!fs.existsSync(this.storageDir)) {
logger.debug('Creating storage directory:', this.storageDir) logger.debug(`Creating storage directory: ${this.storageDir}`)
fs.mkdirSync(this.storageDir, { recursive: true }) fs.mkdirSync(this.storageDir, { recursive: true })
} }
await fs.promises.writeFile(filePath, content, 'utf8') await fs.promises.writeFile(filePath, content, 'utf8')
logger.debug('File written successfully:', filePath) logger.debug(`File written successfully: ${filePath}`)
} catch (error) { } catch (error) {
logger.error('Failed to write file:', error) logger.error('Failed to write file:', error as Error)
throw error throw error
} }
} }

View File

@ -144,7 +144,7 @@ class KnowledgeService {
logger.debug(`Removed database instance reference for id: ${id}`) logger.debug(`Removed database instance reference for id: ${id}`)
} }
} catch (error) { } catch (error) {
logger.warn(`Failed to cleanup resources for id: ${id}`, error) logger.warn(`Failed to cleanup resources for id: ${id}`, error as Error)
} }
} }
@ -176,7 +176,7 @@ class KnowledgeService {
return JSON.parse(fs.readFileSync(this.pendingDeleteFile, 'utf-8')) as string[] return JSON.parse(fs.readFileSync(this.pendingDeleteFile, 'utf-8')) as string[]
} }
} catch (error) { } catch (error) {
logger.warn('Failed to load pending delete IDs:', error) logger.warn('Failed to load pending delete IDs:', error as Error)
} }
return [] return []
}, },
@ -186,7 +186,7 @@ class KnowledgeService {
fs.writeFileSync(this.pendingDeleteFile, JSON.stringify(ids, null, 2)) fs.writeFileSync(this.pendingDeleteFile, JSON.stringify(ids, null, 2))
logger.debug(`Total ${ids.length} knowledge bases pending delete`) logger.debug(`Total ${ids.length} knowledge bases pending delete`)
} catch (error) { } catch (error) {
logger.warn('Failed to save pending delete IDs:', error) logger.warn('Failed to save pending delete IDs:', error as Error)
} }
}, },
@ -202,7 +202,7 @@ class KnowledgeService {
fs.unlinkSync(this.pendingDeleteFile) fs.unlinkSync(this.pendingDeleteFile)
} }
} catch (error) { } catch (error) {
logger.warn('Failed to clear pending delete file:', error) logger.warn('Failed to clear pending delete file:', error as Error)
} }
} }
} }
@ -257,7 +257,7 @@ class KnowledgeService {
.build() .build()
this.ragApplications.set(id, ragApplication) this.ragApplications.set(id, ragApplication)
} catch (e) { } catch (e) {
logger.error('Failed to create RAGApplication:', e) logger.error('Failed to create RAGApplication:', e as Error)
throw new Error(`Failed to create RAGApplication: ${e}`) throw new Error(`Failed to create RAGApplication: ${e}`)
} }
@ -274,7 +274,7 @@ class KnowledgeService {
} }
public async delete(_: Electron.IpcMainInvokeEvent, id: string): Promise<void> { public async delete(_: Electron.IpcMainInvokeEvent, id: string): Promise<void> {
logger.debug('delete id', id) logger.debug(`delete id: ${id}`)
await this.cleanupKnowledgeResources(id) await this.cleanupKnowledgeResources(id)

View File

@ -1,4 +1,5 @@
import type { LogLevel, LogSourceWithContext } from '@shared/config/logger' /* eslint-disable no-restricted-syntax */
import type { LogContextData, LogLevel, LogSourceWithContext } from '@shared/config/logger'
import { LEVEL, LEVEL_MAP } from '@shared/config/logger' import { LEVEL, LEVEL_MAP } from '@shared/config/logger'
import { IpcChannel } from '@shared/IpcChannel' import { IpcChannel } from '@shared/IpcChannel'
import { app, ipcMain } from 'electron' import { app, ipcMain } from 'electron'
@ -37,7 +38,7 @@ const SYSTEM_INFO = {
os: `${os.platform()}-${os.arch()} / ${os.version()}`, os: `${os.platform()}-${os.arch()} / ${os.version()}`,
hw: `${os.cpus()[0]?.model || 'Unknown CPU'} / ${(os.totalmem() / 1024 / 1024 / 1024).toFixed(2)}GB` hw: `${os.cpus()[0]?.model || 'Unknown CPU'} / ${(os.totalmem() / 1024 / 1024 / 1024).toFixed(2)}GB`
} }
const APP_VERSION = `v${app?.getVersion?.() || 'unknown'}` const APP_VERSION = `${app?.getVersion?.() || 'unknown'}`
const DEFAULT_LEVEL = isDev ? LEVEL.SILLY : LEVEL.INFO const DEFAULT_LEVEL = isDev ? LEVEL.SILLY : LEVEL.INFO
@ -77,7 +78,7 @@ class LoggerService {
Object.values(LEVEL).includes(process.env.CSLOGGER_MAIN_LEVEL as LogLevel) Object.values(LEVEL).includes(process.env.CSLOGGER_MAIN_LEVEL as LogLevel)
) { ) {
this.envLevel = process.env.CSLOGGER_MAIN_LEVEL as LogLevel this.envLevel = process.env.CSLOGGER_MAIN_LEVEL as LogLevel
// eslint-disable-next-line no-restricted-syntax
console.log(colorText(`[LoggerService] env CSLOGGER_MAIN_LEVEL loaded: ${this.envLevel}`, 'BLUE')) console.log(colorText(`[LoggerService] env CSLOGGER_MAIN_LEVEL loaded: ${this.envLevel}`, 'BLUE'))
} }
@ -88,7 +89,7 @@ class LoggerService {
.filter((module) => module !== '') .filter((module) => module !== '')
if (showModules.length > 0) { if (showModules.length > 0) {
this.envShowModules = showModules this.envShowModules = showModules
// eslint-disable-next-line no-restricted-syntax
console.log( console.log(
colorText(`[LoggerService] env CSLOGGER_MAIN_SHOW_MODULES loaded: ${this.envShowModules.join(' ')}`, 'BLUE') colorText(`[LoggerService] env CSLOGGER_MAIN_SHOW_MODULES loaded: ${this.envShowModules.join(' ')}`, 'BLUE')
) )
@ -125,7 +126,6 @@ class LoggerService {
// Development: all levels, Production: info and above // Development: all levels, Production: info and above
level: DEFAULT_LEVEL, level: DEFAULT_LEVEL,
format: winston.format.combine( format: winston.format.combine(
winston.format.splat(),
winston.format.timestamp({ winston.format.timestamp({
format: 'YYYY-MM-DD HH:mm:ss' format: 'YYYY-MM-DD HH:mm:ss'
}), }),
@ -138,7 +138,6 @@ class LoggerService {
// Handle transport events // Handle transport events
this.logger.on('error', (error) => { this.logger.on('error', (error) => {
// eslint-disable-next-line no-restricted-syntax
console.error('LoggerService fatal error:', error) console.error('LoggerService fatal error:', error)
}) })
@ -218,39 +217,33 @@ class LoggerService {
switch (level) { switch (level) {
case LEVEL.ERROR: case LEVEL.ERROR:
// eslint-disable-next-line no-restricted-syntax
console.error( console.error(
`${datetimeColored} ${colorText(colorText('<ERROR>', 'RED'), 'BOLD')}${moduleString}${message}`, `${datetimeColored} ${colorText(colorText('<ERROR>', 'RED'), 'BOLD')}${moduleString}${message}`,
...meta ...meta
) )
break break
case LEVEL.WARN: case LEVEL.WARN:
// eslint-disable-next-line no-restricted-syntax
console.warn( console.warn(
`${datetimeColored} ${colorText(colorText('<WARN>', 'YELLOW'), 'BOLD')}${moduleString}${message}`, `${datetimeColored} ${colorText(colorText('<WARN>', 'YELLOW'), 'BOLD')}${moduleString}${message}`,
...meta ...meta
) )
break break
case LEVEL.INFO: case LEVEL.INFO:
// eslint-disable-next-line no-restricted-syntax
console.info( console.info(
`${datetimeColored} ${colorText(colorText('<INFO>', 'GREEN'), 'BOLD')}${moduleString}${message}`, `${datetimeColored} ${colorText(colorText('<INFO>', 'GREEN'), 'BOLD')}${moduleString}${message}`,
...meta ...meta
) )
break break
case LEVEL.DEBUG: case LEVEL.DEBUG:
// eslint-disable-next-line no-restricted-syntax
console.debug( console.debug(
`${datetimeColored} ${colorText(colorText('<DEBUG>', 'BLUE'), 'BOLD')}${moduleString}${message}`, `${datetimeColored} ${colorText(colorText('<DEBUG>', 'BLUE'), 'BOLD')}${moduleString}${message}`,
...meta ...meta
) )
break break
case LEVEL.VERBOSE: case LEVEL.VERBOSE:
// eslint-disable-next-line no-restricted-syntax
console.log(`${datetimeColored} ${colorText('<VERBOSE>', 'BOLD')}${moduleString}${message}`, ...meta) console.log(`${datetimeColored} ${colorText('<VERBOSE>', 'BOLD')}${moduleString}${message}`, ...meta)
break break
case LEVEL.SILLY: case LEVEL.SILLY:
// eslint-disable-next-line no-restricted-syntax
console.log(`${datetimeColored} ${colorText('<SILLY>', 'BOLD')}${moduleString}${message}`, ...meta) console.log(`${datetimeColored} ${colorText('<SILLY>', 'BOLD')}${moduleString}${message}`, ...meta)
break break
} }
@ -283,42 +276,42 @@ class LoggerService {
/** /**
* Log error message * Log error message
*/ */
public error(message: string, ...data: any[]): void { public error(message: string, ...data: LogContextData): void {
this.processMainLog(LEVEL.ERROR, message, data) this.processMainLog(LEVEL.ERROR, message, data)
} }
/** /**
* Log warning message * Log warning message
*/ */
public warn(message: string, ...data: any[]): void { public warn(message: string, ...data: LogContextData): void {
this.processMainLog(LEVEL.WARN, message, data) this.processMainLog(LEVEL.WARN, message, data)
} }
/** /**
* Log info message * Log info message
*/ */
public info(message: string, ...data: any[]): void { public info(message: string, ...data: LogContextData): void {
this.processMainLog(LEVEL.INFO, message, data) this.processMainLog(LEVEL.INFO, message, data)
} }
/** /**
* Log verbose message * Log verbose message
*/ */
public verbose(message: string, ...data: any[]): void { public verbose(message: string, ...data: LogContextData): void {
this.processMainLog(LEVEL.VERBOSE, message, data) this.processMainLog(LEVEL.VERBOSE, message, data)
} }
/** /**
* Log debug message * Log debug message
*/ */
public debug(message: string, ...data: any[]): void { public debug(message: string, ...data: LogContextData): void {
this.processMainLog(LEVEL.DEBUG, message, data) this.processMainLog(LEVEL.DEBUG, message, data)
} }
/** /**
* Log silly level message * Log silly level message
*/ */
public silly(message: string, ...data: any[]): void { public silly(message: string, ...data: LogContextData): void {
this.processMainLog(LEVEL.SILLY, message, data) this.processMainLog(LEVEL.SILLY, message, data)
} }

View File

@ -206,7 +206,7 @@ class McpService {
headers['Authorization'] = `Bearer ${tokens.access_token}` headers['Authorization'] = `Bearer ${tokens.access_token}`
} }
} catch (error) { } catch (error) {
logger.error('Failed to fetch tokens:', error) logger.error('Failed to fetch tokens:', error as Error)
} }
} }
@ -348,7 +348,7 @@ class McpService {
logger.debug(`Successfully authenticated with server: ${server.name}`) logger.debug(`Successfully authenticated with server: ${server.name}`)
} catch (oauthError) { } catch (oauthError) {
logger.error(`OAuth authentication failed for server ${server.name}:`, oauthError) logger.error(`OAuth authentication failed for server ${server.name}:`, oauthError as Error)
throw new Error( throw new Error(
`OAuth authentication failed: ${oauthError instanceof Error ? oauthError.message : String(oauthError)}` `OAuth authentication failed: ${oauthError instanceof Error ? oauthError.message : String(oauthError)}`
) )
@ -454,7 +454,7 @@ class McpService {
logger.debug(`Set up notification handlers for server: ${server.name}`) logger.debug(`Set up notification handlers for server: ${server.name}`)
} catch (error) { } catch (error) {
logger.error(`Failed to set up notification handlers for server ${server.name}:`, error) logger.error(`Failed to set up notification handlers for server ${server.name}:`, error as Error)
} }
} }
@ -510,7 +510,7 @@ class McpService {
logger.debug(`Cleaned up DXT server directory for: ${server.name}`) logger.debug(`Cleaned up DXT server directory for: ${server.name}`)
} }
} catch (error) { } catch (error) {
logger.error(`Failed to cleanup DXT server: ${server.name}`, error) logger.error(`Failed to cleanup DXT server: ${server.name}`, error as Error)
} }
} }
} }
@ -552,7 +552,7 @@ class McpService {
logger.debug(`Connectivity check successful for server: ${server.name}`) logger.debug(`Connectivity check successful for server: ${server.name}`)
return true return true
} catch (error) { } catch (error) {
logger.error(`Connectivity check failed for server: ${server.name}`, error) logger.error(`Connectivity check failed for server: ${server.name}`, error as Error)
// Close the client if connectivity check fails to ensure a clean state for the next attempt // Close the client if connectivity check fails to ensure a clean state for the next attempt
const serverKey = this.getServerKey(server) const serverKey = this.getServerKey(server)
await this.closeClient(serverKey) await this.closeClient(serverKey)
@ -614,7 +614,7 @@ class McpService {
const callToolFunc = async ({ server, name, args }: CallToolArgs) => { const callToolFunc = async ({ server, name, args }: CallToolArgs) => {
try { try {
logger.debug('Calling:', server.name, name, args, 'callId:', toolCallId) logger.debug(`Calling: ${server.name} ${name} ${JSON.stringify(args)} callId: ${toolCallId}`)
if (typeof args === 'string') { if (typeof args === 'string') {
try { try {
args = JSON.parse(args) args = JSON.parse(args)
@ -633,7 +633,7 @@ class McpService {
}) })
return result as MCPCallToolResponse return result as MCPCallToolResponse
} catch (error) { } catch (error) {
logger.error(`Error calling tool ${name} on ${server.name}:`, error) logger.error(`Error calling tool ${name} on ${server.name}:`, error as Error)
throw error throw error
} finally { } finally {
this.activeToolCalls.delete(toolCallId) this.activeToolCalls.delete(toolCallId)
@ -820,7 +820,7 @@ class McpService {
logger.debug('Successfully fetched login shell environment variables:') logger.debug('Successfully fetched login shell environment variables:')
return loginEnv return loginEnv
} catch (error) { } catch (error) {
logger.error('Failed to fetch login shell environment variables:', error) logger.error('Failed to fetch login shell environment variables:', error as Error)
return {} return {}
} }
}) })

View File

@ -48,7 +48,7 @@ export async function decryptToken(token: string) {
}) })
return JSON.parse(decrypted) as OAuthResponse return JSON.parse(decrypted) as OAuthResponse
} catch (error) { } catch (error) {
logger.error('Failed to decrypt token:', error) logger.error('Failed to decrypt token:', error as Error)
return null return null
} }
} }

View File

@ -57,7 +57,7 @@ class ObsidianVaultService {
name: vault.name || path.basename(vault.path) name: vault.name || path.basename(vault.path)
})) }))
} catch (error) { } catch (error) {
logger.error('Failed to get Obsidian Vault:', error) logger.error('Failed to get Obsidian Vault:', error as Error)
return [] return []
} }
} }
@ -71,20 +71,20 @@ class ObsidianVaultService {
try { try {
// 检查vault路径是否存在 // 检查vault路径是否存在
if (!fs.existsSync(vaultPath)) { if (!fs.existsSync(vaultPath)) {
logger.error('Vault path does not exist:', vaultPath) logger.error(`Vault path does not exist: ${vaultPath}`)
return [] return []
} }
// 检查是否是目录 // 检查是否是目录
const stats = fs.statSync(vaultPath) const stats = fs.statSync(vaultPath)
if (!stats.isDirectory()) { if (!stats.isDirectory()) {
logger.error('Vault path is not a directory:', vaultPath) logger.error(`Vault path is not a directory: ${vaultPath}`)
return [] return []
} }
this.traverseDirectory(vaultPath, '', results) this.traverseDirectory(vaultPath, '', results)
} catch (error) { } catch (error) {
logger.error('Failed to read Vault folder structure:', error) logger.error('Failed to read Vault folder structure:', error as Error)
} }
return results return results
@ -106,7 +106,7 @@ class ObsidianVaultService {
// 确保目录存在且可访问 // 确保目录存在且可访问
if (!fs.existsSync(dirPath)) { if (!fs.existsSync(dirPath)) {
logger.error('Directory does not exist:', dirPath) logger.error(`Directory does not exist: ${dirPath}`)
return return
} }
@ -114,7 +114,7 @@ class ObsidianVaultService {
try { try {
items = fs.readdirSync(dirPath, { withFileTypes: true }) items = fs.readdirSync(dirPath, { withFileTypes: true })
} catch (err) { } catch (err) {
logger.error(`Failed to read directory ${dirPath}:`, err) logger.error(`Failed to read directory ${dirPath}:`, err as Error)
return return
} }
@ -139,7 +139,7 @@ class ObsidianVaultService {
} }
} }
} catch (error) { } catch (error) {
logger.error(`Failed to traverse directory ${dirPath}:`, error) logger.error(`Failed to traverse directory ${dirPath}:`, error as Error)
} }
} }
@ -153,14 +153,14 @@ class ObsidianVaultService {
const vault = vaults.find((v) => v.name === vaultName) const vault = vaults.find((v) => v.name === vaultName)
if (!vault) { if (!vault) {
logger.error('Vault not found:', vaultName) logger.error(`Vault not found: ${vaultName}`)
return [] return []
} }
logger.debug('Get Vault file structure:', vault.name, vault.path) logger.debug(`Get Vault file structure: ${vault.name} ${vault.path}`)
return this.getVaultStructure(vault.path) return this.getVaultStructure(vault.path)
} catch (error) { } catch (error) {
logger.error('Failed to get Vault file structure:', error) logger.error('Failed to get Vault file structure:', error as Error)
return [] return []
} }
} }
@ -109,12 +109,12 @@ NoDisplay=true
logger.debug(`update-desktop-database stdout: ${stdout}`) logger.debug(`update-desktop-database stdout: ${stdout}`)
logger.debug('Desktop database updated successfully.') logger.debug('Desktop database updated successfully.')
} catch (updateError) { } catch (updateError) {
logger.error('Failed to update desktop database:', updateError) logger.error('Failed to update desktop database:', updateError as Error)
// Continue even if update fails, as the file is still created. // Continue even if update fails, as the file is still created.
} }
} catch (error) { } catch (error) {
// Log the error but don't prevent the app from starting // Log the error but don't prevent the app from starting
logger.error('Failed to setup AppImage deep link:', error) logger.error('Failed to setup AppImage deep link:', error as Error)
} }
} }
@ -57,7 +57,7 @@ export class ProxyManager {
} }
async configureProxy(config: ProxyConfig): Promise<void> { async configureProxy(config: ProxyConfig): Promise<void> {
logger.debug('configureProxy: %s %s', config?.mode, config?.proxyRules) logger.debug(`configureProxy: ${config?.mode} ${config?.proxyRules}`)
if (this.isSettingProxy) { if (this.isSettingProxy) {
return return
} }
@ -75,7 +75,7 @@ export class ProxyManager {
if (config.mode === 'system') { if (config.mode === 'system') {
const currentProxy = await getSystemProxy() const currentProxy = await getSystemProxy()
if (currentProxy) { if (currentProxy) {
logger.info('current system proxy: %s', currentProxy.proxyUrl) logger.info(`current system proxy: ${currentProxy.proxyUrl}`)
this.config.proxyRules = currentProxy.proxyUrl.toLowerCase() this.config.proxyRules = currentProxy.proxyUrl.toLowerCase()
} }
this.monitorSystemProxy() this.monitorSystemProxy()
@ -83,7 +83,7 @@ export class ProxyManager {
this.setGlobalProxy() this.setGlobalProxy()
} catch (error) { } catch (error) {
logger.error('Failed to config proxy:', error) logger.error('Failed to config proxy:', error as Error)
throw error throw error
} finally { } finally {
this.isSettingProxy = false this.isSettingProxy = false
@ -68,7 +68,7 @@ export class ReduxService extends EventEmitter {
const selectorFn = new Function('state', `return ${selector}`) const selectorFn = new Function('state', `return ${selector}`)
return selectorFn(this.stateCache) return selectorFn(this.stateCache)
} catch (error) { } catch (error) {
logger.error('Failed to select from cache:', error) logger.error('Failed to select from cache:', error as Error)
return undefined return undefined
} }
} }
@ -97,7 +97,7 @@ export class ReduxService extends EventEmitter {
})() })()
`) `)
} catch (error) { } catch (error) {
logger.error('Failed to select store value:', error) logger.error('Failed to select store value:', error as Error)
throw error throw error
} }
} }
@ -114,7 +114,7 @@ export class ReduxService extends EventEmitter {
window.store.dispatch(${JSON.stringify(action)}) window.store.dispatch(${JSON.stringify(action)})
`) `)
} catch (error) { } catch (error) {
logger.error('Failed to dispatch action:', error) logger.error('Failed to dispatch action:', error as Error)
throw error throw error
} }
} }
@ -152,7 +152,7 @@ export class ReduxService extends EventEmitter {
const newValue = await this.select(selector) const newValue = await this.select(selector)
callback(newValue) callback(newValue)
} catch (error) { } catch (error) {
logger.error('Error in subscription handler:', error) logger.error('Error in subscription handler:', error as Error)
} }
} }
@ -174,7 +174,7 @@ export class ReduxService extends EventEmitter {
window.store.getState() window.store.getState()
`) `)
} catch (error) { } catch (error) {
logger.error('Failed to get state:', error) logger.error('Failed to get state:', error as Error)
throw error throw error
} }
} }
@ -52,7 +52,7 @@ export default class S3Storage {
const isInWhiteList = VIRTUAL_HOST_SUFFIXES.some((suffix) => hostname.endsWith(suffix)) const isInWhiteList = VIRTUAL_HOST_SUFFIXES.some((suffix) => hostname.endsWith(suffix))
return !isInWhiteList return !isInWhiteList
} catch (e) { } catch (e) {
logger.warn('[S3Storage] Failed to parse endpoint, fallback to Path-Style:', endpoint, e) logger.warn(`[S3Storage] Failed to parse endpoint, fallback to Path-Style: ${endpoint}`, e as Error)
return true return true
} }
})() })()
@ -98,7 +98,7 @@ export default class S3Storage {
}) })
) )
} catch (error) { } catch (error) {
logger.error('[S3Storage] Error putting object:', error) logger.error('[S3Storage] Error putting object:', error as Error)
throw error throw error
} }
} }
@ -111,7 +111,7 @@ export default class S3Storage {
} }
return await streamToBuffer(res.Body as Readable) return await streamToBuffer(res.Body as Readable)
} catch (error) { } catch (error) {
logger.error('[S3Storage] Error getting object:', error) logger.error('[S3Storage] Error getting object:', error as Error)
throw error throw error
} }
} }
@ -128,7 +128,7 @@ export default class S3Storage {
} }
} }
} catch (error) { } catch (error) {
logger.error('[S3Storage] Error deleting object:', error) logger.error('[S3Storage] Error deleting object:', error as Error)
throw error throw error
} }
} }
@ -165,7 +165,7 @@ export default class S3Storage {
return files return files
} catch (error) { } catch (error) {
logger.error('[S3Storage] Error listing objects:', error) logger.error('[S3Storage] Error listing objects:', error as Error)
throw error throw error
} }
} }
@ -178,7 +178,7 @@ export default class S3Storage {
await this.client.send(new HeadBucketCommand({ Bucket: this.bucket })) await this.client.send(new HeadBucketCommand({ Bucket: this.bucket }))
return true return true
} catch (error) { } catch (error) {
logger.error('[S3Storage] Error checking connection:', error) logger.error('[S3Storage] Error checking connection:', error as Error)
throw error throw error
} }
} }
@ -27,7 +27,7 @@ try {
SelectionHook = require('selection-hook') SelectionHook = require('selection-hook')
} }
} catch (error) { } catch (error) {
logger.error('Failed to load selection-hook:', error) logger.error('Failed to load selection-hook:', error as Error)
} }
// Type definitions // Type definitions
@ -243,7 +243,7 @@ export class SelectionService {
} }
if (!this.selectionHook.setGlobalFilterMode(modeMap[combinedMode], combinedList)) { if (!this.selectionHook.setGlobalFilterMode(modeMap[combinedMode], combinedList)) {
this.logError(new Error('Failed to set selection-hook global filter mode')) this.logError('Failed to set selection-hook global filter mode')
} }
} }
@ -274,17 +274,17 @@ export class SelectionService {
*/ */
public start(): boolean { public start(): boolean {
if (!isSupportedOS) { if (!isSupportedOS) {
this.logError(new Error('SelectionService start(): not supported on this OS')) this.logError('SelectionService start(): not supported on this OS')
return false return false
} }
if (!this.selectionHook) { if (!this.selectionHook) {
this.logError(new Error('SelectionService start(): instance is null')) this.logError('SelectionService start(): instance is null')
return false return false
} }
if (this.started) { if (this.started) {
this.logError(new Error('SelectionService start(): already started')) this.logError('SelectionService start(): already started')
return false return false
} }
@ -292,10 +292,8 @@ export class SelectionService {
     if (isMac) {
       if (!systemPreferences.isTrustedAccessibilityClient(false)) {
         this.logError(
-          new Error(
-            'SelectionSerice not started: process is not trusted on macOS, please turn on the Accessibility permission'
-          )
+          'SelectionSerice not started: process is not trusted on macOS, please turn on the Accessibility permission'
         )
         return false
       }
     }
@ -325,7 +323,7 @@ export class SelectionService {
return true return true
} }
this.logError(new Error('Failed to start text selection hook.')) this.logError('Failed to start text selection hook.')
return false return false
} catch (error) { } catch (error) {
this.logError('Failed to set up text selection hook:', error as Error) this.logError('Failed to set up text selection hook:', error as Error)
@ -1510,8 +1508,8 @@ export class SelectionService {
     }
   }
-  private logError(...args: [...string[], Error]): void {
-    logger.error('[SelectionService] Error: ', ...args)
+  private logError(message: string, error?: Error): void {
+    logger.error(message, error)
   }
 }
@ -214,7 +214,7 @@ class SpanCacheService implements TraceCache {
try { try {
await fs.rm(filePath, { recursive: true }) await fs.rm(filePath, { recursive: true })
} catch (error) { } catch (error) {
logger.error('Error cleaning local data:', error) logger.error('Error cleaning local data:', error as Error)
} }
} }
} }
@ -359,7 +359,7 @@ class SpanCacheService implements TraceCache {
try { try {
yield JSON.parse(trimmed) as SpanEntity yield JSON.parse(trimmed) as SpanEntity
} catch (e) { } catch (e) {
logger.error(`JSON解析失败: ${trimmed}`, e) logger.error(`JSON解析失败: ${trimmed}`, e as Error)
} }
} }
} }
@ -369,7 +369,7 @@ class SpanCacheService implements TraceCache {
.filter((span) => span.topicId === topicId && span.traceId === traceId && span.modelName) .filter((span) => span.topicId === topicId && span.traceId === traceId && span.modelName)
.filter((span) => !modelName || span.modelName === modelName) .filter((span) => !modelName || span.modelName === modelName)
} catch (err) { } catch (err) {
logger.error('Error parsing JSON:', err) logger.error('Error parsing JSON:', err as Error)
throw err throw err
} }
} }
@ -387,7 +387,7 @@ class SpanCacheService implements TraceCache {
await fs.access(filePath) await fs.access(filePath)
return true return true
} catch (err) { } catch (err) {
logger.error('delete trace file error:', err) logger.error('delete trace file error:', err as Error)
return false return false
} }
} }
@ -53,7 +53,7 @@ export default class WebDav {
}) })
} }
} catch (error) { } catch (error) {
logger.error('Error creating directory on WebDAV:', error) logger.error('Error creating directory on WebDAV:', error as Error)
throw error throw error
} }
@ -62,7 +62,7 @@ export default class WebDav {
try { try {
return await this.instance.putFileContents(remoteFilePath, data, options) return await this.instance.putFileContents(remoteFilePath, data, options)
} catch (error) { } catch (error) {
logger.error('Error putting file contents on WebDAV:', error) logger.error('Error putting file contents on WebDAV:', error as Error)
throw error throw error
} }
} }
@ -77,7 +77,7 @@ export default class WebDav {
try { try {
return await this.instance.getFileContents(remoteFilePath, options) return await this.instance.getFileContents(remoteFilePath, options)
} catch (error) { } catch (error) {
logger.error('Error getting file contents on WebDAV:', error) logger.error('Error getting file contents on WebDAV:', error as Error)
throw error throw error
} }
} }
@ -90,7 +90,7 @@ export default class WebDav {
try { try {
return await this.instance.getDirectoryContents(this.webdavPath) return await this.instance.getDirectoryContents(this.webdavPath)
} catch (error) { } catch (error) {
logger.error('Error getting directory contents on WebDAV:', error) logger.error('Error getting directory contents on WebDAV:', error as Error)
throw error throw error
} }
} }
@ -103,7 +103,7 @@ export default class WebDav {
try { try {
return await this.instance.exists('/') return await this.instance.exists('/')
} catch (error) { } catch (error) {
logger.error('Error checking connection:', error) logger.error('Error checking connection:', error as Error)
throw error throw error
} }
} }
@ -116,7 +116,7 @@ export default class WebDav {
try { try {
return await this.instance.createDirectory(path, options) return await this.instance.createDirectory(path, options)
} catch (error) { } catch (error) {
logger.error('Error creating directory on WebDAV:', error) logger.error('Error creating directory on WebDAV:', error as Error)
throw error throw error
} }
} }
@ -131,7 +131,7 @@ export default class WebDav {
try { try {
return await this.instance.deleteFile(remoteFilePath) return await this.instance.deleteFile(remoteFilePath)
} catch (error) { } catch (error) {
logger.error('Error deleting file on WebDAV:', error) logger.error('Error deleting file on WebDAV:', error as Error)
throw error throw error
} }
} }
@ -30,7 +30,7 @@ export class CallBackServer {
this.events.emit('auth-code-received', code) this.events.emit('auth-code-received', code)
} }
} catch (error) { } catch (error) {
logger.error('Error processing OAuth callback:', error) logger.error('Error processing OAuth callback:', error as Error)
res.writeHead(500, { 'Content-Type': 'text/plain' }) res.writeHead(500, { 'Content-Type': 'text/plain' })
res.end('Internal Server Error') res.end('Internal Server Error')
} }
@ -43,7 +43,7 @@ export class CallBackServer {
// Handle server errors // Handle server errors
server.on('error', (error) => { server.on('error', (error) => {
logger.error('OAuth callback server error:', error) logger.error('OAuth callback server error:', error as Error)
}) })
return new Promise<http.Server>((resolve, reject) => { return new Promise<http.Server>((resolve, reject) => {
@ -40,7 +40,7 @@ export class JsonFileStorage implements IOAuthStorage {
await this.writeStorage(initial) await this.writeStorage(initial)
return initial return initial
} }
logger.error('Error reading OAuth storage:', error) logger.error('Error reading OAuth storage:', error as Error)
throw new Error(`Failed to read OAuth storage: ${error instanceof Error ? error.message : String(error)}`) throw new Error(`Failed to read OAuth storage: ${error instanceof Error ? error.message : String(error)}`)
} }
} }
@ -61,7 +61,7 @@ export class JsonFileStorage implements IOAuthStorage {
// Update cache // Update cache
this.cache = data this.cache = data
} catch (error) { } catch (error) {
logger.error('Error writing OAuth storage:', error) logger.error('Error writing OAuth storage:', error as Error)
throw new Error(`Failed to write OAuth storage: ${error instanceof Error ? error.message : String(error)}`) throw new Error(`Failed to write OAuth storage: ${error instanceof Error ? error.message : String(error)}`)
} }
} }
@ -114,7 +114,7 @@ export class JsonFileStorage implements IOAuthStorage {
this.cache = null this.cache = null
} catch (error) { } catch (error) {
if (error instanceof Error && 'code' in error && error.code !== 'ENOENT') { if (error instanceof Error && 'code' in error && error.code !== 'ENOENT') {
logger.error('Error clearing OAuth storage:', error) logger.error('Error clearing OAuth storage:', error as Error)
throw new Error(`Failed to clear OAuth storage: ${error instanceof Error ? error.message : String(error)}`) throw new Error(`Failed to clear OAuth storage: ${error instanceof Error ? error.message : String(error)}`)
} }
} }
@ -109,7 +109,7 @@ function getLoginShellEnvironment(): Promise<Record<string, string>> {
logger.warn( logger.warn(
'Parsed environment is empty or output was very short. This might indicate an issue with shell execution or environment variable retrieval.' 'Parsed environment is empty or output was very short. This might indicate an issue with shell execution or environment variable retrieval.'
) )
logger.warn('Raw output from shell:\n', output) logger.warn(`Raw output from shell:\n${output}`)
} }
env.PATH = env.Path || env.PATH || '' env.PATH = env.Path || env.PATH || ''
@ -92,7 +92,7 @@ export class MemoryService {
this.isInitialized = true this.isInitialized = true
logger.debug('Memory database initialized successfully') logger.debug('Memory database initialized successfully')
} catch (error) { } catch (error) {
logger.error('Failed to initialize memory database:', error) logger.error('Failed to initialize memory database:', error as Error)
throw new Error( throw new Error(
`Memory database initialization failed: ${error instanceof Error ? error.message : 'Unknown error'}` `Memory database initialization failed: ${error instanceof Error ? error.message : 'Unknown error'}`
) )
@ -120,7 +120,7 @@ export class MemoryService {
await this.db.execute(MemoryQueries.createIndexes.vector) await this.db.execute(MemoryQueries.createIndexes.vector)
} catch (error) { } catch (error) {
// Vector index might not be supported in all versions // Vector index might not be supported in all versions
logger.warn('Failed to create vector index, falling back to non-indexed search:', error) logger.warn('Failed to create vector index, falling back to non-indexed search:', error as Error)
} }
} }
@ -175,7 +175,7 @@ export class MemoryService {
`Generated embedding for restored memory with dimension: ${embedding.length} (target: ${this.config?.embedderDimensions || MemoryService.UNIFIED_DIMENSION})` `Generated embedding for restored memory with dimension: ${embedding.length} (target: ${this.config?.embedderDimensions || MemoryService.UNIFIED_DIMENSION})`
) )
} catch (error) { } catch (error) {
logger.error('Failed to generate embedding for restored memory:', error) logger.error('Failed to generate embedding for restored memory:', error as Error)
} }
} }
@ -237,7 +237,7 @@ export class MemoryService {
} }
} }
} catch (error) { } catch (error) {
logger.error('Failed to generate embedding:', error) logger.error('Failed to generate embedding:', error as Error)
} }
} }
@ -279,7 +279,7 @@ export class MemoryService {
count: addedMemories.length count: addedMemories.length
} }
} catch (error) { } catch (error) {
logger.error('Failed to add memories:', error) logger.error('Failed to add memories:', error as Error)
return { return {
memories: [], memories: [],
count: 0, count: 0,
@ -304,7 +304,7 @@ export class MemoryService {
const queryEmbedding = await this.generateEmbedding(query) const queryEmbedding = await this.generateEmbedding(query)
return await this.hybridSearch(query, queryEmbedding, { limit, userId, agentId, filters }) return await this.hybridSearch(query, queryEmbedding, { limit, userId, agentId, filters })
} catch (error) { } catch (error) {
logger.error('Vector search failed, falling back to text search:', error) logger.error('Vector search failed, falling back to text search:', error as Error)
} }
} }
@ -359,7 +359,7 @@ export class MemoryService {
count: memories.length count: memories.length
} }
} catch (error) { } catch (error) {
logger.error('Search failed:', error) logger.error('Search failed:', error as Error)
return { return {
memories: [], memories: [],
count: 0, count: 0,
@ -424,7 +424,7 @@ export class MemoryService {
count: totalCount count: totalCount
} }
} catch (error) { } catch (error) {
logger.error('List failed:', error) logger.error('List failed:', error as Error)
return { return {
memories: [], memories: [],
count: 0, count: 0,
@ -464,7 +464,7 @@ export class MemoryService {
logger.debug(`Memory deleted: ${id}`) logger.debug(`Memory deleted: ${id}`)
} catch (error) { } catch (error) {
logger.error('Delete failed:', error) logger.error('Delete failed:', error as Error)
throw new Error(`Failed to delete memory: ${error instanceof Error ? error.message : 'Unknown error'}`) throw new Error(`Failed to delete memory: ${error instanceof Error ? error.message : 'Unknown error'}`)
} }
} }
@ -503,7 +503,7 @@ export class MemoryService {
`Updated embedding with dimension: ${embedding.length} (target: ${this.config?.embedderDimensions || MemoryService.UNIFIED_DIMENSION})` `Updated embedding with dimension: ${embedding.length} (target: ${this.config?.embedderDimensions || MemoryService.UNIFIED_DIMENSION})`
) )
} catch (error) { } catch (error) {
logger.error('Failed to generate embedding for update:', error) logger.error('Failed to generate embedding for update:', error as Error)
} }
} }
@ -528,7 +528,7 @@ export class MemoryService {
logger.debug(`Memory updated: ${id}`) logger.debug(`Memory updated: ${id}`)
} catch (error) { } catch (error) {
logger.error('Update failed:', error) logger.error('Update failed:', error as Error)
throw new Error(`Failed to update memory: ${error instanceof Error ? error.message : 'Unknown error'}`) throw new Error(`Failed to update memory: ${error instanceof Error ? error.message : 'Unknown error'}`)
} }
} }
@ -557,7 +557,7 @@ export class MemoryService {
isDeleted: row.is_deleted === 1 isDeleted: row.is_deleted === 1
})) }))
} catch (error) { } catch (error) {
logger.error('Get history failed:', error) logger.error('Get history failed:', error as Error)
throw new Error(`Failed to get memory history: ${error instanceof Error ? error.message : 'Unknown error'}`) throw new Error(`Failed to get memory history: ${error instanceof Error ? error.message : 'Unknown error'}`)
} }
} }
@ -595,7 +595,7 @@ export class MemoryService {
logger.debug(`Reset all memories for user ${userId} (${totalCount} memories deleted)`) logger.debug(`Reset all memories for user ${userId} (${totalCount} memories deleted)`)
} catch (error) { } catch (error) {
logger.error('Reset user memories failed:', error) logger.error('Reset user memories failed:', error as Error)
throw new Error(`Failed to reset user memories: ${error instanceof Error ? error.message : 'Unknown error'}`) throw new Error(`Failed to reset user memories: ${error instanceof Error ? error.message : 'Unknown error'}`)
} }
} }
@ -637,7 +637,7 @@ export class MemoryService {
logger.debug(`Deleted user ${userId} and ${totalCount} memories`) logger.debug(`Deleted user ${userId} and ${totalCount} memories`)
} catch (error) { } catch (error) {
logger.error('Delete user failed:', error) logger.error('Delete user failed:', error as Error)
throw new Error(`Failed to delete user: ${error instanceof Error ? error.message : 'Unknown error'}`) throw new Error(`Failed to delete user: ${error instanceof Error ? error.message : 'Unknown error'}`)
} }
} }
@ -661,7 +661,7 @@ export class MemoryService {
lastMemoryDate: row.last_memory_date as string lastMemoryDate: row.last_memory_date as string
})) }))
} catch (error) { } catch (error) {
logger.error('Get users list failed:', error) logger.error('Get users list failed:', error as Error)
throw new Error(`Failed to get users list: ${error instanceof Error ? error.message : 'Unknown error'}`) throw new Error(`Failed to get users list: ${error instanceof Error ? error.message : 'Unknown error'}`)
} }
} }
@ -732,7 +732,7 @@ export class MemoryService {
// Normalize to unified dimension // Normalize to unified dimension
return this.normalizeEmbedding(embedding) return this.normalizeEmbedding(embedding)
} catch (error) { } catch (error) {
logger.error('Embedding generation failed:', error) logger.error('Embedding generation failed:', error as Error)
throw new Error(`Failed to generate embedding: ${error instanceof Error ? error.message : 'Unknown error'}`) throw new Error(`Failed to generate embedding: ${error instanceof Error ? error.message : 'Unknown error'}`)
} }
} }
@ -802,7 +802,7 @@ export class MemoryService {
count: memories.length count: memories.length
} }
} catch (error) { } catch (error) {
logger.error('Hybrid search failed:', error) logger.error('Hybrid search failed:', error as Error)
throw new Error(`Hybrid search failed: ${error instanceof Error ? error.message : 'Unknown error'}`) throw new Error(`Hybrid search failed: ${error instanceof Error ? error.message : 'Unknown error'}`)
} }
} }
@ -71,7 +71,7 @@ export class GeminiService extends BaseFileService {
return response return response
} catch (error) { } catch (error) {
logger.error('Error uploading file to Gemini:', error) logger.error('Error uploading file to Gemini:', error as Error)
return { return {
fileId: '', fileId: '',
displayName: file.origin_name, displayName: file.origin_name,
@ -117,7 +117,7 @@ export class GeminiService extends BaseFileService {
originalFile: undefined originalFile: undefined
} }
} catch (error) { } catch (error) {
logger.error('Error retrieving file from Gemini:', error) logger.error('Error retrieving file from Gemini:', error as Error)
return { return {
fileId: fileId, fileId: fileId,
displayName: '', displayName: '',
@ -175,7 +175,7 @@ export class GeminiService extends BaseFileService {
CacheService.set(GeminiService.FILE_LIST_CACHE_KEY, fileList, GeminiService.LIST_CACHE_DURATION) CacheService.set(GeminiService.FILE_LIST_CACHE_KEY, fileList, GeminiService.LIST_CACHE_DURATION)
return fileList return fileList
} catch (error) { } catch (error) {
logger.error('Error listing files from Gemini:', error) logger.error('Error listing files from Gemini:', error as Error)
return { files: [] } return { files: [] }
} }
} }
@ -185,7 +185,7 @@ export class GeminiService extends BaseFileService {
await this.fileManager.delete({ name: fileId }) await this.fileManager.delete({ name: fileId })
logger.debug(`File ${fileId} deleted from Gemini`) logger.debug(`File ${fileId} deleted from Gemini`)
} catch (error) { } catch (error) {
logger.error('Error deleting file from Gemini:', error) logger.error('Error deleting file from Gemini:', error as Error)
throw error throw error
} }
} }
@ -40,7 +40,7 @@ export class MistralService extends BaseFileService {
} }
} }
} catch (error) { } catch (error) {
logger.error('Error uploading file:', error) logger.error('Error uploading file:', error as Error)
return { return {
fileId: '', fileId: '',
displayName: file.origin_name, displayName: file.origin_name,
@ -65,7 +65,7 @@ export class MistralService extends BaseFileService {
})) }))
} }
} catch (error) { } catch (error) {
logger.error('Error listing files:', error) logger.error('Error listing files:', error as Error)
return { files: [] } return { files: [] }
} }
} }
@ -77,7 +77,7 @@ export class MistralService extends BaseFileService {
}) })
logger.debug(`File ${fileId} deleted`) logger.debug(`File ${fileId} deleted`)
} catch (error) { } catch (error) {
logger.error('Error deleting file:', error) logger.error('Error deleting file:', error as Error)
throw error throw error
} }
} }
@ -94,7 +94,7 @@ export class MistralService extends BaseFileService {
status: 'success' // Retrieved files are always processed status: 'success' // Retrieved files are always processed
} }
} catch (error) { } catch (error) {
logger.error('Error retrieving file:', error) logger.error('Error retrieving file:', error as Error)
return { return {
fileId: fileId, fileId: fileId,
displayName: '', displayName: '',
@ -12,7 +12,7 @@ function ParseData(data: string) {
return JSON.stringify(result) return JSON.stringify(result)
} catch (error) { } catch (error) {
logger.error('ParseData error:', error) logger.error('ParseData error:', error as Error)
return null return null
} }
} }
@ -51,9 +51,9 @@ export function handleMcpProtocolUrl(url: URL) {
if (data) { if (data) {
const stringify = Buffer.from(data, 'base64').toString('utf8') const stringify = Buffer.from(data, 'base64').toString('utf8')
logger.debug('install MCP servers from urlschema: ', stringify) logger.debug(`install MCP servers from urlschema: ${stringify}`)
const jsonConfig = JSON.parse(stringify) const jsonConfig = JSON.parse(stringify)
logger.debug('install MCP servers from urlschema: ', jsonConfig) logger.debug(`install MCP servers from urlschema: ${JSON.stringify(jsonConfig)}`)
// support both {mcpServers: [servers]}, [servers] and {server} // support both {mcpServers: [servers]}, [servers] and {server}
if (jsonConfig.mcpServers) { if (jsonConfig.mcpServers) {
@ -19,7 +19,7 @@ export async function compress(str: string): Promise<Buffer> {
const buffer = Buffer.from(str, 'utf-8') const buffer = Buffer.from(str, 'utf-8')
return await gzipPromise(buffer) return await gzipPromise(buffer)
} catch (error) { } catch (error) {
logger.error('Compression failed:', error) logger.error('Compression failed:', error as Error)
throw error throw error
} }
} }
@ -34,7 +34,7 @@ export async function decompress(compressedBuffer: Buffer): Promise<string> {
const buffer = await gunzipPromise(compressedBuffer) const buffer = await gunzipPromise(compressedBuffer)
return buffer.toString('utf-8') return buffer.toString('utf-8')
} catch (error) { } catch (error) {
logger.error('Decompression failed:', error) logger.error('Decompression failed:', error as Error)
throw error throw error
} }
} }
@ -401,7 +401,7 @@ if (process.contextIsolated) {
contextBridge.exposeInMainWorld('api', api) contextBridge.exposeInMainWorld('api', api)
} catch (error) { } catch (error) {
// eslint-disable-next-line no-restricted-syntax // eslint-disable-next-line no-restricted-syntax
console.error(error) console.error('[Preload]Failed to expose APIs:', error as Error)
} }
} else { } else {
// @ts-ignore (define in dts) // @ts-ignore (define in dts)
@ -179,7 +179,7 @@ export class NewAPIClient extends BaseApiClient {
return models.filter(isSupportedModel) return models.filter(isSupportedModel)
} catch (error) { } catch (error) {
logger.error('Error listing models:', error) logger.error('Error listing models:', error as Error)
return [] return []
} }
} }
@ -687,7 +687,7 @@ export class AnthropicAPIClient extends BaseApiClient<
tool_calls: [toolCall] tool_calls: [toolCall]
} as MCPToolCreatedChunk) } as MCPToolCreatedChunk)
} catch (error) { } catch (error) {
logger.error(`Error parsing tool call input: ${error}`) logger.error('Error parsing tool call input:', error as Error)
} }
} }
break break
@ -142,7 +142,7 @@ export class GeminiAPIClient extends BaseApiClient<
// console.log(response?.generatedImages?.[0]?.image?.imageBytes); // console.log(response?.generatedImages?.[0]?.image?.imageBytes);
return images return images
} catch (error) { } catch (error) {
logger.error('[generateImage] error:', error) logger.error('[generateImage] error:', error as Error)
throw error throw error
} }
} }
@ -128,7 +128,7 @@ export abstract class OpenAIBaseClient<
return models.filter(isSupportedModel) return models.filter(isSupportedModel)
} catch (error) { } catch (error) {
logger.error('Error listing models:', error) logger.error('Error listing models:', error as Error)
return [] return []
} }
} }
@ -60,7 +60,7 @@ export class PPIOAPIClient extends OpenAIAPIClient {
return processedModels.filter(isSupportedModel) return processedModels.filter(isSupportedModel)
} catch (error) { } catch (error) {
logger.error('Error listing PPIO models:', error) logger.error('Error listing PPIO models:', error as Error)
return [] return []
} }
} }
@ -160,7 +160,7 @@ export default class AiProvider {
const dimensions = await this.apiClient.getEmbeddingDimensions(model) const dimensions = await this.apiClient.getEmbeddingDimensions(model)
return dimensions return dimensions
} catch (error) { } catch (error) {
logger.error('Error getting embedding dimensions:', error) logger.error('Error getting embedding dimensions:', error as Error)
throw error throw error
} }
} }
@ -86,7 +86,7 @@ const FinalChunkConsumerMiddleware: CompletionsMiddleware =
} }
} }
} catch (error) { } catch (error) {
logger.error(`Error consuming stream:`, error) logger.error(`Error consuming stream:`, error as Error)
throw error throw error
} finally { } finally {
if (params.onChunk && !isRecursiveCall) { if (params.onChunk && !isRecursiveCall) {
@ -160,7 +160,7 @@ function extractAndAccumulateUsageMetrics(ctx: CompletionsContext, chunk: Generi
) )
} }
} catch (error) { } catch (error) {
logger.error(`Error extracting usage/metrics from chunk:`, error) logger.error('Error extracting usage/metrics from chunk:', error as Error)
} }
} }
@ -48,7 +48,7 @@ export const createGenericLoggingMiddleware: () => MethodMiddleware = () => {
return (_: MiddlewareAPI<BaseContext, any[]>) => (next) => async (ctx, args) => { return (_: MiddlewareAPI<BaseContext, any[]>) => (next) => async (ctx, args) => {
const methodName = ctx.methodName const methodName = ctx.methodName
const logPrefix = `[${middlewareName} (${methodName})]` const logPrefix = `[${middlewareName} (${methodName})]`
logger.debug(`${logPrefix} Initiating. Args:`, stringifyArgsForLogging(args)) logger.debug(`${logPrefix} Initiating. Args: ${stringifyArgsForLogging(args)}`)
const startTime = Date.now() const startTime = Date.now()
try { try {
const result = await next(ctx, args) const result = await next(ctx, args)
@ -61,7 +61,7 @@ export const createGenericLoggingMiddleware: () => MethodMiddleware = () => {
const duration = Date.now() - startTime const duration = Date.now() - startTime
// Log failure of the method call with duration and error information. / // Log failure of the method call with duration and error information. /
// 记录方法调用失败及其持续时间和错误信息。 // 记录方法调用失败及其持续时间和错误信息。
logger.error(`${logPrefix} Failed. Duration: ${duration}ms`, error) logger.error(`${logPrefix} Failed. Duration: ${duration}ms`, error as Error)
throw error // Re-throw the error to be handled by subsequent layers or the caller / 重新抛出错误,由后续层或调用者处理 throw error // Re-throw the error to be handled by subsequent layers or the caller / 重新抛出错误,由后续层或调用者处理
} }
} }
@ -134,7 +134,7 @@ function createToolHandlingTransform(
executedToolResults.push(...result.toolResults) executedToolResults.push(...result.toolResults)
executedToolCalls.push(...result.confirmedToolCalls) executedToolCalls.push(...result.confirmedToolCalls)
} catch (error) { } catch (error) {
logger.error(`Error executing tool call asynchronously:`, error) logger.error(`Error executing tool call asynchronously:`, error as Error)
} }
})() })()
@ -162,7 +162,7 @@ function createToolHandlingTransform(
// 缓存执行结果 // 缓存执行结果
executedToolResults.push(...result.toolResults) executedToolResults.push(...result.toolResults)
} catch (error) { } catch (error) {
logger.error(`Error executing tool use response asynchronously:`, error) logger.error(`Error executing tool use response asynchronously:`, error as Error)
// 错误时不影响其他工具的执行 // 错误时不影响其他工具的执行
} }
})() })()
@ -174,7 +174,7 @@ function createToolHandlingTransform(
controller.enqueue(chunk) controller.enqueue(chunk)
} }
} catch (error) { } catch (error) {
logger.error(`Error processing chunk:`, error) logger.error(`Error processing chunk:`, error as Error)
controller.error(error) controller.error(error)
} }
}, },
@ -206,7 +206,7 @@ function createToolHandlingTransform(
await executeWithToolHandling(newParams, depth + 1) await executeWithToolHandling(newParams, depth + 1)
} }
} catch (error) { } catch (error) {
logger.error(`Error in tool processing:`, error) logger.error(`Error in tool processing:`, error as Error)
controller.error(error) controller.error(error)
} finally { } finally {
hasToolCalls = false hasToolCalls = false
@ -341,7 +341,7 @@ function buildParamsWithToolResults(
ctx._internal.observer.usage.total_tokens += additionalTokens ctx._internal.observer.usage.total_tokens += additionalTokens
} }
} catch (error) { } catch (error) {
logger.error(`Error estimating token usage for new messages:`, error) logger.error(`Error estimating token usage for new messages:`, error as Error)
} }
} }
@ -509,7 +509,7 @@ export async function parseAndCallTools<R>(
toolResults.push(convertedMessage) toolResults.push(convertedMessage)
} }
} catch (error) { } catch (error) {
logger.error(`Error executing tool ${toolResponse.id}:`, error) logger.error(`Error executing tool ${toolResponse.id}:`, error as Error)
// 更新为错误状态 // 更新为错误状态
upsertMCPToolResponse( upsertMCPToolResponse(
allToolResponses, allToolResponses,
@ -551,7 +551,7 @@ export async function parseAndCallTools<R>(
} }
}) })
.catch((error) => { .catch((error) => {
logger.error(`Error waiting for tool confirmation ${toolResponse.id}:`, error) logger.error(`Error waiting for tool confirmation ${toolResponse.id}:`, error as Error)
// 立即更新为cancelled状态 // 立即更新为cancelled状态
upsertMCPToolResponse( upsertMCPToolResponse(
allToolResponses, allToolResponses,
@ -77,7 +77,7 @@ export const ResponseTransformMiddleware: CompletionsMiddleware =
stream: genericChunkTransformStream stream: genericChunkTransformStream
} }
} catch (error) { } catch (error) {
logger.error(`Error during chunk transformation:`, error) logger.error('Error during chunk transformation:', error as Error)
throw error throw error
} }
} }
@ -74,7 +74,7 @@ export const TransformCoreToSdkParamsMiddleware: CompletionsMiddleware =
} }
return next(ctx, params) return next(ctx, params)
} catch (error) { } catch (error) {
logger.error(`Error during request transformation:`, error) logger.error('Error during request transformation:', error as Error)
// 让错误向上传播,或者可以在这里进行特定的错误处理 // 让错误向上传播,或者可以在这里进行特定的错误处理
throw error throw error
} }
@ -110,7 +110,7 @@ function createToolUseExtractionTransform(
// 转发其他所有chunk // 转发其他所有chunk
controller.enqueue(chunk) controller.enqueue(chunk)
} catch (error) { } catch (error) {
logger.error(`Error processing chunk:`, error) logger.error('Error processing chunk:', error as Error)
controller.error(error) controller.error(error)
} }
}, },
@ -42,7 +42,7 @@ async function loadLanguageExtension(language: string, languageMap: Record<strin
try { try {
return await specialLoader() return await specialLoader()
} catch (error) { } catch (error) {
logger.debug(`Failed to load language ${normalizedLang}`, error) logger.debug(`Failed to load language ${normalizedLang}`, error as Error)
return null return null
} }
} }
@ -53,7 +53,7 @@ async function loadLanguageExtension(language: string, languageMap: Record<strin
const extension = loadLanguage(normalizedLang as any) const extension = loadLanguage(normalizedLang as any)
return extension || null return extension || null
} catch (error) { } catch (error) {
logger.debug(`Failed to load language ${normalizedLang}`, error) logger.debug(`Failed to load language ${normalizedLang}`, error as Error)
return null return null
} }
} }
@ -68,7 +68,7 @@ async function loadLinterExtension(language: string): Promise<Extension | null>
try { try {
return await loader() return await loader()
} catch (error) { } catch (error) {
logger.debug(`Failed to load linter for ${language}`, error) logger.debug(`Failed to load linter for ${language}`, error as Error)
return null return null
} }
} }
@ -108,7 +108,7 @@ export const useLanguageExtensions = (language: string, lint?: boolean) => {
setExtensions(results) setExtensions(results)
} catch (error) { } catch (error) {
if (!cancelled) { if (!cancelled) {
logger.debug('Failed to load language extensions:', error) logger.debug('Failed to load language extensions:', error as Error)
setExtensions([]) setExtensions([])
} }
} }
@ -208,7 +208,7 @@ export const usePreviewToolHandlers = (
} }
img.src = svgBase64 img.src = svgBase64
} catch (error) { } catch (error) {
logger.error('Copy failed:', error) logger.error('Copy failed:', error as Error)
window.message.error(t('message.copy.failed')) window.message.error(t('message.copy.failed'))
} }
}, [getImgElement, t]) }, [getImgElement, t])
@ -268,7 +268,7 @@ export const usePreviewToolHandlers = (
img.src = svgBase64 img.src = svgBase64
} }
} catch (error) { } catch (error) {
logger.error('Download failed:', error) logger.error('Download failed:', error as Error)
} }
}, },
[getImgElement, prefix, customDownloader] [getImgElement, prefix, customDownloader]
@ -62,7 +62,7 @@ const ImageViewer: React.FC<ImageViewerProps> = ({ src, style, ...props }) => {
window.message.success(t('message.copy.success')) window.message.success(t('message.copy.success'))
} catch (error) { } catch (error) {
logger.error('复制图片失败:', error) logger.error('复制图片失败:', error as Error)
window.message.error(t('message.copy.failed')) window.message.error(t('message.copy.failed'))
} }
} }
@ -83,7 +83,7 @@ export function useLocalBackupModal(localBackupDir: string | undefined) {
}) })
setIsModalVisible(false) setIsModalVisible(false)
} catch (error) { } catch (error) {
logger.error('Backup failed:', error) logger.error('Backup failed:', error as Error)
} finally { } finally {
setBackuping(false) setBackuping(false)
} }
@ -299,7 +299,7 @@ const MinappPopupContainer: React.FC = () => {
const handleWebviewNavigate = (appid: string, url: string) => { const handleWebviewNavigate = (appid: string, url: string) => {
// 记录当前URL用于GoogleLoginTip判断 // 记录当前URL用于GoogleLoginTip判断
if (appid === currentMinappId) { if (appid === currentMinappId) {
logger.debug('URL changed:', url) logger.debug(`URL changed: ${url}`)
setCurrentUrl(url) setCurrentUrl(url)
} }
} }
@ -184,7 +184,7 @@ const PopupContainer: React.FC<Props> = ({ provider: _provider, resolve }) => {
.filter((model) => !isEmpty(model.name)) .filter((model) => !isEmpty(model.name))
) )
} catch (error) { } catch (error) {
logger.error('Failed to fetch models', error) logger.error('Failed to fetch models', error as Error)
} finally { } finally {
setTimeout(() => setLoading(false), 300) setTimeout(() => setLoading(false), 300)
} }
@ -98,7 +98,7 @@ function FileList(props: FileListProps) {
setFiles(items) setFiles(items)
} catch (error) { } catch (error) {
if (error instanceof Error) { if (error instanceof Error) {
logger.error('Error fetching files:', error) logger.error('Error fetching files:', error as Error)
window.modal.error({ window.modal.error({
content: error.message, content: error.message,
centered: true centered: true
@ -195,7 +195,7 @@ const PopupContainer: React.FC<PopupContainerProps> = ({
setFiles(filesData) setFiles(filesData)
} }
} catch (error) { } catch (error) {
logger.error('获取Obsidian Vault失败:', error) logger.error('获取Obsidian Vault失败:', error as Error)
setError(i18n.t('chat.topics.export.obsidian_fetch_error')) setError(i18n.t('chat.topics.export.obsidian_fetch_error'))
} finally { } finally {
setLoading(false) setLoading(false)
@ -213,7 +213,7 @@ const PopupContainer: React.FC<PopupContainerProps> = ({
const filesData = await window.api.obsidian.getFiles(selectedVault) const filesData = await window.api.obsidian.getFiles(selectedVault)
setFiles(filesData) setFiles(filesData)
} catch (error) { } catch (error) {
logger.error('获取Obsidian文件失败:', error) logger.error('获取Obsidian文件失败:', error as Error)
setError(i18n.t('chat.topics.export.obsidian_fetch_folders_error')) setError(i18n.t('chat.topics.export.obsidian_fetch_folders_error'))
} finally { } finally {
setLoading(false) setLoading(false)
@ -306,7 +306,7 @@ async function getModelForCheck(provider: Provider, t: TFunction): Promise<Model
if (!selectedModel) return null if (!selectedModel) return null
return selectedModel return selectedModel
} catch (error) { } catch (error) {
logger.error('failed to select model', error) logger.error('failed to select model', error as Error)
return null return null
} }
} }
@ -37,7 +37,7 @@ const PopupContainer: React.FC<Props> = ({ resolve }) => {
}, []) }, [])
const onOk = async () => { const onOk = async () => {
logger.debug('skipBackupFile', skipBackupFile) logger.debug(`skipBackupFile: ${skipBackupFile}`)
await backup(skipBackupFile) await backup(skipBackupFile)
setOpen(false) setOpen(false)
} }
@ -203,7 +203,7 @@ const PopupContainer: React.FC<Props> = ({ message, title, resolve }) => {
setOpen(false) setOpen(false)
resolve({ success: true, savedCount }) resolve({ success: true, savedCount })
} catch (error) { } catch (error) {
logger.error('save failed:', error) logger.error('save failed:', error as Error)
window.message.error(t('chat.save.knowledge.error.save_failed')) window.message.error(t('chat.save.knowledge.error.save_failed'))
setLoading(false) setLoading(false)
} }
@ -121,7 +121,7 @@ const PopupContainer: React.FC<Props> = ({
setTextValue(translatedText) setTextValue(translatedText)
} }
} catch (error) { } catch (error) {
logger.error('Translation failed:', error) logger.error('Translation failed:', error as Error)
window.message.error({ window.message.error({
content: t('translate.error.failed'), content: t('translate.error.failed'),
key: 'translate-message' key: 'translate-message'
@ -62,7 +62,7 @@ const TranslateButton: FC<Props> = ({ text, onTranslated, disabled, style, isLoa
const translatedText = await fetchTranslate({ content: text, assistant }) const translatedText = await fetchTranslate({ content: text, assistant })
onTranslated(translatedText) onTranslated(translatedText)
} catch (error) { } catch (error) {
logger.error('Translation failed:', error) logger.error('Translation failed:', error as Error)
window.message.error({ window.message.error({
content: t('translate.error.failed'), content: t('translate.error.failed'),
key: 'translate-message' key: 'translate-message'
@ -82,7 +82,7 @@ const loadCustomMiniApp = async (): Promise<MinAppType[]> => {
addTime: app.addTime || now addTime: app.addTime || now
})) }))
} catch (error) { } catch (error) {
logger.error('Failed to load custom mini apps:', error) logger.error('Failed to load custom mini apps:', error as Error)
return [] return []
} }
} }
@ -381,7 +381,7 @@ export async function upgradeToV8(tx: Transaction): Promise<void> {
targetLanguage: langMap[history.targetLanguage] targetLanguage: langMap[history.targetLanguage]
}) })
} catch (error) { } catch (error) {
logger.error('Error upgrading history:', error) logger.error('Error upgrading history:', error as Error)
} }
} }
logger.info('DB migration to version 8 finished.') logger.info('DB migration to version 8 finished.')
@ -118,7 +118,7 @@ export const useChatContext = (activeTopic: Topic) => {
window.message.success(t('message.delete.success')) window.message.success(t('message.delete.success'))
handleToggleMultiSelectMode(false) handleToggleMultiSelectMode(false)
} catch (error) { } catch (error) {
logger.error('Failed to delete messages:', error) logger.error('Failed to delete messages:', error as Error)
window.message.error(t('message.delete.failed')) window.message.error(t('message.delete.failed'))
} }
} }
@ -220,7 +220,7 @@ export function useMessageOperations(topic: Topic) {
const state = store.getState() const state = store.getState()
const message = state.messages.entities[messageId] const message = state.messages.entities[messageId]
if (!message) { if (!message) {
logger.error('[getTranslationUpdater] cannot find message:', messageId) logger.error(`[getTranslationUpdater] cannot find message: ${messageId}`)
return null return null
} }
@ -305,7 +305,7 @@ export function useMessageOperations(topic: Topic) {
const state = store.getState() const state = store.getState()
const message = state.messages.entities[messageId] const message = state.messages.entities[messageId]
if (!message) { if (!message) {
logger.error('[editMessageBlocks] Message not found:', messageId) logger.error(`[editMessageBlocks] Message not found: ${messageId}`)
return return
} }
@ -369,7 +369,7 @@ export function useMessageOperations(topic: Topic) {
await dispatch(removeBlocksThunk(topic.id, messageId, blockIdsToRemove)) await dispatch(removeBlocksThunk(topic.id, messageId, blockIdsToRemove))
} }
} catch (error) { } catch (error) {
logger.error('[editMessageBlocks] Failed to update message blocks:', error) logger.error('[editMessageBlocks] Failed to update message blocks:', error as Error)
} }
}, },
[dispatch, topic?.id] [dispatch, topic?.id]
@ -426,7 +426,7 @@ export function useMessageOperations(topic: Topic) {
const state = store.getState() const state = store.getState()
const message = state.messages.entities[messageId] const message = state.messages.entities[messageId]
if (!message || !message.blocks) { if (!message || !message.blocks) {
logger.error('[removeMessageBlock] Message not found or has no blocks:', messageId) logger.error(`[removeMessageBlock] Message not found or has no blocks: ${messageId}`)
return return
} }
@ -14,7 +14,7 @@ export function useNutstoreSSO() {
if (!encryptedToken) return reject(null) if (!encryptedToken) return reject(null)
resolve(encryptedToken) resolve(encryptedToken)
} catch (error) { } catch (error) {
logger.error('解析URL失败:', error) logger.error('解析URL失败:', error as Error)
reject(null) reject(null)
} finally { } finally {
removeListener() removeListener()
@ -56,7 +56,7 @@ export const usePinnedModels = () => {
: [...pinnedModels, modelId] : [...pinnedModels, modelId]
await updatePinnedModels(newPinnedModels) await updatePinnedModels(newPinnedModels)
} catch (error) { } catch (error) {
logger.error('Failed to toggle pinned model', error) logger.error('Failed to toggle pinned model', error as Error)
} }
}, },
[pinnedModels, updatePinnedModels] [pinnedModels, updatePinnedModels]
@ -143,7 +143,7 @@ const PopupContainer: React.FC<Props> = ({ resolve }) => {
setOriginalPrompt(content) setOriginalPrompt(content)
setHasUnsavedChanges(true) setHasUnsavedChanges(true)
} catch (error) { } catch (error) {
logger.error('Error fetching data:', error) logger.error('Error fetching data:', error as Error)
} }
setLoading(false) setLoading(false)
@ -44,7 +44,7 @@ export function useSystemAgents() {
setAgents(agentsData) setAgents(agentsData)
return return
} catch (error) { } catch (error) {
logger.error('Failed to load remote agents:', error) logger.error('Failed to load remote agents:', error as Error)
// 远程加载失败,继续尝试加载本地数据 // 远程加载失败,继续尝试加载本地数据
} }
} }
@ -69,7 +69,7 @@ export function useSystemAgents() {
setAgents(_agents) setAgents(_agents)
} catch (error) { } catch (error) {
logger.error('Failed to load agents:', error) logger.error('Failed to load agents:', error as Error)
// 发生错误时使用已加载的本地 agents // 发生错误时使用已加载的本地 agents
setAgents(_agents) setAgents(_agents)
} }
@ -72,7 +72,7 @@ const App: FC<Props> = ({ app, onClick, size = 60, isLast }) => {
updateDisabledMinapps(disabled.filter((item) => item.id !== app.id)) updateDisabledMinapps(disabled.filter((item) => item.id !== app.id))
} catch (error) { } catch (error) {
window.message.error(t('settings.miniapps.custom.remove_error')) window.message.error(t('settings.miniapps.custom.remove_error'))
logger.error('Failed to remove custom mini app:', error) logger.error('Failed to remove custom mini app:', error as Error)
} }
} }
} }
@ -63,7 +63,7 @@ const NewAppButton: FC<Props> = ({ size = 60 }) => {
updateMinapps([...minapps, newApp]) updateMinapps([...minapps, newApp])
} catch (error) { } catch (error) {
window.message.error(t('settings.miniapps.custom.save_error')) window.message.error(t('settings.miniapps.custom.save_error'))
logger.error('Failed to save custom mini app:', error) logger.error('Failed to save custom mini app:', error as Error)
} }
} }
@ -83,7 +83,7 @@ const NewAppButton: FC<Props> = ({ size = 60 }) => {
} }
reader.readAsDataURL(file) reader.readAsDataURL(file)
} catch (error) { } catch (error) {
logger.error('Failed to read file:', error) logger.error('Failed to read file:', error as Error)
window.message.error(t('settings.miniapps.custom.logo_upload_error')) window.message.error(t('settings.miniapps.custom.logo_upload_error'))
} }
} }
@ -54,7 +54,7 @@ const Chat: FC<Props> = (props) => {
const selectedText = window.getSelection()?.toString().trim() const selectedText = window.getSelection()?.toString().trim()
contentSearchRef.current?.enable(selectedText) contentSearchRef.current?.enable(selectedText)
} catch (error) { } catch (error) {
logger.error('Error enabling content search:', error) logger.error('Error enabling content search:', error as Error)
} }
}) })
@ -251,7 +251,7 @@ const Inputbar: FC<Props> = ({ assistant: _assistant, setActiveTopic, topic }) =
setTimeout(() => resizeTextArea(true), 0) setTimeout(() => resizeTextArea(true), 0)
setExpend(false) setExpend(false)
} catch (error) { } catch (error) {
logger.warn('Failed to send message:', error) logger.warn('Failed to send message:', error as Error)
parent?.recordException(error as Error) parent?.recordException(error as Error)
} }
}, [assistant, dispatch, files, inputEmpty, loading, mentionedModels, resizeTextArea, text, topic]) }, [assistant, dispatch, files, inputEmpty, loading, mentionedModels, resizeTextArea, text, topic])
@ -267,7 +267,7 @@ const Inputbar: FC<Props> = ({ assistant: _assistant, setActiveTopic, topic }) =
translatedText && setText(translatedText) translatedText && setText(translatedText)
setTimeout(() => resizeTextArea(), 0) setTimeout(() => resizeTextArea(), 0)
} catch (error) { } catch (error) {
logger.warn('Translation failed:', error) logger.warn('Translation failed:', error as Error)
} finally { } finally {
setIsTranslating(false) setIsTranslating(false)
} }
@ -73,7 +73,7 @@ const MessageItem: FC<Props> = ({
editMessage(message.id, { usage: usage }) editMessage(message.id, { usage: usage })
stopEditing() stopEditing()
} catch (error) { } catch (error) {
logger.error('Failed to save message blocks:', error) logger.error('Failed to save message blocks:', error as Error)
} }
}, },
[message, editMessageBlocks, stopEditing, editMessage] [message, editMessageBlocks, stopEditing, editMessage]
@ -88,7 +88,7 @@ const MessageItem: FC<Props> = ({
await resendUserMessageWithEdit(message, blocks, assistantWithTopicPrompt) await resendUserMessageWithEdit(message, blocks, assistantWithTopicPrompt)
stopEditing() stopEditing()
} catch (error) { } catch (error) {
logger.error('Failed to resend message:', error) logger.error('Failed to resend message:', error as Error)
} }
}, },
[message, resendUserMessageWithEdit, assistant, stopEditing, topic.prompt] [message, resendUserMessageWithEdit, assistant, stopEditing, topic.prompt]
@ -34,7 +34,7 @@ const MessageImage: FC<Props> = ({ block }) => {
document.body.removeChild(link) document.body.removeChild(link)
window.message.success(t('message.download.success')) window.message.success(t('message.download.success'))
} catch (error) { } catch (error) {
logger.error('下载图片失败:', error) logger.error('下载图片失败:', error as Error)
window.message.error(t('message.download.failed')) window.message.error(t('message.download.failed'))
} }
} }
@ -86,7 +86,7 @@ const MessageImage: FC<Props> = ({ block }) => {
window.message.success(t('message.copy.success')) window.message.success(t('message.copy.success'))
} catch (error) { } catch (error) {
logger.error('复制图片失败:', error) logger.error('复制图片失败:', error as Error)
window.message.error(t('message.copy.failed')) window.message.error(t('message.copy.failed'))
} }
} }
@ -44,7 +44,7 @@ const MessageTools: FC<Props> = ({ block }) => {
if (countdown > 0) { if (countdown > 0) {
timer.current = setTimeout(() => { timer.current = setTimeout(() => {
logger.debug('countdown', countdown) logger.debug(`countdown: ${countdown}`)
setCountdown((prev) => prev - 1) setCountdown((prev) => prev - 1)
}, 1000) }, 1000)
} else if (countdown === 0) { } else if (countdown === 0) {
@ -121,7 +121,7 @@ const MessageTools: FC<Props> = ({ block }) => {
message.error({ content: t('message.tools.abort_failed'), key: 'abort-tool' }) message.error({ content: t('message.tools.abort_failed'), key: 'abort-tool' })
} }
} catch (error) { } catch (error) {
logger.error('Failed to abort tool:', error) logger.error('Failed to abort tool:', error as Error)
message.error({ content: t('message.tools.abort_failed'), key: 'abort-tool' }) message.error({ content: t('message.tools.abort_failed'), key: 'abort-tool' })
} }
} }
@ -294,7 +294,7 @@ const MessageTools: FC<Props> = ({ block }) => {
return <CollapsedContent isExpanded={true} resultString={JSON.stringify(parsedResult, null, 2)} /> return <CollapsedContent isExpanded={true} resultString={JSON.stringify(parsedResult, null, 2)} />
} }
} catch (e) { } catch (e) {
logger.error('failed to render the preview of mcp results:', e) logger.error('failed to render the preview of mcp results:', e as Error)
return <CollapsedContent isExpanded={true} resultString={JSON.stringify(e, null, 2)} /> return <CollapsedContent isExpanded={true} resultString={JSON.stringify(e, null, 2)} />
} }
} }
@ -225,7 +225,10 @@ const Messages: React.FC<MessagesProps> = ({ assistant, topic, setActiveTopic, o
           window.message.success({ content: t('code_block.edit.save.success'), key: 'save-code' })
         } catch (error) {
-          logger.error(`Failed to save code block ${codeBlockId} content to message block ${msgBlockId}:`, error)
+          logger.error(
+            `Failed to save code block ${codeBlockId} content to message block ${msgBlockId}:`,
+            error as Error
+          )
           window.message.error({ content: t('code_block.edit.save.failed'), key: 'save-code-failed' })
         }
       } else {

View File

@@ -114,7 +114,7 @@ const PopupContainer: React.FC<Props> = ({ title, resolve }) => {
        setDimensions(finalDimensions)
      } catch (error) {
-       logger.error('Error getting embedding dimensions:', error)
+       logger.error('Error getting embedding dimensions:', error as Error)
        window.message.error(t('message.error.get_embedding_dimensions') + '\n' + getErrorMessage(error))
        setLoading(false)
        return
@@ -144,7 +144,7 @@ const PopupContainer: React.FC<Props> = ({ title, resolve }) => {
        resolve(_newBase)
      }
    } catch (error) {
-     logger.error('Validation failed:', error)
+     logger.error('Validation failed:', error as Error)
    }
  }
  const onCancel = () => {

View File

@@ -44,7 +44,7 @@ const PopupContainer: React.FC<Props> = ({ base, resolve }) => {
      const searchResults = await searchKnowledgeBase(value, base)
      setResults(searchResults)
    } catch (error) {
-     logger.error(`Failed to search knowledge base ${base.name}:`, error)
+     logger.error(`Failed to search knowledge base ${base.name}:`, error as Error)
      setResults([])
    } finally {
      setLoading(false)
@@ -82,7 +82,7 @@ const PopupContainer: React.FC<Props> = ({ base, resolve }) => {
      await navigator.clipboard.writeText(text)
      message.success(t('message.copied'))
    } catch (error) {
-     logger.error('Failed to copy text:', error)
+     logger.error('Failed to copy text:', error as Error)
      window.message.error(t('message.copyError') || 'Failed to copy text')
    }
  }

View File

@@ -72,7 +72,7 @@ const PopupContainer: React.FC<Props> = ({ base: _base, resolve }) => {
      setOpen(false)
      resolve(newBase)
    } catch (error) {
-     logger.error('Validation failed:', error)
+     logger.error('Validation failed:', error as Error)
    }
  }

View File

@@ -37,7 +37,7 @@ const QuotaTag: FC<{ base: KnowledgeBase; providerId: string; quota?: number }>
        })
        setQuota(response)
      } catch (error) {
-       logger.error('[KnowledgeContent] Error checking quota:', error)
+       logger.error('[KnowledgeContent] Error checking quota:', error as Error)
      }
    }
  }

View File

@@ -28,7 +28,7 @@ const StatusIcon: FC<StatusIconProps> = ({
  const status = getProcessingStatus(sourceId)
  const item = base.items.find((item) => item.id === sourceId)
  const errorText = item?.processingError
- logger.debug('[StatusIcon] Rendering for item:', item?.id, 'Status:', status, 'Progress:', progress)
+ logger.debug(`[StatusIcon] Rendering for item: ${item?.id} Status: ${status} Progress: ${progress}`)
  return useMemo(() => {
    if (!status) {
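The debug rewrites in this commit follow the same shape: values that used to trail the message as extra positional arguments are folded into a single interpolated string. A small sketch of that rewrite, with `itemId`, `status` and `progress` standing in for the values shown above and a `logger` instance assumed to be in scope:

```typescript
// Sketch only: the parameters stand in for the values previously passed as extra arguments.
function logRenderState(itemId: string | undefined, status: string, progress: number): void {
  // Before: logger.debug('[StatusIcon] Rendering for item:', itemId, 'Status:', status, 'Progress:', progress)
  logger.debug(`[StatusIcon] Rendering for item: ${itemId} Status: ${status} Progress: ${progress}`)
}
```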

View File

@@ -74,7 +74,7 @@ const KnowledgeSitemaps: FC<KnowledgeContentProps> = ({ selectedBase }) => {
        }
        addSitemap(url)
      } catch (e) {
-       logger.error('Invalid Sitemap URL:', url)
+       logger.error(`Invalid Sitemap URL: ${url}`)
      }
    }
  }
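In this hunk the second argument was a plain string (`url`), not an error object, so it is folded into the message and the caught `e` is no longer forwarded to the logger. A hedged sketch of how the URL and the caught value could both still be recorded, which goes slightly beyond what the commit itself does:

```typescript
// Sketch only: `url` is the candidate sitemap address, `e` the value caught by the surrounding try/catch.
function reportInvalidSitemap(url: string, e: unknown): void {
  if (e instanceof Error) {
    // Variant that also records the stack of the caught error (not what the commit does).
    logger.error(`Invalid Sitemap URL: ${url}`, e)
  } else {
    // What the commit settles on: the URL becomes part of the message string.
    logger.error(`Invalid Sitemap URL: ${url}`)
  }
}
```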

View File

@@ -330,7 +330,7 @@ const MemoriesPage = () => {
      const users = usersList.map((user) => user.userId)
      setUniqueUsers(users)
    } catch (error) {
-     logger.error('Failed to load users list:', error)
+     logger.error('Failed to load users list:', error as Error)
    }
  }, [memoryService])
@@ -338,7 +338,7 @@ const MemoriesPage = () => {
  const loadMemories = useCallback(
    async (userId?: string) => {
      const targetUser = userId || currentUser
-     logger.debug('Loading all memories for user:', targetUser)
+     logger.debug(`Loading all memories for user: ${targetUser}`)
      setLoading(true)
      try {
        // First, ensure the memory service is using the correct user
@@ -349,10 +349,10 @@ const MemoriesPage = () => {
        // Get all memories for current user context (load up to 10000)
        const result = await memoryService.list({ limit: 10000, offset: 0 })
-       logger.debug('Loaded memories for user:', targetUser, 'count:', result.results?.length || 0)
+       logger.debug(`Loaded memories for user: ${targetUser} count: ${result.results?.length || 0}`)
        setAllMemories(result.results || [])
      } catch (error) {
-       logger.error('Failed to load memories:', error)
+       logger.error('Failed to load memories:', error as Error)
        window.message.error(t('memory.load_failed'))
      } finally {
        setLoading(false)
@@ -363,7 +363,7 @@ const MemoriesPage = () => {
  // Sync memoryService with Redux store on mount and when currentUser changes
  useEffect(() => {
-   logger.debug('useEffect triggered for currentUser:', currentUser)
+   logger.debug(`useEffect triggered for currentUser: ${currentUser}`)
    // Reset to first page when user changes
    setCurrentPage(1)
    loadMemories(currentUser)
@@ -418,7 +418,7 @@ const MemoriesPage = () => {
      setCurrentPage(1)
      await loadMemories(currentUser)
    } catch (error) {
-     logger.error('Failed to add memory:', error)
+     logger.error('Failed to add memory:', error as Error)
      window.message.error(t('memory.add_failed'))
    }
  }
@@ -430,7 +430,7 @@ const MemoriesPage = () => {
      // Reload all memories
      await loadMemories(currentUser)
    } catch (error) {
-     logger.error('Failed to delete memory:', error)
+     logger.error('Failed to delete memory:', error as Error)
      window.message.error(t('memory.delete_failed'))
    }
  }
@@ -447,13 +447,13 @@ const MemoriesPage = () => {
      // Reload all memories
      await loadMemories(currentUser)
    } catch (error) {
-     logger.error('Failed to update memory:', error)
+     logger.error('Failed to update memory:', error as Error)
      window.message.error(t('memory.update_failed'))
    }
  }

  const handleUserSwitch = async (userId: string) => {
-   logger.debug('Switching to user:', userId)
+   logger.debug(`Switching to user: ${userId}`)

    // First update Redux state
    dispatch(setCurrentUserId(userId))
@@ -472,7 +472,7 @@ const MemoriesPage = () => {
        t('memory.user_switched', { user: userId === DEFAULT_USER_ID ? t('memory.default_user') : userId })
      )
    } catch (error) {
-     logger.error('Failed to switch user:', error)
+     logger.error('Failed to switch user:', error as Error)
      window.message.error(t('memory.user_switch_failed'))
    }
  }
@@ -492,7 +492,7 @@ const MemoriesPage = () => {
      window.message.success(t('memory.user_created', { user: userId }))
      setAddUserModalVisible(false)
    } catch (error) {
-     logger.error('Failed to add user:', error)
+     logger.error('Failed to add user:', error as Error)
      window.message.error(t('memory.add_user_failed'))
    }
  }
@@ -524,7 +524,7 @@ const MemoriesPage = () => {
      // Reload memories to show the empty state
      await loadMemories(currentUser)
    } catch (error) {
-     logger.error('Failed to reset memories:', error)
+     logger.error('Failed to reset memories:', error as Error)
      window.message.error(t('memory.reset_memories_failed'))
    }
  }
@@ -558,7 +558,6 @@ const MemoriesPage = () => {
        await loadMemories(currentUser)
      }
    } catch (error) {
-     logger.error('Failed to delete user:', error)
      window.message.error(t('memory.delete_user_failed'))
    }
  }

View File

@@ -86,7 +86,7 @@ const MemoriesSettingsModal: FC<MemoriesSettingsModalProps> = ({ visible, onSubm
        const aiProvider = new AiProvider(provider)
        finalDimensions = await aiProvider.getEmbeddingDimensions(embedderModel)
      } catch (error) {
-       logger.error('Error getting embedding dimensions:', error)
+       logger.error('Error getting embedding dimensions:', error as Error)
        window.message.error(t('message.error.get_embedding_dimensions') + '\n' + getErrorMessage(error))
        setLoading(false)
        return
@@ -125,7 +125,7 @@ const MemoriesSettingsModal: FC<MemoriesSettingsModalProps> = ({ visible, onSubm
        setLoading(false)
      }
    } catch (error) {
-     logger.error('Error submitting form:', error)
+     logger.error('Error submitting form:', error as Error)
      setLoading(false)
    }
  }

View File

@@ -116,7 +116,7 @@ const AihubmixPage: FC<{ Options: string[] }> = ({ Options }) => {
      }
      return await window.api.file.download(url)
    } catch (error) {
-     logger.error('下载图像失败:', error)
+     logger.error('下载图像失败:', error as Error)
      if (
        error instanceof Error &&
        (error.message.includes('Failed to parse URL') || error.message.includes('Invalid URL'))
@@ -550,7 +550,7 @@ const AihubmixPage: FC<{ Options: string[] }> = ({ Options }) => {
      const translatedText = await translateText(painting.prompt, LanguagesEnum.enUS)
      updatePaintingState({ prompt: translatedText })
    } catch (error) {
-     logger.error('Translation failed:', error)
+     logger.error('Translation failed:', error as Error)
    } finally {
      setIsTranslating(false)
    }

View File

@@ -183,7 +183,7 @@ const NewApiPage: FC<{ Options: string[] }> = ({ Options }) => {
      }
      return await window.api.file.download(url)
    } catch (error) {
-     logger.error('下载图像失败:', error)
+     logger.error('下载图像失败:', error as Error)
      if (
        error instanceof Error &&
        (error.message.includes('Failed to parse URL') || error.message.includes('Invalid URL'))
@@ -396,7 +396,7 @@ const NewApiPage: FC<{ Options: string[] }> = ({ Options }) => {
      const translatedText = await translateText(painting.prompt, LanguagesEnum.enUS)
      updatePaintingState({ prompt: translatedText })
    } catch (error) {
-     logger.error('Translation failed:', error)
+     logger.error('Translation failed:', error as Error)
    } finally {
      setIsTranslating(false)
    }

View File

@@ -21,7 +21,7 @@ const PaintingsRoutePage: FC = () => {
  const dispatch = useAppDispatch()

  useEffect(() => {
-   logger.debug('defaultPaintingProvider', provider)
+   logger.debug(`defaultPaintingProvider: ${provider}`)
    if (provider && Options.includes(provider)) {
      dispatch(setDefaultPaintingProvider(provider as PaintingProvider))
    }

View File

@@ -218,7 +218,7 @@ const SiliconPage: FC<{ Options: string[] }> = ({ Options }) => {
      }
      return await window.api.file.download(url)
    } catch (error) {
-     logger.error('Failed to download image:', error)
+     logger.error('Failed to download image:', error as Error)
      if (
        error instanceof Error &&
        (error.message.includes('Failed to parse URL') || error.message.includes('Invalid URL'))
@@ -309,7 +309,7 @@ const SiliconPage: FC<{ Options: string[] }> = ({ Options }) => {
      const translatedText = await translateText(painting.prompt, LanguagesEnum.enUS)
      updatePaintingState({ prompt: translatedText })
    } catch (error) {
-     logger.error('Translation failed:', error)
+     logger.error('Translation failed:', error as Error)
    } finally {
      setIsTranslating(false)
    }

View File

@@ -262,7 +262,7 @@ const TokenFluxPage: FC<{ Options: string[] }> = ({ Options }) => {
      const translatedText = await translateText(painting.prompt, LanguagesEnum.enUS)
      updatePaintingState({ prompt: translatedText })
    } catch (error) {
-     logger.error('Translation failed:', error)
+     logger.error('Translation failed:', error as Error)
    } finally {
      setIsTranslating(false)
    }

View File

@@ -46,7 +46,7 @@ export const DynamicFormRender: React.FC<DynamicFormRenderProps> = ({
        }
      }
    } catch (error) {
-     logger.error('Error processing image:', error)
+     logger.error('Error processing image:', error as Error)
    }
  },
  []

View File

@@ -174,7 +174,7 @@ export class TokenFluxService {
        // Continue polling for other statuses (processing, queued, etc.)
        setTimeout(poll, intervalMs)
      } catch (error) {
-       logger.error('Polling error:', error)
+       logger.error('Polling error:', error as Error)
        retryCount++

        if (retryCount >= maxRetries) {
@@ -227,7 +227,7 @@ export class TokenFluxService {
        }
        return await window.api.file.download(url)
      } catch (error) {
-       logger.error('Failed to download image:', error)
+       logger.error('Failed to download image:', error as Error)
        return null
      }
    })

View File

@@ -47,7 +47,7 @@ const AssistantMemorySettings: React.FC<Props> = ({ assistant, updateAssistant,
      })
      setMemoryStats({ count: result.results.length, loading: false })
    } catch (error) {
-     logger.error('Failed to load memory stats:', error)
+     logger.error('Failed to load memory stats:', error as Error)
      setMemoryStats({ count: 0, loading: false })
    }
  }, [assistant.id, memoryService])

Some files were not shown because too many files have changed in this diff.