refactor: consolidate CacheService implementation and update references

- Moved CacheService functionality to a new implementation in @data/CacheService, enhancing modularity.
- Updated all references across the codebase to utilize the new cacheService instance.
- Removed the old CacheService files from both main and renderer directories to streamline the codebase.
This commit is contained in:
fullex 2025-09-15 14:47:30 +08:00
parent 6d89f94335
commit a6e19f7757
8 changed files with 49 additions and 183 deletions

View File

@@ -21,13 +21,23 @@ const logger = loggerService.withContext('CacheService')
*/
export class CacheService {
private static instance: CacheService
private initialized = false
// Main process cache
private cache = new Map<string, CacheEntry>()
private constructor() {
// Private constructor for singleton pattern
}
public async initialize(): Promise<void> {
if (this.initialized) {
logger.warn('CacheService already initialized')
return
}
this.setupIpcHandlers()
logger.debug('CacheService initialized')
logger.info('CacheService initialized')
}
/**

View File

@@ -9,7 +9,6 @@ import { loggerService } from '@logger'
import { electronApp, optimizer } from '@electron-toolkit/utils'
import { dbService } from '@data/db/DbService'
import { preferenceService } from '@data/PreferenceService'
import { dataApiService } from '@data/DataApiService'
import { replaceDevtoolsFont } from '@main/utils/windowUtil'
import { app, dialog } from 'electron'
import installExtension, { REACT_DEVELOPER_TOOLS, REDUX_DEVTOOLS } from 'electron-devtools-installer'
@@ -32,6 +31,8 @@ import { windowService } from './services/WindowService'
import { dataRefactorMigrateService } from './data/migrate/dataRefactor/DataRefactorMigrateService'
import process from 'node:process'
import { apiServerService } from './services/ApiServerService'
import { dataApiService } from '@data/DataApiService'
import { cacheService } from '@data/CacheService'
const logger = loggerService.withContext('MainEntry')
@@ -168,16 +169,19 @@ if (!app.requestSingleInstanceLock()) {
// Initialize DataApiService
await dataApiService.initialize()
// Create two test windows for cross-window preference sync testing
logger.info('Creating test windows for PreferenceService cross-window sync testing')
const testWindow1 = dataRefactorMigrateService.createTestWindow()
const testWindow2 = dataRefactorMigrateService.createTestWindow()
// Initialize CacheService
await cacheService.initialize()
// Position windows to avoid overlap
testWindow1.once('ready-to-show', () => {
const [x, y] = testWindow1.getPosition()
testWindow2.setPosition(x + 50, y + 50)
})
// // Create two test windows for cross-window preference sync testing
// logger.info('Creating test windows for PreferenceService cross-window sync testing')
// const testWindow1 = dataRefactorMigrateService.createTestWindow()
// const testWindow2 = dataRefactorMigrateService.createTestWindow()
// // Position windows to avoid overlap
// testWindow1.once('ready-to-show', () => {
// const [x, y] = testWindow1.getPosition()
// testWindow2.setPosition(x + 50, y + 50)
// })
/************FOR TESTING ONLY END****************/
// Set app user model id for windows

View File

@@ -1,74 +0,0 @@
interface CacheItem<T> {
  data: T
  timestamp: number
  duration: number
}

/**
 * In-memory TTL cache keyed by string.
 *
 * Entries expire `duration` milliseconds after they are set; expired
 * entries are evicted lazily on the next `get`/`has` for that key.
 * All members are static, so the cache is shared process-wide.
 */
export class CacheService {
  // `unknown` instead of `any`: stored values are only handed back
  // through the typed accessors below, never used unchecked.
  private static cache: Map<string, CacheItem<unknown>> = new Map()

  /**
   * Set cache
   * @param key Cache key
   * @param data Cache data
   * @param duration Cache duration (in milliseconds)
   */
  static set<T>(key: string, data: T, duration: number): void {
    this.cache.set(key, {
      data,
      timestamp: Date.now(),
      duration
    })
  }

  /**
   * Get cache
   * @param key Cache key
   * @returns Returns data if cache exists and not expired, otherwise returns null
   */
  static get<T>(key: string): T | null {
    const item = this.cache.get(key)
    if (!item) return null

    if (this.isExpired(item)) {
      this.remove(key)
      return null
    }

    // Cast is safe by construction: `set<T>` is the only writer for a key.
    return item.data as T
  }

  /**
   * Remove specific cache
   * @param key Cache key
   */
  static remove(key: string): void {
    this.cache.delete(key)
  }

  /**
   * Clear all cache
   */
  static clear(): void {
    this.cache.clear()
  }

  /**
   * Check if cache exists and is valid
   * @param key Cache key
   * @returns boolean
   */
  static has(key: string): boolean {
    const item = this.cache.get(key)
    if (!item) return false

    if (this.isExpired(item)) {
      this.remove(key)
      return false
    }
    return true
  }

  /** True when the entry's TTL has elapsed (shared by `get` and `has`). */
  private static isExpired(item: CacheItem<unknown>): boolean {
    return Date.now() - item.timestamp > item.duration
  }
}

View File

@@ -2,6 +2,7 @@ import crypto from 'node:crypto'
import os from 'node:os'
import path from 'node:path'
import { cacheService } from '@data/CacheService'
import { loggerService } from '@logger'
import { createInMemoryMCPServer } from '@main/mcpServers/factory'
import { makeSureDirExists, removeEnvProxy } from '@main/utils'
@@ -46,7 +47,6 @@ import { EventEmitter } from 'events'
import { memoize } from 'lodash'
import { v4 as uuidv4 } from 'uuid'
import { CacheService } from './CacheService'
import DxtService from './DxtService'
import { CallBackServer } from './mcp/oauth/callback'
import { McpOAuthClientProvider } from './mcp/oauth/provider'
@@ -116,9 +116,9 @@ function withCache<T extends unknown[], R>(
return async (...args: T): Promise<R> => {
const cacheKey = getCacheKey(...args)
if (CacheService.has(cacheKey)) {
if (cacheService.has(cacheKey)) {
logger.debug(`${logPrefix} loaded from cache`, { cacheKey })
const cachedData = CacheService.get<R>(cacheKey)
const cachedData = cacheService.get<R>(cacheKey)
if (cachedData) {
return cachedData
}
@@ -126,7 +126,7 @@
const start = Date.now()
const result = await fn(...args)
CacheService.set(cacheKey, result, ttl)
cacheService.set(cacheKey, result, ttl)
logger.debug(`${logPrefix} cached`, { cacheKey, ttlMs: ttl, durationMs: Date.now() - start })
return result
}
@@ -469,21 +469,21 @@ class McpService {
client.setNotificationHandler(ToolListChangedNotificationSchema, async () => {
logger.debug(`Tools list changed for server: ${server.name}`)
// Clear tools cache
CacheService.remove(`mcp:list_tool:${serverKey}`)
cacheService.delete(`mcp:list_tool:${serverKey}`)
})
// Set up resources list changed notification handler
client.setNotificationHandler(ResourceListChangedNotificationSchema, async () => {
logger.debug(`Resources list changed for server: ${server.name}`)
// Clear resources cache
CacheService.remove(`mcp:list_resources:${serverKey}`)
cacheService.delete(`mcp:list_resources:${serverKey}`)
})
// Set up prompts list changed notification handler
client.setNotificationHandler(PromptListChangedNotificationSchema, async () => {
logger.debug(`Prompts list changed for server: ${server.name}`)
// Clear prompts cache
CacheService.remove(`mcp:list_prompts:${serverKey}`)
cacheService.delete(`mcp:list_prompts:${serverKey}`)
})
// Set up resource updated notification handler
@@ -513,16 +513,16 @@
* Clear resource-specific caches for a server
*/
private clearResourceCaches(serverKey: string) {
CacheService.remove(`mcp:list_resources:${serverKey}`)
cacheService.delete(`mcp:list_resources:${serverKey}`)
}
/**
* Clear all caches for a specific server
*/
private clearServerCache(serverKey: string) {
CacheService.remove(`mcp:list_tool:${serverKey}`)
CacheService.remove(`mcp:list_prompts:${serverKey}`)
CacheService.remove(`mcp:list_resources:${serverKey}`)
cacheService.delete(`mcp:list_tool:${serverKey}`)
cacheService.delete(`mcp:list_prompts:${serverKey}`)
cacheService.delete(`mcp:list_resources:${serverKey}`)
logger.debug(`Cleared all caches for server`, { serverKey })
}

View File

@@ -1,10 +1,10 @@
import { cacheService } from '@data/CacheService'
import { File, Files, FileState, GoogleGenAI } from '@google/genai'
import { loggerService } from '@logger'
import { fileStorage } from '@main/services/FileStorage'
import { FileListResponse, FileMetadata, FileUploadResponse, Provider } from '@types'
import { v4 as uuidv4 } from 'uuid'
import { CacheService } from '../CacheService'
import { BaseFileService } from './BaseFileService'
const logger = loggerService.withContext('GeminiService')
@@ -67,7 +67,7 @@ export class GeminiService extends BaseFileService {
// 只缓存成功的文件
if (status === 'success') {
const cacheKey = `${GeminiService.FILE_LIST_CACHE_KEY}_${response.fileId}`
CacheService.set<FileUploadResponse>(cacheKey, response, GeminiService.FILE_CACHE_DURATION)
cacheService.set<FileUploadResponse>(cacheKey, response, GeminiService.FILE_CACHE_DURATION)
}
return response
@@ -84,7 +84,7 @@
async retrieveFile(fileId: string): Promise<FileUploadResponse> {
try {
const cachedResponse = CacheService.get<FileUploadResponse>(`${GeminiService.FILE_LIST_CACHE_KEY}_${fileId}`)
const cachedResponse = cacheService.get<FileUploadResponse>(`${GeminiService.FILE_LIST_CACHE_KEY}_${fileId}`)
logger.debug('[GeminiService] cachedResponse', cachedResponse)
if (cachedResponse) {
return cachedResponse
@@ -130,7 +130,7 @@ export class GeminiService extends BaseFileService {
async listFiles(): Promise<FileListResponse> {
try {
const cachedList = CacheService.get<FileListResponse>(GeminiService.FILE_LIST_CACHE_KEY)
const cachedList = cacheService.get<FileListResponse>(GeminiService.FILE_LIST_CACHE_KEY)
if (cachedList) {
return cachedList
}
@@ -153,7 +153,7 @@
file
}
}
CacheService.set(
cacheService.set(
`${GeminiService.FILE_LIST_CACHE_KEY}_${file.name}`,
fileResponse,
GeminiService.FILE_CACHE_DURATION
@@ -173,7 +173,7 @@
}
// 更新文件列表缓存
CacheService.set(GeminiService.FILE_LIST_CACHE_KEY, fileList, GeminiService.LIST_CACHE_DURATION)
cacheService.set(GeminiService.FILE_LIST_CACHE_KEY, fileList, GeminiService.LIST_CACHE_DURATION)
return fileList
} catch (error) {
logger.error('Error listing files from Gemini:', error as Error)

View File

@@ -1,10 +1,10 @@
import { cacheService } from '@data/CacheService'
import { loggerService } from '@logger'
import { fileStorage } from '@main/services/FileStorage'
import { FileListResponse, FileMetadata, FileUploadResponse, Provider } from '@types'
import * as fs from 'fs'
import OpenAI from 'openai'
import { CacheService } from '../CacheService'
import { BaseFileService } from './BaseFileService'
const logger = loggerService.withContext('OpenAIService')
@@ -38,7 +38,7 @@ export class OpenaiService extends BaseFileService {
throw new Error('File id not found in response')
}
// 映射RemoteFileId到UIFileId上
CacheService.set<string>(
cacheService.set<string>(
OpenaiService.generateUIFileIdCacheKey(file.id),
response.id,
OpenaiService.FILE_CACHE_DURATION
@@ -88,7 +88,7 @@ export class OpenaiService extends BaseFileService {
async deleteFile(fileId: string): Promise<void> {
try {
const cachedRemoteFileId = CacheService.get<string>(OpenaiService.generateUIFileIdCacheKey(fileId))
const cachedRemoteFileId = cacheService.get<string>(OpenaiService.generateUIFileIdCacheKey(fileId))
await this.client.files.delete(cachedRemoteFileId || fileId)
logger.debug(`File ${fileId} deleted`)
} catch (error) {
@@ -100,7 +100,7 @@
async retrieveFile(fileId: string): Promise<FileUploadResponse> {
try {
// 尝试反映射RemoteFileId
const cachedRemoteFileId = CacheService.get<string>(OpenaiService.generateUIFileIdCacheKey(fileId))
const cachedRemoteFileId = cacheService.get<string>(OpenaiService.generateUIFileIdCacheKey(fileId))
const response = await this.client.files.retrieve(cachedRemoteFileId || fileId)
return {

View File

@@ -1,5 +1,5 @@
import { cacheService } from '@data/CacheService'
import { loggerService } from '@logger'
import { CacheService } from '@renderer/services/CacheService'
import { FileMetadata, TokenFluxPainting } from '@renderer/types'
import type { TokenFluxModel } from '../config/tokenFluxConfig'
@@ -61,7 +61,7 @@ export class TokenFluxService {
const cacheKey = `tokenflux_models_${this.apiHost}`
// Check cache first
const cachedModels = CacheService.get<TokenFluxModel[]>(cacheKey)
const cachedModels = cacheService.get<TokenFluxModel[]>(cacheKey)
if (cachedModels) {
return cachedModels
}
@@ -79,7 +79,7 @@
}
// Cache for 60 minutes (3,600,000 milliseconds)
CacheService.set(cacheKey, data.data, 60 * 60 * 1000)
cacheService.set(cacheKey, data.data, 60 * 60 * 1000)
return data.data
}

View File

@@ -1,74 +0,0 @@
/** Shape of a single cached record. */
interface CacheItem<T> {
  data: T
  timestamp: number
  duration: number
}

/**
 * Process-wide in-memory cache with a per-entry TTL.
 *
 * Every public member is static; stale records are evicted lazily
 * whenever they are looked up.
 */
export class CacheService {
  private static cache: Map<string, CacheItem<any>> = new Map()

  /**
   * Store `data` under `key` for at most `duration` milliseconds.
   */
  static set<T>(key: string, data: T, duration: number): void {
    const entry: CacheItem<T> = { data, timestamp: Date.now(), duration }
    this.cache.set(key, entry)
  }

  /**
   * Look up `key`, returning its value, or null when absent or stale.
   * A stale record is deleted as a side effect.
   */
  static get<T>(key: string): T | null {
    const entry = this.cache.get(key)
    if (entry === undefined) {
      return null
    }
    const age = Date.now() - entry.timestamp
    if (age > entry.duration) {
      this.remove(key)
      return null
    }
    return entry.data
  }

  /** Drop a single record by key. */
  static remove(key: string): void {
    this.cache.delete(key)
  }

  /** Drop every record. */
  static clear(): void {
    this.cache.clear()
  }

  /**
   * Report whether `key` holds a live (non-expired) record.
   * A stale record is deleted as a side effect.
   */
  static has(key: string): boolean {
    const entry = this.cache.get(key)
    if (entry === undefined) {
      return false
    }
    const age = Date.now() - entry.timestamp
    if (age > entry.duration) {
      this.remove(key)
      return false
    }
    return true
  }
}