Mirror of https://github.com/CherryHQ/cherry-studio.git, synced 2026-01-10 15:49:29 +08:00
refactor: consolidate CacheService implementation and update references
- Moved the CacheService functionality to a new implementation in @data/CacheService, improving modularity.
- Updated all references across the codebase to use the new cacheService instance.
- Removed the old CacheService files from both the main and renderer directories to streamline the codebase.
parent 6d89f94335
commit a6e19f7757
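
This commit replaces the static CacheService class, which existed as near-identical copies in the main and renderer service directories, with a single shared cacheService instance exported from @data/CacheService; call sites also switch from remove() to delete(). For orientation, here is a minimal TypeScript sketch of the instance API implied by the call sites in the diff below. The CacheEntry shape, the getInstance() accessor, and the body of initialize() are assumptions for illustration, not the actual implementation.

// Sketch only: shape inferred from the call sites in this diff, details assumed.
interface CacheEntry {
  data: unknown
  timestamp: number
  ttl: number
}

export class CacheService {
  private static instance: CacheService
  private initialized = false
  private cache = new Map<string, CacheEntry>()

  private constructor() {
    // Private constructor for singleton pattern
  }

  public static getInstance(): CacheService {
    if (!CacheService.instance) {
      CacheService.instance = new CacheService()
    }
    return CacheService.instance
  }

  public async initialize(): Promise<void> {
    if (this.initialized) {
      return // already initialized, matching the guard in the first hunk below
    }
    this.initialized = true
    // the real service registers its IPC handlers here (setupIpcHandlers)
  }

  set<T>(key: string, data: T, ttl: number): void {
    this.cache.set(key, { data, timestamp: Date.now(), ttl })
  }

  get<T>(key: string): T | null {
    const entry = this.cache.get(key)
    if (!entry) return null
    if (Date.now() - entry.timestamp > entry.ttl) {
      this.cache.delete(key)
      return null
    }
    return entry.data as T
  }

  has(key: string): boolean {
    return this.get(key) !== null
  }

  delete(key: string): void {
    this.cache.delete(key)
  }
}

// Call sites import the shared instance rather than the class:
export const cacheService = CacheService.getInstance()

The instance is initialized once at startup (see the main-entry hunk), so the initialized guard keeps repeated initialize() calls from registering handlers twice.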
@@ -21,13 +21,23 @@ const logger = loggerService.withContext('CacheService')
  */
 export class CacheService {
   private static instance: CacheService
+  private initialized = false

   // Main process cache
   private cache = new Map<string, CacheEntry>()

   private constructor() {
+    // Private constructor for singleton pattern
+  }
+
+  public async initialize(): Promise<void> {
+    if (this.initialized) {
+      logger.warn('CacheService already initialized')
+      return
+    }
+
     this.setupIpcHandlers()
-    logger.debug('CacheService initialized')
+    logger.info('CacheService initialized')
   }

   /**
@@ -9,7 +9,6 @@ import { loggerService } from '@logger'
 import { electronApp, optimizer } from '@electron-toolkit/utils'
 import { dbService } from '@data/db/DbService'
 import { preferenceService } from '@data/PreferenceService'
-import { dataApiService } from '@data/DataApiService'
 import { replaceDevtoolsFont } from '@main/utils/windowUtil'
 import { app, dialog } from 'electron'
 import installExtension, { REACT_DEVELOPER_TOOLS, REDUX_DEVTOOLS } from 'electron-devtools-installer'
@@ -32,6 +31,8 @@ import { windowService } from './services/WindowService'
 import { dataRefactorMigrateService } from './data/migrate/dataRefactor/DataRefactorMigrateService'
 import process from 'node:process'
 import { apiServerService } from './services/ApiServerService'
+import { dataApiService } from '@data/DataApiService'
+import { cacheService } from '@data/CacheService'

 const logger = loggerService.withContext('MainEntry')

@@ -168,16 +169,19 @@ if (!app.requestSingleInstanceLock()) {
     // Initialize DataApiService
     await dataApiService.initialize()

-    // Create two test windows for cross-window preference sync testing
-    logger.info('Creating test windows for PreferenceService cross-window sync testing')
-    const testWindow1 = dataRefactorMigrateService.createTestWindow()
-    const testWindow2 = dataRefactorMigrateService.createTestWindow()
+    // Initialize CacheService
+    await cacheService.initialize()
+
+    // // Create two test windows for cross-window preference sync testing
+    // logger.info('Creating test windows for PreferenceService cross-window sync testing')
+    // const testWindow1 = dataRefactorMigrateService.createTestWindow()
+    // const testWindow2 = dataRefactorMigrateService.createTestWindow()

-    // Position windows to avoid overlap
-    testWindow1.once('ready-to-show', () => {
-      const [x, y] = testWindow1.getPosition()
-      testWindow2.setPosition(x + 50, y + 50)
-    })
+    // // Position windows to avoid overlap
+    // testWindow1.once('ready-to-show', () => {
+    //   const [x, y] = testWindow1.getPosition()
+    //   testWindow2.setPosition(x + 50, y + 50)
+    // })
     /************FOR TESTING ONLY END****************/

     // Set app user model id for windows
@@ -1,74 +0,0 @@
-interface CacheItem<T> {
-  data: T
-  timestamp: number
-  duration: number
-}
-
-export class CacheService {
-  private static cache: Map<string, CacheItem<any>> = new Map()
-
-  /**
-   * Set cache
-   * @param key Cache key
-   * @param data Cache data
-   * @param duration Cache duration (in milliseconds)
-   */
-  static set<T>(key: string, data: T, duration: number): void {
-    this.cache.set(key, {
-      data,
-      timestamp: Date.now(),
-      duration
-    })
-  }
-
-  /**
-   * Get cache
-   * @param key Cache key
-   * @returns Returns data if cache exists and not expired, otherwise returns null
-   */
-  static get<T>(key: string): T | null {
-    const item = this.cache.get(key)
-    if (!item) return null
-
-    const now = Date.now()
-    if (now - item.timestamp > item.duration) {
-      this.remove(key)
-      return null
-    }
-
-    return item.data
-  }
-
-  /**
-   * Remove specific cache
-   * @param key Cache key
-   */
-  static remove(key: string): void {
-    this.cache.delete(key)
-  }
-
-  /**
-   * Clear all cache
-   */
-  static clear(): void {
-    this.cache.clear()
-  }
-
-  /**
-   * Check if cache exists and is valid
-   * @param key Cache key
-   * @returns boolean
-   */
-  static has(key: string): boolean {
-    const item = this.cache.get(key)
-    if (!item) return false
-
-    const now = Date.now()
-    if (now - item.timestamp > item.duration) {
-      this.remove(key)
-      return false
-    }
-
-    return true
-  }
-}
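
The hunk above deletes the old static main-process CacheService; the identical renderer copy is deleted in the final hunk. For call sites the migration is mechanical: import the shared instance instead of the class, and use delete() where remove() was used before. A small sketch of the new usage, where the key and payload type are made up for illustration:

import { cacheService } from '@data/CacheService'

// Hypothetical payload type and cache key, purely for illustration
interface ModelList {
  names: string[]
}
const key = 'example:models'

// Old API: CacheService.set(key, data, duration) / CacheService.get(key) / CacheService.remove(key)
// New API: the shared instance, with remove() renamed to delete()
cacheService.set<ModelList>(key, { names: ['model-a', 'model-b'] }, 60 * 1000) // cache for 60 seconds
const cached = cacheService.get<ModelList>(key) // null once the TTL has elapsed
console.info('cached models', cached)
if (cacheService.has(key)) {
  cacheService.delete(key)
}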
@@ -2,6 +2,7 @@ import crypto from 'node:crypto'
 import os from 'node:os'
 import path from 'node:path'

+import { cacheService } from '@data/CacheService'
 import { loggerService } from '@logger'
 import { createInMemoryMCPServer } from '@main/mcpServers/factory'
 import { makeSureDirExists, removeEnvProxy } from '@main/utils'
@@ -46,7 +47,6 @@ import { EventEmitter } from 'events'
 import { memoize } from 'lodash'
 import { v4 as uuidv4 } from 'uuid'

-import { CacheService } from './CacheService'
 import DxtService from './DxtService'
 import { CallBackServer } from './mcp/oauth/callback'
 import { McpOAuthClientProvider } from './mcp/oauth/provider'
@@ -116,9 +116,9 @@ function withCache<T extends unknown[], R>(
   return async (...args: T): Promise<R> => {
     const cacheKey = getCacheKey(...args)

-    if (CacheService.has(cacheKey)) {
+    if (cacheService.has(cacheKey)) {
       logger.debug(`${logPrefix} loaded from cache`, { cacheKey })
-      const cachedData = CacheService.get<R>(cacheKey)
+      const cachedData = cacheService.get<R>(cacheKey)
       if (cachedData) {
         return cachedData
       }
@@ -126,7 +126,7 @@ function withCache<T extends unknown[], R>(

     const start = Date.now()
     const result = await fn(...args)
-    CacheService.set(cacheKey, result, ttl)
+    cacheService.set(cacheKey, result, ttl)
     logger.debug(`${logPrefix} cached`, { cacheKey, ttlMs: ttl, durationMs: Date.now() - start })
     return result
   }
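
The two hunks above update McpService's withCache helper to the shared instance. As a reading aid, here is a sketch of how the full wrapper plausibly fits together; the body matches the lines shown in the diff, while the exact parameter list and ordering are assumptions:

import { cacheService } from '@data/CacheService'
import { loggerService } from '@logger'

const logger = loggerService.withContext('McpService') // context name assumed

// Wraps an async function with TTL-based caching keyed by its arguments.
function withCache<T extends unknown[], R>(
  fn: (...args: T) => Promise<R>,
  getCacheKey: (...args: T) => string,
  ttl: number,
  logPrefix: string
): (...args: T) => Promise<R> {
  return async (...args: T): Promise<R> => {
    const cacheKey = getCacheKey(...args)

    if (cacheService.has(cacheKey)) {
      logger.debug(`${logPrefix} loaded from cache`, { cacheKey })
      const cachedData = cacheService.get<R>(cacheKey)
      if (cachedData) {
        return cachedData
      }
    }

    const start = Date.now()
    const result = await fn(...args)
    cacheService.set(cacheKey, result, ttl)
    logger.debug(`${logPrefix} cached`, { cacheKey, ttlMs: ttl, durationMs: Date.now() - start })
    return result
  }
}

Wrapped calls such as the MCP list_tool/list_prompts/list_resources lookups then get TTL caching, and the notification handlers in the next hunks invalidate those entries with cacheService.delete().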
@@ -469,21 +469,21 @@ class McpService {
     client.setNotificationHandler(ToolListChangedNotificationSchema, async () => {
       logger.debug(`Tools list changed for server: ${server.name}`)
       // Clear tools cache
-      CacheService.remove(`mcp:list_tool:${serverKey}`)
+      cacheService.delete(`mcp:list_tool:${serverKey}`)
     })

     // Set up resources list changed notification handler
     client.setNotificationHandler(ResourceListChangedNotificationSchema, async () => {
       logger.debug(`Resources list changed for server: ${server.name}`)
       // Clear resources cache
-      CacheService.remove(`mcp:list_resources:${serverKey}`)
+      cacheService.delete(`mcp:list_resources:${serverKey}`)
     })

     // Set up prompts list changed notification handler
     client.setNotificationHandler(PromptListChangedNotificationSchema, async () => {
       logger.debug(`Prompts list changed for server: ${server.name}`)
       // Clear prompts cache
-      CacheService.remove(`mcp:list_prompts:${serverKey}`)
+      cacheService.delete(`mcp:list_prompts:${serverKey}`)
     })

     // Set up resource updated notification handler
@@ -513,16 +513,16 @@ class McpService {
    * Clear resource-specific caches for a server
    */
   private clearResourceCaches(serverKey: string) {
-    CacheService.remove(`mcp:list_resources:${serverKey}`)
+    cacheService.delete(`mcp:list_resources:${serverKey}`)
   }

   /**
    * Clear all caches for a specific server
    */
   private clearServerCache(serverKey: string) {
-    CacheService.remove(`mcp:list_tool:${serverKey}`)
-    CacheService.remove(`mcp:list_prompts:${serverKey}`)
-    CacheService.remove(`mcp:list_resources:${serverKey}`)
+    cacheService.delete(`mcp:list_tool:${serverKey}`)
+    cacheService.delete(`mcp:list_prompts:${serverKey}`)
+    cacheService.delete(`mcp:list_resources:${serverKey}`)
     logger.debug(`Cleared all caches for server`, { serverKey })
   }

@@ -1,10 +1,10 @@
+import { cacheService } from '@data/CacheService'
 import { File, Files, FileState, GoogleGenAI } from '@google/genai'
 import { loggerService } from '@logger'
 import { fileStorage } from '@main/services/FileStorage'
 import { FileListResponse, FileMetadata, FileUploadResponse, Provider } from '@types'
 import { v4 as uuidv4 } from 'uuid'

-import { CacheService } from '../CacheService'
 import { BaseFileService } from './BaseFileService'

 const logger = loggerService.withContext('GeminiService')
@@ -67,7 +67,7 @@ export class GeminiService extends BaseFileService {
       // Only cache successfully uploaded files
       if (status === 'success') {
         const cacheKey = `${GeminiService.FILE_LIST_CACHE_KEY}_${response.fileId}`
-        CacheService.set<FileUploadResponse>(cacheKey, response, GeminiService.FILE_CACHE_DURATION)
+        cacheService.set<FileUploadResponse>(cacheKey, response, GeminiService.FILE_CACHE_DURATION)
       }

       return response
@@ -84,7 +84,7 @@ export class GeminiService extends BaseFileService {

   async retrieveFile(fileId: string): Promise<FileUploadResponse> {
     try {
-      const cachedResponse = CacheService.get<FileUploadResponse>(`${GeminiService.FILE_LIST_CACHE_KEY}_${fileId}`)
+      const cachedResponse = cacheService.get<FileUploadResponse>(`${GeminiService.FILE_LIST_CACHE_KEY}_${fileId}`)
       logger.debug('[GeminiService] cachedResponse', cachedResponse)
       if (cachedResponse) {
         return cachedResponse
@@ -130,7 +130,7 @@ export class GeminiService extends BaseFileService {

   async listFiles(): Promise<FileListResponse> {
     try {
-      const cachedList = CacheService.get<FileListResponse>(GeminiService.FILE_LIST_CACHE_KEY)
+      const cachedList = cacheService.get<FileListResponse>(GeminiService.FILE_LIST_CACHE_KEY)
       if (cachedList) {
         return cachedList
       }
@@ -153,7 +153,7 @@ export class GeminiService extends BaseFileService {
             file
           }
         }
-        CacheService.set(
+        cacheService.set(
          `${GeminiService.FILE_LIST_CACHE_KEY}_${file.name}`,
          fileResponse,
          GeminiService.FILE_CACHE_DURATION
@@ -173,7 +173,7 @@ export class GeminiService extends BaseFileService {
       }

       // Update the file-list cache
-      CacheService.set(GeminiService.FILE_LIST_CACHE_KEY, fileList, GeminiService.LIST_CACHE_DURATION)
+      cacheService.set(GeminiService.FILE_LIST_CACHE_KEY, fileList, GeminiService.LIST_CACHE_DURATION)
       return fileList
     } catch (error) {
       logger.error('Error listing files from Gemini:', error as Error)
@@ -1,10 +1,10 @@
+import { cacheService } from '@data/CacheService'
 import { loggerService } from '@logger'
 import { fileStorage } from '@main/services/FileStorage'
 import { FileListResponse, FileMetadata, FileUploadResponse, Provider } from '@types'
 import * as fs from 'fs'
 import OpenAI from 'openai'

-import { CacheService } from '../CacheService'
 import { BaseFileService } from './BaseFileService'

 const logger = loggerService.withContext('OpenAIService')
@@ -38,7 +38,7 @@ export class OpenaiService extends BaseFileService {
         throw new Error('File id not found in response')
       }
       // Map the remote file id onto the UI file id
-      CacheService.set<string>(
+      cacheService.set<string>(
         OpenaiService.generateUIFileIdCacheKey(file.id),
         response.id,
         OpenaiService.FILE_CACHE_DURATION
@@ -88,7 +88,7 @@ export class OpenaiService extends BaseFileService {

   async deleteFile(fileId: string): Promise<void> {
     try {
-      const cachedRemoteFileId = CacheService.get<string>(OpenaiService.generateUIFileIdCacheKey(fileId))
+      const cachedRemoteFileId = cacheService.get<string>(OpenaiService.generateUIFileIdCacheKey(fileId))
       await this.client.files.delete(cachedRemoteFileId || fileId)
       logger.debug(`File ${fileId} deleted`)
     } catch (error) {
@@ -100,7 +100,7 @@ export class OpenaiService extends BaseFileService {
   async retrieveFile(fileId: string): Promise<FileUploadResponse> {
     try {
       // Try to reverse-map the remote file id
-      const cachedRemoteFileId = CacheService.get<string>(OpenaiService.generateUIFileIdCacheKey(fileId))
+      const cachedRemoteFileId = cacheService.get<string>(OpenaiService.generateUIFileIdCacheKey(fileId))
       const response = await this.client.files.retrieve(cachedRemoteFileId || fileId)

       return {
@@ -1,5 +1,5 @@
+import { cacheService } from '@data/CacheService'
 import { loggerService } from '@logger'
-import { CacheService } from '@renderer/services/CacheService'
 import { FileMetadata, TokenFluxPainting } from '@renderer/types'

 import type { TokenFluxModel } from '../config/tokenFluxConfig'
@@ -61,7 +61,7 @@ export class TokenFluxService {
     const cacheKey = `tokenflux_models_${this.apiHost}`

     // Check cache first
-    const cachedModels = CacheService.get<TokenFluxModel[]>(cacheKey)
+    const cachedModels = cacheService.get<TokenFluxModel[]>(cacheKey)
     if (cachedModels) {
       return cachedModels
     }
@@ -79,7 +79,7 @@ export class TokenFluxService {
     }

     // Cache for 60 minutes (3,600,000 milliseconds)
-    CacheService.set(cacheKey, data.data, 60 * 60 * 1000)
+    cacheService.set(cacheKey, data.data, 60 * 60 * 1000)

     return data.data
   }
@@ -1,74 +0,0 @@
-interface CacheItem<T> {
-  data: T
-  timestamp: number
-  duration: number
-}
-
-export class CacheService {
-  private static cache: Map<string, CacheItem<any>> = new Map()
-
-  /**
-   * Set cache
-   * @param key Cache key
-   * @param data Cache data
-   * @param duration Cache duration (in milliseconds)
-   */
-  static set<T>(key: string, data: T, duration: number): void {
-    this.cache.set(key, {
-      data,
-      timestamp: Date.now(),
-      duration
-    })
-  }
-
-  /**
-   * Get cache
-   * @param key Cache key
-   * @returns Returns data if cache exists and not expired, otherwise returns null
-   */
-  static get<T>(key: string): T | null {
-    const item = this.cache.get(key)
-    if (!item) return null
-
-    const now = Date.now()
-    if (now - item.timestamp > item.duration) {
-      this.remove(key)
-      return null
-    }
-
-    return item.data
-  }
-
-  /**
-   * Remove specific cache
-   * @param key Cache key
-   */
-  static remove(key: string): void {
-    this.cache.delete(key)
-  }
-
-  /**
-   * Clear all cache
-   */
-  static clear(): void {
-    this.cache.clear()
-  }
-
-  /**
-   * Check if cache exists and is valid
-   * @param key Cache key
-   * @returns boolean
-   */
-  static has(key: string): boolean {
-    const item = this.cache.get(key)
-    if (!item) return false
-
-    const now = Date.now()
-    if (now - item.timestamp > item.duration) {
-      this.remove(key)
-      return false
-    }
-
-    return true
-  }
-}