mirror of https://github.com/CherryHQ/cherry-studio.git (synced 2025-12-31 16:49:07 +08:00)
feat(cache): enhance shared cache functionality and synchronization in main
- Introduced type-safe access methods for shared cache in the Main process, including `getShared`, `setShared`, `hasShared`, and `deleteShared`.
- Implemented `getAllShared` for initializing new Renderer windows with the complete shared cache state.
- Updated IPC communication to support bidirectional synchronization of shared cache between Main and Renderer processes.
- Enhanced cache management with absolute timestamps for TTL, ensuring precise expiration handling across windows.
- Added ready state tracking in Renderer for improved synchronization feedback during initialization.
- Refactored related documentation to reflect new features and usage patterns for shared cache.
This commit is contained in:
parent: ba107b2f6f
commit: b156ee68e0
@@ -23,8 +23,9 @@ CacheService handles data that:
- Best for: expensive computations, API response caching

### Shared Cache
- Synchronized across all windows via IPC
- Main process acts as the source of truth
- Synchronized bidirectionally between Main and all Renderer windows via IPC
- Main process maintains authoritative copy and provides initialization sync for new windows
- New windows fetch complete shared cache state from Main on startup
- Best for: window layouts, shared UI state

### Persist Cache
@@ -101,14 +102,17 @@ cacheService.set('temp.calculation', result, 30000)
## Main vs Renderer Responsibilities

### Main Process CacheService
- Manages shared and persist cache storage
- Handles IPC requests from renderers
- Broadcasts updates to all windows
- Manages TTL expiration for shared caches
- Manages internal cache for Main process services
- Maintains authoritative SharedCache with type-safe access (`getShared`, `setShared`, `hasShared`, `deleteShared`)
- Provides `getAllShared()` for new window initialization sync
- Handles IPC requests from renderers and broadcasts updates to all windows
- Manages TTL expiration using absolute timestamps (`expireAt`) for precise cross-window sync

### Renderer Process CacheService
- Manages local memory cache
- Proxies shared/persist operations to Main
- Manages local memory cache and SharedCache local copy
- Syncs SharedCache from Main on window initialization (async, non-blocking)
- Provides ready state tracking via `isSharedCacheReady()` and `onSharedCacheReady()`
- Broadcasts cache updates to Main for cross-window sync
- Handles hook subscriptions and updates
- Local TTL management for memory cache
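To make the Renderer-side bullets concrete, here is a minimal, hedged sketch of the hook-based access they refer to (the hook's import path is an assumption; `'example_scope.example_key'` is the example key defined in `SharedCacheSchema`):

```typescript
import { useSharedCache } from '@renderer/data/hooks/useCache' // import path assumed

function ExampleLabel() {
  // Re-renders whenever this schema key changes in any window (or in Main)
  const [example, setExample] = useSharedCache('example_scope.example_key', 'initial value')
  return <button onClick={() => setExample('updated value')}>{example}</button>
}
```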
@@ -106,6 +106,39 @@ const files = cacheService.getPersist('app.recent_files')
cacheService.deletePersist('app.recent_files')
```

## Main Process Usage

Main process CacheService provides SharedCache for cross-window state management.

### SharedCache in Main Process

```typescript
import { cacheService } from '@main/data/CacheService'

// Type-safe (schema key) - matches Renderer's type system
cacheService.setShared('window.layout', layoutConfig)
const layout = cacheService.getShared('window.layout')

// With TTL (30 seconds)
cacheService.setShared('temp.state', state, 30000)

// Check existence
if (cacheService.hasShared('window.layout')) {
  // ...
}

// Delete
cacheService.deleteShared('window.layout')
```

**Note**: Main CacheService does NOT support Casual methods (`getSharedCasual`, etc.). Only schema-based type-safe access is available in Main process.
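Since Main only exposes schema-based access, here is a hedged sketch of the Renderer-side Casual counterparts for contrast (the `window:${id}` key pattern follows the doc comment on `getSharedCasual`; the value shape is illustrative):

```typescript
import { cacheService } from '@data/CacheService'

// Renderer only: Casual methods accept dynamic, non-schema keys
const windowId = 42 // hypothetical id
cacheService.setSharedCasual(`window:${windowId}`, { focused: true }, 60_000)
const state = cacheService.getSharedCasual<{ focused: boolean }>(`window:${windowId}`)
cacheService.deleteSharedCasual(`window:${windowId}`)
```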
### Sync Strategy

- **Renderer → Main**: When Renderer calls `setShared()`, it broadcasts to Main via IPC. Main updates its SharedCache and relays to other windows.
- **Main → Renderer**: When Main calls `setShared()`, it broadcasts to all Renderer windows.
- **New Window Initialization**: New windows fetch complete SharedCache state from Main via `getAllShared()`. Uses Main-priority override strategy for conflicts.
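To make the flow concrete, a sketch of the IPC payload involved, based on `CacheSyncMessage` from `cacheTypes.ts` (key and value are illustrative):

```typescript
import type { CacheSyncMessage } from '@shared/data/cache/cacheTypes'

// What a Renderer broadcasts when it calls setShared() with a 30-second TTL
const message: CacheSyncMessage = {
  type: 'shared',
  key: 'example_scope.example_key',
  value: 'new value',
  expireAt: Date.now() + 30_000 // absolute deadline, so every window expires it at the same instant
}
```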
## Type-Safe vs Casual Methods

### Type-Safe Methods
@@ -237,6 +270,34 @@ export interface MyDataType {
const [data, setData] = useCache('myFeature.data', defaultValue)
```

## Shared Cache Ready State

Renderer CacheService provides ready state tracking for SharedCache initialization sync.

```typescript
import { cacheService } from '@data/CacheService'

// Check if shared cache is ready
if (cacheService.isSharedCacheReady()) {
  // SharedCache has been synced from Main
}

// Register callback when ready
const unsubscribe = cacheService.onSharedCacheReady(() => {
  // Called immediately if already ready, or when sync completes
  console.log('SharedCache ready!')
})

// Cleanup
unsubscribe()
```

**Behavior notes**:
- `getShared()` returns `undefined` before ready (expected behavior)
- `setShared()` works immediately and broadcasts to Main (Main updates its cache)
- Hooks like `useSharedCache` work normally - they set initial values and update when sync completes
- Main-priority override: when sync completes, Main's values override local values
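A small sketch (not from the codebase) of deferring a one-off read until the initial sync has completed; `'window.layout'` reuses the documentation's example key:

```typescript
import { cacheService } from '@data/CacheService'

// Resolve with the value once Main's state has been applied locally
function readSharedWhenReady<T>(read: () => T): Promise<T> {
  return new Promise((resolve) => {
    cacheService.onSharedCacheReady(() => resolve(read()))
  })
}

// e.g. inside an async initialization routine
const layout = await readSharedWhenReady(() => cacheService.getShared('window.layout'))
```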
## Best Practices

1. **Choose the right tier**: Memory for temp, Shared for cross-window, Persist for survival
@@ -244,3 +305,4 @@ const [data, setData] = useCache('myFeature.data', defaultValue)
3. **Prefer type-safe keys**: Add to schema when possible
4. **Clean up dynamic keys**: Remove casual cache entries when no longer needed
5. **Consider data size**: Persist cache uses localStorage (limited to ~5MB)
6. **Use absolute timestamps for sync**: CacheSyncMessage uses `expireAt` (absolute Unix timestamp) for precise cross-window TTL sync
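Regarding point 6, the sending side derives the absolute deadline with a one-liner, mirroring what `setSharedInternal` does (values illustrative):

```typescript
const ttl = 30_000 // relative, in milliseconds
const expireAt = ttl ? Date.now() + ttl : undefined // absolute deadline carried in CacheSyncMessage
// Receivers then compare Date.now() > expireAt instead of re-deriving from a relative ttl
```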
@@ -343,6 +343,7 @@ export enum IpcChannel {
  // Data: Cache
  Cache_Sync = 'cache:sync',
  Cache_SyncBatch = 'cache:sync-batch',
  Cache_GetAllShared = 'cache:get-all-shared',

  // Data: API Channels
  DataApi_Request = 'data-api:request',
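The new `Cache_GetAllShared` channel is consumed via `ipcRenderer.invoke` in the preload bridge (see the preload changes later in this diff); a condensed sketch:

```typescript
import type { CacheEntry } from '@shared/data/cache/cacheTypes'
import { IpcChannel } from '@shared/IpcChannel'
import { ipcRenderer } from 'electron'

// Preload side: expose the initialization fetch to the Renderer
const getAllShared = (): Promise<Record<string, CacheEntry>> => ipcRenderer.invoke(IpcChannel.Cache_GetAllShared)
```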
packages/shared/data/cache/cacheSchemas.ts (6 changes, vendored)
@@ -101,11 +101,11 @@ export const DefaultUseCache: UseCacheSchema = {
/**
 * Use shared cache schema for renderer hook
 */
export type UseSharedCacheSchema = {
export type SharedCacheSchema = {
  'example_scope.example_key': string
}

export const DefaultUseSharedCache: UseSharedCacheSchema = {
export const DefaultSharedCache: SharedCacheSchema = {
  'example_scope.example_key': 'example default value'
}

@@ -126,4 +126,4 @@ export const DefaultRendererPersistCache: RendererPersistCacheSchema = {
 */
export type RendererPersistCacheKey = keyof RendererPersistCacheSchema
export type UseCacheKey = keyof UseCacheSchema
export type UseSharedCacheKey = keyof UseSharedCacheSchema
export type SharedCacheKey = keyof SharedCacheSchema
packages/shared/data/cache/cacheTypes.ts (4 changes, vendored)
@@ -22,7 +22,7 @@ export interface CacheSyncMessage {
  type: 'shared' | 'persist'
  key: string
  value: any
  ttl?: number
  expireAt?: number // Absolute Unix timestamp for precise cross-window sync
}

/**
@@ -33,7 +33,7 @@ export interface CacheSyncBatchMessage {
  entries: Array<{
    key: string
    value: any
    ttl?: number
    expireAt?: number // Absolute Unix timestamp for precise cross-window sync
  }>
}
@@ -18,6 +18,7 @@
 */

import { loggerService } from '@logger'
import type { SharedCacheKey, SharedCacheSchema } from '@shared/data/cache/cacheSchemas'
import type { CacheEntry, CacheSyncMessage } from '@shared/data/cache/cacheTypes'
import { IpcChannel } from '@shared/IpcChannel'
import { BrowserWindow, ipcMain } from 'electron'
@@ -42,9 +43,12 @@ export class CacheService {
  private static instance: CacheService
  private initialized = false

  // Main process cache
  // Main process internal cache
  private cache = new Map<string, CacheEntry>()

  // Shared cache (synchronized with renderer windows)
  private sharedCache = new Map<string, CacheEntry>()

  // GC timer reference and interval time (e.g., every 10 minutes)
  private gcInterval: NodeJS.Timeout | null = null
  private readonly GC_INTERVAL_MS = 10 * 60 * 1000
@@ -79,7 +83,7 @@ export class CacheService {
  // ============ Main Process Cache (Internal) ============

  /**
   * Garbage collection logic
   * Garbage collection logic for both internal and shared cache
   */
  private startGarbageCollection() {
    if (this.gcInterval) return
@@ -88,6 +92,7 @@ export class CacheService {
      const now = Date.now()
      let removedCount = 0

      // Clean internal cache
      for (const [key, entry] of this.cache.entries()) {
        if (entry.expireAt && now > entry.expireAt) {
          this.cache.delete(key)
@@ -95,6 +100,14 @@ export class CacheService {
        }
      }

      // Clean shared cache
      for (const [key, entry] of this.sharedCache.entries()) {
        if (entry.expireAt && now > entry.expireAt) {
          this.sharedCache.delete(key)
          removedCount++
        }
      }

      if (removedCount > 0) {
        logger.debug(`Garbage collection removed ${removedCount} expired items`)
      }
@@ -155,6 +168,110 @@ export class CacheService {
    return this.cache.delete(key)
  }

  // ============ Shared Cache (Cross-window via IPC) ============

  /**
   * Get value from shared cache with TTL validation (type-safe)
   * @param key - Schema-defined shared cache key
   * @returns Cached value or undefined if not found or expired
   */
  getShared<K extends SharedCacheKey>(key: K): SharedCacheSchema[K] | undefined {
    const entry = this.sharedCache.get(key)
    if (!entry) return undefined

    // Check TTL (lazy cleanup)
    if (entry.expireAt && Date.now() > entry.expireAt) {
      this.sharedCache.delete(key)
      return undefined
    }

    return entry.value as SharedCacheSchema[K]
  }

  /**
   * Set value in shared cache with cross-window broadcast (type-safe)
   * @param key - Schema-defined shared cache key
   * @param value - Value to cache (type inferred from schema)
   * @param ttl - Time to live in milliseconds (optional)
   */
  setShared<K extends SharedCacheKey>(key: K, value: SharedCacheSchema[K], ttl?: number): void {
    const expireAt = ttl ? Date.now() + ttl : undefined
    const entry: CacheEntry = { value, expireAt }

    this.sharedCache.set(key, entry)

    // Broadcast to all renderer windows
    this.broadcastSync({
      type: 'shared',
      key,
      value,
      expireAt
    })

    logger.verbose(`Set shared cache key "${key}"`)
  }

  /**
   * Check if key exists in shared cache and is not expired (type-safe)
   * @param key - Schema-defined shared cache key
   * @returns True if key exists and is valid, false otherwise
   */
  hasShared<K extends SharedCacheKey>(key: K): boolean {
    const entry = this.sharedCache.get(key)
    if (!entry) return false

    // Check TTL
    if (entry.expireAt && Date.now() > entry.expireAt) {
      this.sharedCache.delete(key)
      return false
    }

    return true
  }

  /**
   * Delete from shared cache with cross-window broadcast (type-safe)
   * @param key - Schema-defined shared cache key
   * @returns True if deletion succeeded
   */
  deleteShared<K extends SharedCacheKey>(key: K): boolean {
    if (!this.sharedCache.has(key)) {
      return true
    }

    this.sharedCache.delete(key)

    // Broadcast deletion to all renderer windows
    this.broadcastSync({
      type: 'shared',
      key,
      value: undefined // undefined means deletion
    })

    logger.verbose(`Deleted shared cache key "${key}"`)
    return true
  }

  /**
   * Get all shared cache entries (for renderer initialization sync)
   * @returns Record of all shared cache entries with their metadata
   */
  private getAllShared(): Record<string, CacheEntry> {
    const now = Date.now()
    const result: Record<string, CacheEntry> = {}

    for (const [key, entry] of this.sharedCache.entries()) {
      // Skip expired entries
      if (entry.expireAt && now > entry.expireAt) {
        this.sharedCache.delete(key)
        continue
      }
      result[key] = entry
    }

    return result
  }

  // ============ Persist Cache Interface (Reserved) ============

  // TODO: Implement persist cache in future
@@ -180,10 +297,32 @@ export class CacheService {
    // Handle cache sync broadcast from renderer
    ipcMain.on(IpcChannel.Cache_Sync, (event, message: CacheSyncMessage) => {
      const senderWindowId = BrowserWindow.fromWebContents(event.sender)?.id

      // Update Main's sharedCache when receiving shared type sync
      if (message.type === 'shared') {
        if (message.value === undefined) {
          // Handle deletion
          this.sharedCache.delete(message.key)
        } else {
          // Handle set - use expireAt directly (absolute timestamp)
          const entry: CacheEntry = {
            value: message.value,
            expireAt: message.expireAt
          }
          this.sharedCache.set(message.key, entry)
        }
      }

      // Broadcast to other windows
      this.broadcastSync(message, senderWindowId)
      logger.verbose(`Broadcasted cache sync: ${message.type}:${message.key}`)
    })

    // Handle getAllShared request for renderer initialization
    ipcMain.handle(IpcChannel.Cache_GetAllShared, () => {
      return this.getAllShared()
    })

    logger.debug('Cache sync IPC handlers registered')
  }

@@ -197,11 +336,13 @@ export class CacheService {
      this.gcInterval = null
    }

    // Clear cache
    // Clear caches
    this.cache.clear()
    this.sharedCache.clear()

    // Remove IPC handlers
    ipcMain.removeAllListeners(IpcChannel.Cache_Sync)
    ipcMain.removeHandler(IpcChannel.Cache_GetAllShared)

    logger.debug('CacheService cleanup completed')
  }
@@ -14,7 +14,7 @@ import type {
  WebviewKeyEvent
} from '@shared/config/types'
import type { MCPServerLogEntry } from '@shared/config/types'
import type { CacheSyncMessage } from '@shared/data/cache/cacheTypes'
import type { CacheEntry, CacheSyncMessage } from '@shared/data/cache/cacheTypes'
import type {
  PreferenceDefaultScopeType,
  PreferenceKeyType,
@@ -580,7 +580,10 @@ const api = {
      const listener = (_: any, message: CacheSyncMessage) => callback(message)
      ipcRenderer.on(IpcChannel.Cache_Sync, listener)
      return () => ipcRenderer.off(IpcChannel.Cache_Sync, listener)
    }
  },

    // Get all shared cache entries from Main for initialization sync
    getAllShared: (): Promise<Record<string, CacheEntry>> => ipcRenderer.invoke(IpcChannel.Cache_GetAllShared)
  },

  // PreferenceService related APIs
@@ -21,10 +21,10 @@ import { loggerService } from '@logger'
import type {
  RendererPersistCacheKey,
  RendererPersistCacheSchema,
  SharedCacheKey,
  SharedCacheSchema,
  UseCacheKey,
  UseCacheSchema,
  UseSharedCacheKey,
  UseSharedCacheSchema
  UseCacheSchema
} from '@shared/data/cache/cacheSchemas'
import { DefaultRendererPersistCache } from '@shared/data/cache/cacheSchemas'
import type { CacheEntry, CacheSubscriber, CacheSyncMessage } from '@shared/data/cache/cacheTypes'
@@ -66,6 +66,10 @@ export class CacheService {
  private persistSaveTimer?: NodeJS.Timeout
  private persistDirty = false

  // Shared cache ready state for initialization sync
  private sharedCacheReady = false
  private sharedCacheReadyCallbacks: Array<() => void> = []

  private constructor() {
    this.initialize()
  }
@@ -87,6 +91,10 @@ export class CacheService {
    this.loadPersistCache()
    this.setupIpcListeners()
    this.setupWindowUnloadHandler()

    // Async sync SharedCache from Main (does not block initialization)
    this.syncSharedCacheFromMain()

    logger.debug('CacheService initialized')
  }
@@ -279,7 +287,7 @@ export class CacheService {
   * @param key - Schema-defined shared cache key
   * @returns True if key has TTL configured
   */
  hasSharedTTL<K extends UseSharedCacheKey>(key: K): boolean {
  hasSharedTTL<K extends SharedCacheKey>(key: K): boolean {
    const entry = this.sharedCache.get(key)
    return entry?.expireAt !== undefined
  }
@@ -289,7 +297,7 @@ export class CacheService {
   * @param key - Dynamic shared cache key
   * @returns True if key has TTL configured
   */
  hasSharedTTLCasual(key: Exclude<string, UseSharedCacheKey>): boolean {
  hasSharedTTLCasual(key: Exclude<string, SharedCacheKey>): boolean {
    const entry = this.sharedCache.get(key)
    return entry?.expireAt !== undefined
  }
@@ -301,7 +309,7 @@ export class CacheService {
   * @param key - Schema-defined shared cache key
   * @returns Cached value or undefined if not found or expired
   */
  getShared<K extends UseSharedCacheKey>(key: K): UseSharedCacheSchema[K] | undefined {
  getShared<K extends SharedCacheKey>(key: K): SharedCacheSchema[K] | undefined {
    return this.getSharedInternal(key)
  }

@@ -310,7 +318,7 @@ export class CacheService {
   * @param key - Dynamic shared cache key (e.g., `window:${id}`)
   * @returns Cached value or undefined if not found or expired
   */
  getSharedCasual<T>(key: Exclude<string, UseSharedCacheKey>): T | undefined {
  getSharedCasual<T>(key: Exclude<string, SharedCacheKey>): T | undefined {
    return this.getSharedInternal(key)
  }

@@ -337,7 +345,7 @@ export class CacheService {
   * @param value - Value to cache (type inferred from schema)
   * @param ttl - Time to live in milliseconds (optional)
   */
  setShared<K extends UseSharedCacheKey>(key: K, value: UseSharedCacheSchema[K], ttl?: number): void {
  setShared<K extends SharedCacheKey>(key: K, value: SharedCacheSchema[K], ttl?: number): void {
    this.setSharedInternal(key, value, ttl)
  }

@@ -347,7 +355,7 @@ export class CacheService {
   * @param value - Value to cache
   * @param ttl - Time to live in milliseconds (optional)
   */
  setSharedCasual<T>(key: Exclude<string, UseSharedCacheKey>, value: T, ttl?: number): void {
  setSharedCasual<T>(key: Exclude<string, SharedCacheKey>, value: T, ttl?: number): void {
    this.setSharedInternal(key, value, ttl)
  }
@@ -356,11 +364,11 @@ export class CacheService {
   */
  private setSharedInternal(key: string, value: any, ttl?: number): void {
    const existingEntry = this.sharedCache.get(key)
    const newExpireAt = ttl ? Date.now() + ttl : undefined

    // Value comparison optimization
    if (existingEntry && Object.is(existingEntry.value, value)) {
      // Value is same, only update TTL if needed
      const newExpireAt = ttl ? Date.now() + ttl : undefined
      if (!Object.is(existingEntry.expireAt, newExpireAt)) {
        existingEntry.expireAt = newExpireAt
        logger.verbose(`Updated TTL for shared cache key "${key}"`)
@@ -369,7 +377,7 @@ export class CacheService {
          type: 'shared',
          key,
          value,
          ttl
          expireAt: newExpireAt // Use absolute timestamp for precise sync
        })
      } else {
        logger.verbose(`Skipped shared cache update for key "${key}" - value and TTL unchanged`)
@@ -379,7 +387,7 @@ export class CacheService {

    const entry: CacheEntry = {
      value,
      expireAt: ttl ? Date.now() + ttl : undefined
      expireAt: newExpireAt
    }

    // Update local copy first
@@ -391,7 +399,7 @@ export class CacheService {
      type: 'shared',
      key,
      value,
      ttl
      expireAt: newExpireAt // Use absolute timestamp for precise sync
    })
    logger.verbose(`Updated shared cache for key "${key}"`)
  }
@@ -401,7 +409,7 @@ export class CacheService {
   * @param key - Schema-defined shared cache key
   * @returns True if key exists and is valid, false otherwise
   */
  hasShared<K extends UseSharedCacheKey>(key: K): boolean {
  hasShared<K extends SharedCacheKey>(key: K): boolean {
    return this.hasSharedInternal(key)
  }

@@ -410,7 +418,7 @@ export class CacheService {
   * @param key - Dynamic shared cache key
   * @returns True if key exists and is valid, false otherwise
   */
  hasSharedCasual(key: Exclude<string, UseSharedCacheKey>): boolean {
  hasSharedCasual(key: Exclude<string, SharedCacheKey>): boolean {
    return this.hasSharedInternal(key)
  }

@@ -436,7 +444,7 @@ export class CacheService {
   * @param key - Schema-defined shared cache key
   * @returns True if deletion succeeded, false if key is protected by active hooks
   */
  deleteShared<K extends UseSharedCacheKey>(key: K): boolean {
  deleteShared<K extends SharedCacheKey>(key: K): boolean {
    return this.deleteSharedInternal(key)
  }

@@ -445,7 +453,7 @@ export class CacheService {
   * @param key - Dynamic shared cache key
   * @returns True if deletion succeeded, false if key is protected by active hooks
   */
  deleteSharedCasual(key: Exclude<string, UseSharedCacheKey>): boolean {
  deleteSharedCasual(key: Exclude<string, SharedCacheKey>): boolean {
    return this.deleteSharedInternal(key)
  }
@@ -557,6 +565,91 @@ export class CacheService {
    this.activeHooks.delete(key)
  }

  // ============ Shared Cache Ready State Management ============

  /**
   * Check if shared cache has finished initial sync from Main
   * @returns True if shared cache is ready
   */
  isSharedCacheReady(): boolean {
    return this.sharedCacheReady
  }

  /**
   * Register a callback to be called when shared cache is ready
   * If already ready, callback is invoked immediately
   * @param callback - Function to call when ready
   * @returns Unsubscribe function
   */
  onSharedCacheReady(callback: () => void): () => void {
    if (this.sharedCacheReady) {
      callback()
      return () => {}
    }

    this.sharedCacheReadyCallbacks.push(callback)
    return () => {
      const idx = this.sharedCacheReadyCallbacks.indexOf(callback)
      if (idx >= 0) {
        this.sharedCacheReadyCallbacks.splice(idx, 1)
      }
    }
  }

  /**
   * Mark shared cache as ready and notify all waiting callbacks
   */
  private markSharedCacheReady(): void {
    this.sharedCacheReady = true
    this.sharedCacheReadyCallbacks.forEach((cb) => cb())
    this.sharedCacheReadyCallbacks = []
  }

  /**
   * Sync shared cache from Main process during initialization
   * Uses Main-priority override strategy for conflict resolution
   */
  private async syncSharedCacheFromMain(): Promise<void> {
    if (!window.api?.cache?.getAllShared) {
      logger.warn('Cache getAllShared API not available')
      this.markSharedCacheReady()
      return
    }

    try {
      const allShared = await window.api.cache.getAllShared()
      let syncedCount = 0

      for (const [key, entry] of Object.entries(allShared)) {
        // Skip expired entries
        if (entry.expireAt && Date.now() > entry.expireAt) {
          continue
        }

        const existingEntry = this.sharedCache.get(key)

        // Compare value and expireAt to determine if update is needed
        const valueChanged = !existingEntry || !Object.is(existingEntry.value, entry.value)
        const ttlChanged = !existingEntry || !Object.is(existingEntry.expireAt, entry.expireAt)

        if (valueChanged || ttlChanged) {
          // Main-priority override: always use Main's value
          this.sharedCache.set(key, entry)
          this.notifySubscribers(key) // Only notify on actual change
          syncedCount++
        }
      }

      logger.debug(
        `Synced ${syncedCount} changed shared cache entries from Main (total: ${Object.keys(allShared).length})`
      )
    } catch (error) {
      logger.error('Failed to sync shared cache from Main:', error as Error)
    } finally {
      this.markSharedCacheReady()
    }
  }

  // ============ Subscription Management ============

  /**
@@ -746,10 +839,10 @@ export class CacheService {
        // Handle deletion
        this.sharedCache.delete(message.key)
      } else {
        // Handle set
        // Handle set - use expireAt directly (absolute timestamp from sender)
        const entry: CacheEntry = {
          value: message.value,
          expireAt: message.ttl ? Date.now() + message.ttl : undefined
          expireAt: message.expireAt
        }
        this.sharedCache.set(message.key, entry)
      }
@@ -3,12 +3,12 @@ import { loggerService } from '@logger'
import type {
  RendererPersistCacheKey,
  RendererPersistCacheSchema,
  SharedCacheKey,
  SharedCacheSchema,
  UseCacheKey,
  UseCacheSchema,
  UseSharedCacheKey,
  UseSharedCacheSchema
  UseCacheSchema
} from '@shared/data/cache/cacheSchemas'
import { DefaultUseCache, DefaultUseSharedCache } from '@shared/data/cache/cacheSchemas'
import { DefaultSharedCache, DefaultUseCache } from '@shared/data/cache/cacheSchemas'
import { useCallback, useEffect, useSyncExternalStore } from 'react'
const logger = loggerService.withContext('useCache')

@@ -121,10 +121,10 @@ export function useCache<K extends UseCacheKey>(
 * setWindowCount(3)
 * ```
 */
export function useSharedCache<K extends UseSharedCacheKey>(
export function useSharedCache<K extends SharedCacheKey>(
  key: K,
  initValue?: UseSharedCacheSchema[K]
): [UseSharedCacheSchema[K], (value: UseSharedCacheSchema[K]) => void] {
  initValue?: SharedCacheSchema[K]
): [SharedCacheSchema[K], (value: SharedCacheSchema[K]) => void] {
  /**
   * Subscribe to shared cache changes using React's useSyncExternalStore
   * This ensures the component re-renders when the shared cache value changes
@@ -145,7 +145,7 @@ export function useSharedCache<K extends UseSharedCacheKey>(
      }

      if (initValue === undefined) {
        cacheService.setShared(key, DefaultUseSharedCache[key])
        cacheService.setShared(key, DefaultSharedCache[key])
      } else {
        cacheService.setShared(key, initValue)
      }
@@ -178,13 +178,13 @@ export function useSharedCache<K extends UseSharedCacheKey>(
   * @param newValue - New value to store in shared cache
   */
  const setValue = useCallback(
    (newValue: UseSharedCacheSchema[K]) => {
    (newValue: SharedCacheSchema[K]) => {
      cacheService.setShared(key, newValue)
    },
    [key]
  )

  return [value ?? initValue ?? DefaultUseSharedCache[key], setValue]
  return [value ?? initValue ?? DefaultSharedCache[key], setValue]
}

/**
@@ -2,9 +2,9 @@ import type {
  RendererPersistCacheKey,
  RendererPersistCacheSchema,
  UseCacheKey,
  UseSharedCacheKey
  SharedCacheKey
} from '@shared/data/cache/cacheSchemas'
import { DefaultRendererPersistCache, DefaultUseCache, DefaultUseSharedCache } from '@shared/data/cache/cacheSchemas'
import { DefaultRendererPersistCache, DefaultUseCache, DefaultSharedCache } from '@shared/data/cache/cacheSchemas'
import type { CacheSubscriber } from '@shared/data/cache/cacheTypes'
import { vi } from 'vitest'

@@ -235,8 +235,8 @@ function getDefaultValueForKey(key: string): any {
}

function getDefaultSharedValueForKey(key: string): any {
  if (key in DefaultUseSharedCache) {
    return DefaultUseSharedCache[key as UseSharedCacheKey]
  if (key in DefaultSharedCache) {
    return DefaultSharedCache[key as SharedCacheKey]
  }
  return undefined
}
@@ -3,10 +3,10 @@ import type {
  RendererPersistCacheSchema,
  UseCacheKey,
  UseCacheSchema,
  UseSharedCacheKey,
  UseSharedCacheSchema
  SharedCacheKey,
  SharedCacheSchema
} from '@shared/data/cache/cacheSchemas'
import { DefaultRendererPersistCache, DefaultUseCache, DefaultUseSharedCache } from '@shared/data/cache/cacheSchemas'
import { DefaultRendererPersistCache, DefaultUseCache, DefaultSharedCache } from '@shared/data/cache/cacheSchemas'
import { vi } from 'vitest'

/**
@@ -16,7 +16,7 @@ import { vi } from 'vitest'

// Mock cache state storage
const mockMemoryCache = new Map<UseCacheKey, any>()
const mockSharedCache = new Map<UseSharedCacheKey, any>()
const mockSharedCache = new Map<SharedCacheKey, any>()
const mockPersistCache = new Map<RendererPersistCacheKey, any>()

// Initialize caches with defaults
@@ -24,8 +24,8 @@ Object.entries(DefaultUseCache).forEach(([key, value]) => {
  mockMemoryCache.set(key as UseCacheKey, value)
})

Object.entries(DefaultUseSharedCache).forEach(([key, value]) => {
  mockSharedCache.set(key as UseSharedCacheKey, value)
Object.entries(DefaultSharedCache).forEach(([key, value]) => {
  mockSharedCache.set(key as SharedCacheKey, value)
})

Object.entries(DefaultRendererPersistCache).forEach(([key, value]) => {
@@ -34,7 +34,7 @@ Object.entries(DefaultRendererPersistCache).forEach(([key, value]) => {

// Mock subscribers for cache changes
const mockMemorySubscribers = new Map<UseCacheKey, Set<() => void>>()
const mockSharedSubscribers = new Map<UseSharedCacheKey, Set<() => void>>()
const mockSharedSubscribers = new Map<SharedCacheKey, Set<() => void>>()
const mockPersistSubscribers = new Map<RendererPersistCacheKey, Set<() => void>>()

// Helper functions to notify subscribers
@@ -51,7 +51,7 @@ const notifyMemorySubscribers = (key: UseCacheKey) => {
  }
}

const notifySharedSubscribers = (key: UseSharedCacheKey) => {
const notifySharedSubscribers = (key: SharedCacheKey) => {
  const subscribers = mockSharedSubscribers.get(key)
  if (subscribers) {
    subscribers.forEach((callback) => {
@@ -108,21 +108,21 @@ export const mockUseCache = vi.fn(
 * Mock useSharedCache hook (shared cache)
 */
export const mockUseSharedCache = vi.fn(
  <K extends UseSharedCacheKey>(
  <K extends SharedCacheKey>(
    key: K,
    initValue?: UseSharedCacheSchema[K]
  ): [UseSharedCacheSchema[K], (value: UseSharedCacheSchema[K]) => void] => {
    initValue?: SharedCacheSchema[K]
  ): [SharedCacheSchema[K], (value: SharedCacheSchema[K]) => void] => {
    // Get current value
    let currentValue = mockSharedCache.get(key)
    if (currentValue === undefined) {
      currentValue = initValue ?? DefaultUseSharedCache[key]
      currentValue = initValue ?? DefaultSharedCache[key]
      if (currentValue !== undefined) {
        mockSharedCache.set(key, currentValue)
      }
    }

    // Mock setValue function
    const setValue = vi.fn((value: UseSharedCacheSchema[K]) => {
    const setValue = vi.fn((value: SharedCacheSchema[K]) => {
      mockSharedCache.set(key, value)
      notifySharedSubscribers(key)
    })
@@ -188,8 +188,8 @@ export const MockUseCacheUtils = {
    mockMemoryCache.set(key as UseCacheKey, value)
  })

  Object.entries(DefaultUseSharedCache).forEach(([key, value]) => {
    mockSharedCache.set(key as UseSharedCacheKey, value)
  Object.entries(DefaultSharedCache).forEach(([key, value]) => {
    mockSharedCache.set(key as SharedCacheKey, value)
  })

  Object.entries(DefaultRendererPersistCache).forEach(([key, value]) => {
@@ -220,7 +220,7 @@ export const MockUseCacheUtils = {
  /**
   * Set shared cache value for testing
   */
  setSharedCacheValue: <K extends UseSharedCacheKey>(key: K, value: UseSharedCacheSchema[K]) => {
  setSharedCacheValue: <K extends SharedCacheKey>(key: K, value: SharedCacheSchema[K]) => {
    mockSharedCache.set(key, value)
    notifySharedSubscribers(key)
  },
@@ -228,8 +228,8 @@ export const MockUseCacheUtils = {
  /**
   * Get shared cache value
   */
  getSharedCacheValue: <K extends UseSharedCacheKey>(key: K): UseSharedCacheSchema[K] => {
    return mockSharedCache.get(key) ?? DefaultUseSharedCache[key]
  getSharedCacheValue: <K extends SharedCacheKey>(key: K): SharedCacheSchema[K] => {
    return mockSharedCache.get(key) ?? DefaultSharedCache[key]
  },

  /**
@@ -252,7 +252,7 @@ export const MockUseCacheUtils = {
   */
  setMultipleCacheValues: (values: {
    memory?: Array<[UseCacheKey, any]>
    shared?: Array<[UseSharedCacheKey, any]>
    shared?: Array<[SharedCacheKey, any]>
    persist?: Array<[RendererPersistCacheKey, any]>
  }) => {
    values.memory?.forEach(([key, value]) => {
@@ -310,10 +310,10 @@ export const MockUseCacheUtils = {
  /**
   * Mock shared cache hook to return specific value for a key
   */
  mockSharedCacheReturn: <K extends UseSharedCacheKey>(
  mockSharedCacheReturn: <K extends SharedCacheKey>(
    key: K,
    value: UseSharedCacheSchema[K],
    setValue?: (value: UseSharedCacheSchema[K]) => void
    value: SharedCacheSchema[K],
    setValue?: (value: SharedCacheSchema[K]) => void
  ) => {
    mockUseSharedCache.mockImplementation((cacheKey, initValue) => {
      if (cacheKey === key) {
@@ -321,7 +321,7 @@ export const MockUseCacheUtils = {
      }

      // Default behavior for other keys
      const defaultValue = mockSharedCache.get(cacheKey) ?? initValue ?? DefaultUseSharedCache[cacheKey]
      const defaultValue = mockSharedCache.get(cacheKey) ?? initValue ?? DefaultSharedCache[cacheKey]
      return [defaultValue, vi.fn()]
    })
  },
@@ -368,7 +368,7 @@ export const MockUseCacheUtils = {
  /**
   * Add subscriber for shared cache changes
   */
  addSharedSubscriber: (key: UseSharedCacheKey, callback: () => void): (() => void) => {
  addSharedSubscriber: (key: SharedCacheKey, callback: () => void): (() => void) => {
    if (!mockSharedSubscribers.has(key)) {
      mockSharedSubscribers.set(key, new Set())
    }