feat(CacheService): add cache statistics functionality

- Implemented comprehensive cache statistics in CacheService, allowing retrieval of summary and detailed information about cache tiers (memory, shared, persist).
- Introduced methods to estimate memory usage and format byte sizes for better readability.
- Enhanced the Inputbar component to log cache statistics for monitoring purposes.
- Updated related types to support new cache statistics features.
fullex 2026-01-09 23:58:13 +08:00
parent e80c6b06ba
commit fd95b9fe7d
3 changed files with 394 additions and 109 deletions


@ -9,16 +9,18 @@ This guide covers how to use the Cache system in React components and services.
Memory cache is lost on app restart. Best for temporary computed results.
```typescript
import { useCache } from "@data/hooks/useCache";

// Basic usage with default value
const [counter, setCounter] = useCache("ui.counter", 0);

// Update the value
setCounter(counter + 1);

// With TTL (30 seconds)
const [searchResults, setSearchResults] = useCache("search.results", [], {
  ttl: 30000,
});
```
### useSharedCache (Cross-Window Cache)
@ -26,13 +28,16 @@ const [searchResults, setSearchResults] = useCache('search.results', [], { ttl:
Shared cache syncs across all windows but is lost on app restart.
```typescript
import { useSharedCache } from "@data/hooks/useCache";

// Cross-window state
const [layout, setLayout] = useSharedCache("window.layout", defaultLayout);

// Sidebar state shared between windows
const [sidebarCollapsed, setSidebarCollapsed] = useSharedCache(
  "ui.sidebar.collapsed",
  false
);
```
### usePersistCache (Persistent Cache)
@ -40,13 +45,13 @@ const [sidebarCollapsed, setSidebarCollapsed] = useSharedCache('ui.sidebar.colla
Persist cache survives app restarts via localStorage.
```typescript
import { usePersistCache } from "@data/hooks/useCache";

// Recent files list (survives restart)
const [recentFiles, setRecentFiles] = usePersistCache("app.recent_files", []);

// Search history
const [searchHistory, setSearchHistory] = usePersistCache("search.history", []);
```
## CacheService Direct Usage
@ -56,54 +61,54 @@ For non-React code or more control, use CacheService directly.
### Memory Cache
```typescript
import { cacheService } from "@data/CacheService";

// Type-safe (schema key)
cacheService.set("temp.calculation", result);
const result = cacheService.get("temp.calculation");

// With TTL (30 seconds)
cacheService.set("temp.calculation", result, 30000);

// Casual (dynamic key, manual type)
cacheService.setCasual<TopicCache>(`topic:${id}`, topicData);
const topic = cacheService.getCasual<TopicCache>(`topic:${id}`);

// Check existence
if (cacheService.has("temp.calculation")) {
  // ...
}

// Delete
cacheService.delete("temp.calculation");
cacheService.deleteCasual(`topic:${id}`);
```
### Shared Cache
```typescript
// Type-safe (schema key)
cacheService.setShared("window.layout", layoutConfig);
const layout = cacheService.getShared("window.layout");

// Casual (dynamic key)
cacheService.setSharedCasual<WindowState>(`window:${windowId}`, state);
const state = cacheService.getSharedCasual<WindowState>(`window:${windowId}`);

// Delete
cacheService.deleteShared("window.layout");
cacheService.deleteSharedCasual(`window:${windowId}`);
```
### Persist Cache
```typescript
// Schema keys only (no Casual methods for persist)
cacheService.setPersist("app.recent_files", recentFiles);
const files = cacheService.getPersist("app.recent_files");

// Delete
cacheService.deletePersist("app.recent_files");
```
## Main Process Usage
@ -113,22 +118,22 @@ Main process CacheService provides SharedCache for cross-window state management
### SharedCache in Main Process
```typescript
import { cacheService } from "@main/data/CacheService";

// Type-safe (schema key) - matches Renderer's type system
cacheService.setShared("window.layout", layoutConfig);
const layout = cacheService.getShared("window.layout");

// With TTL (30 seconds)
cacheService.setShared("temp.state", state, 30000);

// Check existence
if (cacheService.hasShared("window.layout")) {
  // ...
}

// Delete
cacheService.deleteShared("window.layout");
```
**Note**: Main CacheService does NOT support Casual methods (`getSharedCasual`, etc.). Only schema-based type-safe access is available in Main process.
@ -142,16 +147,18 @@ cacheService.deleteShared('window.layout')
## Type-Safe vs Casual Methods
### Type-Safe Methods
- Use predefined keys from cache schema
- Full auto-completion and type inference
- Compile-time key validation
```typescript
// Key 'ui.counter' must exist in schema
const [counter, setCounter] = useCache("ui.counter", 0);
```
### Casual Methods
- Use dynamically constructed keys
- Require manual type specification via generics
- No compile-time key validation
@ -159,11 +166,11 @@ const [counter, setCounter] = useCache('ui.counter', 0)
```typescript
// Dynamic key, must specify type
const topic = cacheService.getCasual<TopicCache>(`my.custom.key`);

// Compile error: cannot use schema keys with Casual methods
cacheService.getCasual("app.user.avatar"); // Error: matches fixed key
cacheService.getCasual("scroll.position.topic123"); // Error: matches template key
```
### Template Keys
@ -178,56 +185,56 @@ Template keys provide type-safe caching for dynamic key patterns. Define a templ
// packages/shared/data/cache/cacheSchemas.ts
export type UseCacheSchema = {
  // Fixed key
  "app.user.avatar": string;

  // Template keys - use ${variable} for dynamic segments
  // Must follow dot-separated pattern like fixed keys
  "scroll.position.${topicId}": number;
  "entity.cache.${type}_${id}": EntityData;
};

// Default values for templates (shared by all instances)
export const DefaultUseCache: UseCacheSchema = {
  "app.user.avatar": "",
  "scroll.position.${topicId}": 0,
  "entity.cache.${type}_${id}": { loaded: false },
};
```
#### Using Template Keys
```typescript
// TypeScript infers the value type from schema
const [scrollPos, setScrollPos] = useCache("scroll.position.topic123");
// scrollPos is inferred as `number`

const [entity, setEntity] = useCache("entity.cache.user_456");
// entity is inferred as `EntityData`

// Direct CacheService usage
cacheService.set("scroll.position.mytopic", 150); // OK: value must be number
cacheService.set("scroll.position.mytopic", "hi"); // Error: type mismatch
```
#### Template Key Benefits
| Feature                 | Fixed Keys   | Template Keys          | Casual Methods |
| ----------------------- | ------------ | ---------------------- | -------------- |
| Type inference          | ✅ Automatic | ✅ Automatic           | ❌ Manual      |
| Auto-completion         | ✅ Full      | ✅ Partial (prefix)    | ❌ None        |
| Compile-time validation | ✅ Yes       | ✅ Yes                 | ❌ No          |
| Dynamic IDs             | ❌ No        | ✅ Yes                 | ✅ Yes         |
| Default values          | ✅ Yes       | ✅ Shared per template | ❌ No          |
### When to Use Which
| Scenario                        | Method       | Example                                 |
| ------------------------------- | ------------ | --------------------------------------- |
| Fixed cache keys                | Type-safe    | `useCache('ui.counter')`                |
| Dynamic keys with known pattern | Template key | `useCache('scroll.position.topic123')`  |
| Entity caching by ID            | Template key | `get('entity.cache.user_456')`          |
| Completely dynamic keys         | Casual       | `getCasual<T>(\`custom.dynamic.${x}\`)` |
| UI state                        | Type-safe    | `useSharedCache('window.layout')`       |
## Common Patterns
@ -235,16 +242,16 @@ cacheService.set('scroll.position.mytopic', 'hi') // Error: type mismatch
```typescript
function useExpensiveData(input: string) {
  const [cached, setCached] = useCache(`computed:${input}`, null);

  useEffect(() => {
    if (cached === null) {
      const result = expensiveComputation(input);
      setCached(result);
    }
  }, [input, cached, setCached]);

  return cached;
}
```
@ -252,51 +259,55 @@ function useExpensiveData(input: string) {
```typescript
// Window A: Update shared state
const [activeFile, setActiveFile] = useSharedCache("editor.activeFile", null);
setActiveFile(selectedFile);

// Window B: Reacts to change automatically
const [activeFile] = useSharedCache("editor.activeFile", null);
// activeFile updates when Window A changes it
```
### Recent Items with Limit
```typescript
const [recentItems, setRecentItems] = usePersistCache("app.recentItems", []);

const addRecentItem = (item: Item) => {
  setRecentItems((prev) => {
    const filtered = prev.filter((i) => i.id !== item.id);
    return [item, ...filtered].slice(0, 10); // Keep last 10
  });
};
```
### Cache with Expiration Check
```typescript
interface CachedData<T> {
  data: T;
  timestamp: number;
}

function useCachedWithExpiry<T>(
  key: string,
  fetcher: () => Promise<T>,
  maxAge: number
) {
  const [cached, setCached] = useCache<CachedData<T> | null>(key, null);
  const [data, setData] = useState<T | null>(cached?.data ?? null);

  useEffect(() => {
    const isExpired = !cached || Date.now() - cached.timestamp > maxAge;
    if (isExpired) {
      fetcher().then((result) => {
        setCached({ data: result, timestamp: Date.now() });
        setData(result);
      });
    }
  }, [key, maxAge]);

  return data;
}
```
@ -310,13 +321,13 @@ function useCachedWithExpiry<T>(key: string, fetcher: () => Promise<T>, maxAge:
// packages/shared/data/cache/cacheSchemas.ts
export type UseCacheSchema = {
  // Existing keys...
  "myFeature.data": MyDataType;
};

export const DefaultUseCache: UseCacheSchema = {
  // Existing defaults...
  "myFeature.data": { items: [], lastUpdated: 0 },
};
```
#### 2. Define Value Type (if complex)
@ -324,8 +335,8 @@ export const DefaultUseCache: UseCacheSchema = {
```typescript
// packages/shared/data/cache/cacheValueTypes.ts
export interface MyDataType {
  items: string[];
  lastUpdated: number;
}
```
@ -333,7 +344,7 @@ export interface MyDataType {
```typescript
// Now type-safe
const [data, setData] = useCache("myFeature.data");
```
### Adding Template Keys
@ -345,25 +356,25 @@ const [data, setData] = useCache('myFeature.data')
export type UseCacheSchema = {
  // Existing keys...

  // Template key with dynamic segment
  "scroll.position.${topicId}": number;
};

export const DefaultUseCache: UseCacheSchema = {
  // Existing defaults...

  // Default shared by all instances of this template
  "scroll.position.${topicId}": 0,
};
```
#### 2. Use in Code
```typescript
// TypeScript infers number from template pattern
const [scrollPos, setScrollPos] = useCache(`scroll.position.${topicId}`);

// Works with any string in the dynamic segment
const [pos1, setPos1] = useCache("scroll.position.topic123");
const [pos2, setPos2] = useCache("scroll.position.conversationabc");
```
### Key Naming Convention
@ -389,7 +400,7 @@ All keys (fixed and template) must follow the same naming convention:
Renderer CacheService provides ready state tracking for SharedCache initialization sync.
```typescript
import { cacheService } from "@data/CacheService";
// Check if shared cache is ready
if (cacheService.isSharedCacheReady()) {
@ -399,19 +410,34 @@ if (cacheService.isSharedCacheReady()) {
// Register callback when ready
const unsubscribe = cacheService.onSharedCacheReady(() => {
  // Called immediately if already ready, or when sync completes
  console.log("SharedCache ready!");
});

// Cleanup
unsubscribe();
```
**Behavior notes**:
- `getShared()` returns `undefined` before ready (expected behavior)
- `setShared()` works immediately and broadcasts to Main (Main updates its cache)
- Hooks like `useSharedCache` work normally - they set initial values and update when sync completes
- Main-priority override: when sync completes, Main's values override local values
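Putting these notes together, a minimal sketch (not part of this commit; the helper name is illustrative) that defers a read until the shared cache has synced, reusing the `window.layout` key from the earlier examples:

```typescript
import { cacheService } from "@data/CacheService";

// Read a shared value once cross-window sync has completed.
// Returns an unsubscribe function for cleanup.
function readLayoutWhenReady(onLayout: (layout: unknown) => void): () => void {
  if (cacheService.isSharedCacheReady()) {
    // Already synced: safe to read immediately
    onLayout(cacheService.getShared("window.layout"));
    return () => {};
  }

  // Not ready yet: getShared() would return undefined here,
  // so defer the read until the ready callback fires
  return cacheService.onSharedCacheReady(() => {
    onLayout(cacheService.getShared("window.layout"));
  });
}
```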
## Cache Statistics
For debugging purposes, CacheService provides a `getStats()` method to inspect cache state:
```typescript
// Get summary statistics
const stats = cacheService.getStats();
// Get detailed per-entry information
const fullStats = cacheService.getStats(true);
```
The returned object includes entry counts, TTL status, hook references, and estimated memory usage for each cache tier (memory, shared, persist), plus aggregated totals.
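A small sketch of consuming the result; the field names follow the `CacheStats` type added in this commit, while the log format itself is illustrative:

```typescript
import { cacheService } from "@data/CacheService";

const stats = cacheService.getStats();
const { memory, shared, persist, total } = stats.summary;

// One-line overview of all tiers
console.log(
  `[cache] memory=${memory.validCount}/${memory.totalCount} ` +
    `shared=${shared.validCount}/${shared.totalCount} ` +
    `persist=${persist.validCount}/${persist.totalCount} ` +
    `size=${total.estimatedSize}`
);

// Detailed pass: list expired entries still awaiting lazy cleanup
const fullStats = cacheService.getStats(true);
for (const entry of fullStats.details.memory) {
  if (entry.isExpired) console.warn(`[cache] expired: ${entry.key}`);
}
```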
## Best Practices
1. **Choose the right tier**: Memory for temporary data, Shared for cross-window state, Persist for data that must survive restarts


@ -41,3 +41,77 @@ export interface CacheSyncBatchMessage {
* Cache subscription callback
*/
export type CacheSubscriber = () => void

// ============ Cache Statistics Types ============

/**
 * Summary statistics for a single cache tier
 */
export interface CacheTierSummary {
  /** Total number of entries in this tier */
  totalCount: number
  /** Number of valid (non-expired) entries */
  validCount: number
  /** Number of expired entries (lazy cleanup pending) */
  expiredCount: number
  /** Number of entries with TTL configured */
  withTTLCount: number
  /** Total hook reference count for this tier */
  hookReferences: number
  /** Estimated memory size in bytes (rough estimate via JSON serialization) */
  estimatedBytes: number
}

/**
 * Detailed information for a single cache entry
 */
export interface CacheEntryDetail {
  /** Cache key */
  key: string
  /** Whether the entry has a value */
  hasValue: boolean
  /** Whether TTL is configured */
  hasTTL: boolean
  /** Whether the entry is expired */
  isExpired: boolean
  /** Absolute expiration timestamp (ms since epoch) */
  expireAt?: number
  /** Remaining time until expiration (ms), undefined if no TTL */
  remainingTTL?: number
  /** Number of hooks currently referencing this key */
  hookCount: number
}

/**
 * Complete cache statistics
 */
export interface CacheStats {
  /** Timestamp when stats were collected */
  collectedAt: number
  /** Summary statistics */
  summary: {
    memory: CacheTierSummary
    shared: CacheTierSummary
    persist: CacheTierSummary
    /** Aggregated totals across all tiers */
    total: {
      totalCount: number
      validCount: number
      expiredCount: number
      withTTLCount: number
      hookReferences: number
      /** Total estimated memory in bytes */
      estimatedBytes: number
      /** Human-readable memory size (e.g., "1.5 KB", "2.3 MB") */
      estimatedSize: string
    }
  }
  /** Detailed per-entry information (optional, for debugging) */
  details: {
    memory: CacheEntryDetail[]
    shared: CacheEntryDetail[]
    persist: CacheEntryDetail[]
  }
}


@ -27,7 +27,14 @@ import type {
UseCacheKey
} from '@shared/data/cache/cacheSchemas'
import { DefaultRendererPersistCache } from '@shared/data/cache/cacheSchemas'
import type {
  CacheEntry,
  CacheEntryDetail,
  CacheStats,
  CacheSubscriber,
  CacheSyncMessage,
  CacheTierSummary
} from '@shared/data/cache/cacheTypes'
const STORAGE_PERSIST_KEY = 'cs_cache_persist'
@ -688,6 +695,184 @@ export class CacheService {
this.activeHookCounts.set(key, currentCount - 1)
}
  // ============ Statistics ============

  /**
   * Get comprehensive statistics about all cache tiers
   *
   * @param includeDetails - Whether to include per-entry details (default: false)
   * @returns Cache statistics with summary and optional details
   *
   * @example
   * ```typescript
   * // Get summary only (fast)
   * const stats = cacheService.getStats()
   * console.log(`Memory cache: ${stats.summary.memory.validCount} valid entries`)
   *
   * // Get full details (for debugging)
   * const fullStats = cacheService.getStats(true)
   * fullStats.details.memory.forEach(entry => {
   *   if (entry.isExpired) console.log(`Expired: ${entry.key}`)
   * })
   * ```
   */
  public getStats(includeDetails: boolean = false): CacheStats {
    const now = Date.now()

    // Process memory and shared cache tiers
    const memory = this.processCacheTier(this.memoryCache, now, includeDetails)
    const shared = this.processCacheTier(this.sharedCache, now, includeDetails)
    const persist = this.processPersistTier(includeDetails)

    // Calculate totals
    const totalBytes = memory.summary.estimatedBytes + shared.summary.estimatedBytes + persist.summary.estimatedBytes
    const total = {
      totalCount: memory.summary.totalCount + shared.summary.totalCount + persist.summary.totalCount,
      validCount: memory.summary.validCount + shared.summary.validCount + persist.summary.validCount,
      expiredCount: memory.summary.expiredCount + shared.summary.expiredCount,
      withTTLCount: memory.summary.withTTLCount + shared.summary.withTTLCount,
      hookReferences: memory.summary.hookReferences + shared.summary.hookReferences + persist.summary.hookReferences,
      estimatedBytes: totalBytes,
      estimatedSize: this.formatBytes(totalBytes)
    }

    return {
      collectedAt: now,
      summary: {
        memory: memory.summary,
        shared: shared.summary,
        persist: persist.summary,
        total
      },
      details: {
        memory: memory.details,
        shared: shared.details,
        persist: persist.details
      }
    }
  }
  /**
   * Process a cache tier (memory or shared) and collect statistics
   */
  private processCacheTier(
    cache: Map<string, CacheEntry>,
    now: number,
    includeDetails: boolean
  ): { summary: CacheTierSummary; details: CacheEntryDetail[] } {
    let validCount = 0
    let expiredCount = 0
    let withTTLCount = 0
    let hookReferences = 0
    let estimatedBytes = 0
    const details: CacheEntryDetail[] = []

    for (const [key, entry] of cache.entries()) {
      const hasTTL = entry.expireAt !== undefined
      const isExpired = hasTTL && now > entry.expireAt!
      const hookCount = this.activeHookCounts.get(key) ?? 0

      // Estimate memory: key size + value size + metadata overhead
      estimatedBytes += this.estimateSize(key) + this.estimateSize(entry.value)
      if (entry.expireAt) estimatedBytes += 8 // number size

      if (hasTTL) withTTLCount++
      if (isExpired) {
        expiredCount++
      } else {
        validCount++
      }
      hookReferences += hookCount

      if (includeDetails) {
        details.push({
          key,
          hasValue: entry.value !== undefined,
          hasTTL,
          isExpired,
          expireAt: entry.expireAt,
          remainingTTL: hasTTL && !isExpired ? entry.expireAt! - now : undefined,
          hookCount
        })
      }
    }

    return {
      summary: {
        totalCount: cache.size,
        validCount,
        expiredCount,
        withTTLCount,
        hookReferences,
        estimatedBytes
      },
      details
    }
  }
  /**
   * Process persist cache tier and collect statistics
   * Persist cache has no TTL support, all entries are always valid
   */
  private processPersistTier(includeDetails: boolean): {
    summary: CacheTierSummary
    details: CacheEntryDetail[]
  } {
    let hookReferences = 0
    let estimatedBytes = 0

    for (const [key, value] of this.persistCache.entries()) {
      hookReferences += this.activeHookCounts.get(key) ?? 0
      estimatedBytes += this.estimateSize(key) + this.estimateSize(value)
    }

    const details: CacheEntryDetail[] = includeDetails
      ? Array.from(this.persistCache.keys()).map((key) => ({
          key,
          hasValue: true,
          hasTTL: false,
          isExpired: false,
          hookCount: this.activeHookCounts.get(key) ?? 0
        }))
      : []

    return {
      summary: {
        totalCount: this.persistCache.size,
        validCount: this.persistCache.size, // All persist entries are always valid
        expiredCount: 0,
        withTTLCount: 0,
        hookReferences,
        estimatedBytes
      },
      details
    }
  }
  /**
   * Estimate memory size of a value in bytes using JSON serialization
   * Note: This is a rough estimate, actual memory usage may differ
   */
  private estimateSize(value: any): number {
    try {
      return new Blob([JSON.stringify(value)]).size
    } catch {
      return 0
    }
  }

  /**
   * Format bytes to human-readable size
   */
  private formatBytes(bytes: number): string {
    if (bytes === 0) return '0 B'
    const k = 1024
    const sizes = ['B', 'KB', 'MB', 'GB']
    const i = Math.floor(Math.log(bytes) / Math.log(k))
    return `${(bytes / Math.pow(k, i)).toFixed(2)} ${sizes[i]}`
  }
// ============ Shared Cache Ready State Management ============
/**