feat: add migration v2 support and update dependencies

- Integrated migration v2 functionality by importing necessary modules and registering IPC handlers.
- Updated the migration process to check whether data migration is needed and to manage the migration window.
- Added new dependencies for stream-json and its types in package.json.
- Updated electron.vite.config.ts to include the new migration window HTML file.
fullex 2025-11-20 19:48:19 +08:00
parent 1a9fd77599
commit a19419e597
33 changed files with 4015 additions and 10 deletions

View File

@ -134,7 +134,8 @@ export default defineConfig({
selectionToolbar: resolve(__dirname, 'src/renderer/selectionToolbar.html'),
selectionAction: resolve(__dirname, 'src/renderer/selectionAction.html'),
traceWindow: resolve(__dirname, 'src/renderer/traceWindow.html'),
dataRefactorMigrate: resolve(__dirname, 'src/renderer/dataRefactorMigrate.html')
dataRefactorMigrate: resolve(__dirname, 'src/renderer/dataRefactorMigrate.html'),
migrationV2: resolve(__dirname, 'src/renderer/migrationV2.html')
},
onwarn(warning, warn) {
if (warning.code === 'COMMONJS_VARIABLE_IN_ESM') return

View File

@ -101,6 +101,7 @@
"selection-hook": "^1.0.12",
"sharp": "^0.34.3",
"socket.io": "^4.8.1",
"stream-json": "^1.9.1",
"swagger-jsdoc": "^6.2.8",
"swagger-ui-express": "^5.0.1",
"tesseract.js": "patch:tesseract.js@npm%3A6.0.1#~/.yarn/patches/tesseract.js-npm-6.0.1-2562a7e46d.patch",
@ -221,6 +222,7 @@
"@types/react-infinite-scroll-component": "^5.0.0",
"@types/react-transition-group": "^4.4.12",
"@types/react-window": "^1",
"@types/stream-json": "^1",
"@types/swagger-jsdoc": "^6",
"@types/swagger-ui-express": "^4.1.8",
"@types/tinycolor2": "^1",

View File

@ -0,0 +1,55 @@
/**
* Migration context shared between all migrators
*/
import { dbService } from '@data/db/DbService'
import type { DbType } from '@data/db/types'
import { type LoggerService, loggerService } from '@logger'
import type { ConfigManager } from '@main/services/ConfigManager'
import { configManager } from '@main/services/ConfigManager'
import { DexieFileReader } from '../utils/DexieFileReader'
import { ReduxStateReader } from '../utils/ReduxStateReader'
// Logger type for migration context (using actual LoggerService type)
export type MigrationLogger = LoggerService
// Migration context interface
export interface MigrationContext {
// Data source accessors
sources: {
electronStore: ConfigManager
reduxState: ReduxStateReader
dexieExport: DexieFileReader
}
// Target database
db: DbType
// Shared data between migrators
sharedData: Map<string, unknown>
// Logger
logger: MigrationLogger
}
/**
* Create a migration context with all data sources
* @param reduxData - Parsed Redux state data from Renderer
* @param dexieExportPath - Path to exported Dexie files
*/
export function createMigrationContext(reduxData: Record<string, unknown>, dexieExportPath: string): MigrationContext {
const db = dbService.getDb()
const logger = loggerService.withContext('Migration')
return {
sources: {
electronStore: configManager,
reduxState: new ReduxStateReader(reduxData),
dexieExport: new DexieFileReader(dexieExportPath)
},
db,
sharedData: new Map(),
logger
}
}
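
A minimal usage sketch of the context from a migrator's perspective; the keys and table name below are illustrative examples, not part of this file:

import { createMigrationContext } from './MigrationContext'

async function exampleUsage(reduxData: Record<string, unknown>, dexieExportPath: string) {
  const ctx = createMigrationContext(reduxData, dexieExportPath)
  // Old electron-store value via ConfigManager
  const zoom = ctx.sources.electronStore.get('ZoomFactor')
  // Possibly nested value from the persisted Redux state
  const theme = ctx.sources.reduxState.get<string>('settings', 'theme')
  // Small exported Dexie table, loaded in one read
  const files = await ctx.sources.dexieExport.readTable<Record<string, unknown>>('files')
  ctx.logger.info('Sources loaded', { zoom, theme, fileCount: files.length })
}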

View File

@ -0,0 +1,369 @@
/**
* Migration engine orchestrates the entire migration process
* Coordinates migrators, manages progress, and handles failures
*/
import { dbService } from '@data/db/DbService'
import { appStateTable } from '@data/db/schemas/appState'
import { preferenceTable } from '@data/db/schemas/preference'
import { loggerService } from '@logger'
import { eq, sql } from 'drizzle-orm'
import fs from 'fs/promises'
import type { BaseMigrator } from '../migrators/BaseMigrator'
import { createMigrationContext } from './MigrationContext'
import type {
MigrationProgress,
MigrationResult,
MigrationStage,
MigrationStatusValue,
MigratorResult,
MigratorStatus,
ValidateResult
} from './types'
// TODO: Import these tables when they are created in user data schema
// import { assistantTable } from '../../db/schemas/assistant'
// import { topicTable } from '../../db/schemas/topic'
// import { messageTable } from '../../db/schemas/message'
// import { fileTable } from '../../db/schemas/file'
// import { knowledgeBaseTable } from '../../db/schemas/knowledgeBase'
const logger = loggerService.withContext('MigrationEngine')
const MIGRATION_V2_STATUS = 'migration_v2_status'
export class MigrationEngine {
private migrators: BaseMigrator[] = []
private progressCallback?: (progress: MigrationProgress) => void
constructor() {}
/**
* Register migrators in execution order
*/
registerMigrators(migrators: BaseMigrator[]): void {
this.migrators = migrators.sort((a, b) => a.order - b.order)
logger.info('Migrators registered', {
migrators: this.migrators.map((m) => ({ id: m.id, name: m.name, order: m.order }))
})
}
/**
* Set progress callback for UI updates
*/
onProgress(callback: (progress: MigrationProgress) => void): void {
this.progressCallback = callback
}
/**
* Check if migration is needed
*/
async needsMigration(): Promise<boolean> {
const db = dbService.getDb()
const status = await db.select().from(appStateTable).where(eq(appStateTable.key, MIGRATION_V2_STATUS)).get()
// Migration needed if: no status record, or status is not 'completed'
if (!status?.value) return true
const statusValue = status.value as MigrationStatusValue
return statusValue.status !== 'completed'
}
/**
* Get last migration error (for UI display)
*/
async getLastError(): Promise<string | null> {
const db = dbService.getDb()
const status = await db.select().from(appStateTable).where(eq(appStateTable.key, MIGRATION_V2_STATUS)).get()
if (status?.value) {
const statusValue = status.value as MigrationStatusValue
if (statusValue.status === 'failed') {
return statusValue.error || 'Unknown error'
}
}
return null
}
/**
* Execute full migration
* @param reduxData - Parsed Redux state data from Renderer
* @param dexieExportPath - Path to exported Dexie files
*/
async run(reduxData: Record<string, unknown>, dexieExportPath: string): Promise<MigrationResult> {
const startTime = Date.now()
const results: MigratorResult[] = []
try {
// Safety check: verify new tables status before clearing
await this.verifyAndClearNewTables()
// Create migration context
const context = createMigrationContext(reduxData, dexieExportPath)
for (let i = 0; i < this.migrators.length; i++) {
const migrator = this.migrators[i]
const migratorStartTime = Date.now()
logger.info(`Starting migrator: ${migrator.name}`, { id: migrator.id })
// Update progress: migrator starting
this.updateProgress('migration', this.calculateProgress(i, 0), migrator)
// Set up migrator progress callback
migrator.setProgressCallback((progress, message) => {
this.updateProgress('migration', this.calculateProgress(i, progress), migrator, message)
})
// Phase 1: Prepare (includes dry-run validation)
const prepareResult = await migrator.prepare(context)
if (!prepareResult.success) {
throw new Error(`${migrator.name} prepare failed: ${prepareResult.warnings?.join(', ')}`)
}
logger.info(`${migrator.name} prepare completed`, { itemCount: prepareResult.itemCount })
// Phase 2: Execute (each migrator manages its own transactions)
const executeResult = await migrator.execute(context)
if (!executeResult.success) {
throw new Error(`${migrator.name} execute failed: ${executeResult.error}`)
}
logger.info(`${migrator.name} execute completed`, {
processedCount: executeResult.processedCount
})
// Phase 3: Validate
const validateResult = await migrator.validate(context)
// Engine-level validation
this.validateMigratorResult(migrator, validateResult)
logger.info(`${migrator.name} validation passed`, { stats: validateResult.stats })
// Record result
results.push({
migratorId: migrator.id,
migratorName: migrator.name,
success: true,
recordsProcessed: executeResult.processedCount,
duration: Date.now() - migratorStartTime
})
// Update progress: migrator completed
this.updateProgress('migration', this.calculateProgress(i + 1, 0), migrator, 'completed')
}
// Mark migration completed
await this.markCompleted()
// Cleanup temporary files
await this.cleanupTempFiles(dexieExportPath)
logger.info('Migration completed successfully', {
totalDuration: Date.now() - startTime,
migratorCount: results.length
})
return {
success: true,
migratorResults: results,
totalDuration: Date.now() - startTime
}
} catch (error) {
const errorMessage = error instanceof Error ? error.message : String(error)
logger.error('Migration failed', { error: errorMessage })
// Mark migration as failed with error details
await this.markFailed(errorMessage)
return {
success: false,
migratorResults: results,
totalDuration: Date.now() - startTime,
error: errorMessage
}
}
}
/**
* Verify and clear new architecture tables before migration
* Safety check: log if tables are not empty (may indicate previous failed migration)
*/
private async verifyAndClearNewTables(): Promise<void> {
const db = dbService.getDb()
// Tables to clear - add more as they are created
const tables = [
{ table: preferenceTable, name: 'preference' }
// TODO: Add these when tables are created
// { table: assistantTable, name: 'assistant' },
// { table: topicTable, name: 'topic' },
// { table: messageTable, name: 'message' },
// { table: fileTable, name: 'file' },
// { table: knowledgeBaseTable, name: 'knowledge_base' }
]
// Check if tables have data (safety check)
for (const { table, name } of tables) {
const result = await db.select({ count: sql<number>`count(*)` }).from(table).get()
const count = result?.count ?? 0
if (count > 0) {
logger.warn(`Table '${name}' is not empty (${count} rows), clearing for fresh migration`)
}
}
// Clear tables in reverse dependency order
// TODO: Add these when tables are created (in correct order)
// await db.delete(messageTable)
// await db.delete(topicTable)
// await db.delete(fileTable)
// await db.delete(knowledgeBaseTable)
// await db.delete(assistantTable)
await db.delete(preferenceTable)
logger.info('All new architecture tables cleared successfully')
}
/**
* Validate migrator result at engine level
* Ensures count validation and error checking
*/
private validateMigratorResult(migrator: BaseMigrator, result: ValidateResult): void {
const { stats } = result
// Count validation: target must have at least source count minus skipped
const expectedCount = stats.sourceCount - stats.skippedCount
if (stats.targetCount < expectedCount) {
throw new Error(
`${migrator.name} count mismatch: ` +
`expected ${expectedCount}, ` +
`got ${stats.targetCount}. ${stats.mismatchReason || ''}`
)
}
// Any validation errors are fatal
if (result.errors.length > 0) {
const errorSummary = result.errors
.slice(0, 3)
.map((e) => e.message)
.join('; ')
throw new Error(
`${migrator.name} validation failed: ${errorSummary}` +
(result.errors.length > 3 ? ` (+${result.errors.length - 3} more)` : '')
)
}
}
/**
* Cleanup temporary export files
*/
private async cleanupTempFiles(exportPath: string): Promise<void> {
try {
await fs.rm(exportPath, { recursive: true, force: true })
logger.info('Temporary files cleaned up', { path: exportPath })
} catch (error) {
logger.warn('Failed to cleanup temp files', { error, path: exportPath })
}
}
/**
* Calculate overall progress based on completed migrators and current migrator progress
*/
private calculateProgress(completedMigrators: number, currentMigratorProgress: number): number {
if (this.migrators.length === 0) return 0
const migratorWeight = 100 / this.migrators.length
return Math.round(completedMigrators * migratorWeight + (currentMigratorProgress / 100) * migratorWeight)
}
/**
* Update progress callback with current state
*/
private updateProgress(
stage: MigrationStage,
overallProgress: number,
currentMigrator: BaseMigrator,
message?: string
): void {
const migratorsProgress = this.migrators.map((m) => ({
id: m.id,
name: m.name,
status: this.getMigratorStatus(m, currentMigrator)
}))
this.progressCallback?.({
stage,
overallProgress,
currentMessage: message || `Processing ${currentMigrator.name}...`,
migrators: migratorsProgress
})
}
/**
* Determine migrator status based on execution order
*/
private getMigratorStatus(migrator: BaseMigrator, current: BaseMigrator): MigratorStatus {
if (migrator.order < current.order) return 'completed'
if (migrator.order === current.order) return 'running'
return 'pending'
}
/**
* Mark migration as completed in app_state
*/
private async markCompleted(): Promise<void> {
const db = dbService.getDb()
const statusValue: MigrationStatusValue = {
status: 'completed',
completedAt: Date.now(),
version: '2.0.0',
error: null
}
await db
.insert(appStateTable)
.values({
key: MIGRATION_V2_STATUS,
value: statusValue
})
.onConflictDoUpdate({
target: appStateTable.key,
set: {
value: statusValue,
updatedAt: Date.now()
}
})
}
/**
* Mark migration as failed in app_state with error details
*/
private async markFailed(error: string): Promise<void> {
const db = dbService.getDb()
const statusValue: MigrationStatusValue = {
status: 'failed',
failedAt: Date.now(),
version: '2.0.0',
error: error
}
await db
.insert(appStateTable)
.values({
key: MIGRATION_V2_STATUS,
value: statusValue
})
.onConflictDoUpdate({
target: appStateTable.key,
set: {
value: statusValue,
updatedAt: Date.now()
}
})
}
}
// Export singleton instance
export const migrationEngine = new MigrationEngine()
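
A minimal sketch of how the engine is intended to be driven; the real onProgress/run calls appear in MigrationIpcHandler later in this commit, so the wiring and logging below are illustrative:

import { getAllMigrators } from '../migrators'
import { migrationEngine } from './MigrationEngine'

async function runFullMigration(reduxData: Record<string, unknown>, dexieExportPath: string) {
  if (!(await migrationEngine.needsMigration())) return

  migrationEngine.registerMigrators(getAllMigrators())
  migrationEngine.onProgress((progress) => {
    // Forward to the migration window, a log, etc.
    console.log(`[${progress.stage}] ${progress.overallProgress}% ${progress.currentMessage}`)
  })

  const result = await migrationEngine.run(reduxData, dexieExportPath)
  if (!result.success) {
    console.error('Migration failed:', result.error)
  }
}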

View File

@ -0,0 +1,122 @@
/**
* Core type definitions for the migration system
*/
// Migration stages for UI flow
export type MigrationStage =
| 'introduction'
| 'backup_required'
| 'backup_progress'
| 'backup_confirmed'
| 'migration'
| 'completed'
| 'error'
// Individual migrator status
export type MigratorStatus = 'pending' | 'running' | 'completed' | 'failed'
// Migrator progress info for UI display
export interface MigratorProgress {
id: string
name: string
status: MigratorStatus
error?: string
}
// Overall migration progress
export interface MigrationProgress {
stage: MigrationStage
overallProgress: number // 0-100
currentMessage: string
migrators: MigratorProgress[]
error?: string
}
// Prepare phase result
export interface PrepareResult {
success: boolean
itemCount: number
warnings?: string[]
}
// Execute phase result
export interface ExecuteResult {
success: boolean
processedCount: number
error?: string
}
// Validation error detail
export interface ValidationError {
key: string
expected?: unknown
actual?: unknown
message: string
}
// Validate phase result with count validation support
export interface ValidateResult {
success: boolean
errors: ValidationError[]
stats: {
sourceCount: number
targetCount: number
skippedCount: number
mismatchReason?: string
}
}
// Individual migrator result
export interface MigratorResult {
migratorId: string
migratorName: string
success: boolean
recordsProcessed: number
duration: number
error?: string
}
// Overall migration result
export interface MigrationResult {
success: boolean
migratorResults: MigratorResult[]
totalDuration: number
error?: string
}
// Migration status stored in app_state table
export interface MigrationStatusValue {
status: 'completed' | 'failed' | 'in_progress'
completedAt?: number
failedAt?: number
version: string
error?: string | null
}
// IPC channels for migration communication
export const MigrationIpcChannels = {
// Status queries
CheckNeeded: 'migration:check-needed',
GetProgress: 'migration:get-progress',
GetLastError: 'migration:get-last-error',
GetUserDataPath: 'migration:get-user-data-path',
// Flow control
Start: 'migration:start',
ProceedToBackup: 'migration:proceed-to-backup',
ShowBackupDialog: 'migration:show-backup-dialog',
BackupCompleted: 'migration:backup-completed',
StartMigration: 'migration:start-migration',
Retry: 'migration:retry',
Cancel: 'migration:cancel',
Restart: 'migration:restart',
// Data transfer (Renderer -> Main)
SendReduxData: 'migration:send-redux-data',
DexieExportCompleted: 'migration:dexie-export-completed',
WriteExportFile: 'migration:write-export-file',
// Progress broadcast (Main -> Renderer)
Progress: 'migration:progress',
ExportProgress: 'migration:export-progress'
} as const
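
A hedged renderer-side sketch of this channel protocol over a bare ipcRenderer bridge; the import paths are illustrative and production code presumably goes through a preload API:

import { ipcRenderer } from 'electron'
import { MigrationIpcChannels, type MigrationProgress } from './types'

async function runRendererSide(parsedReduxState: Record<string, unknown>, dexieExportPath: string) {
  if (!(await ipcRenderer.invoke(MigrationIpcChannels.CheckNeeded))) return

  // Listen for progress broadcasts from the main process
  ipcRenderer.on(MigrationIpcChannels.Progress, (_event, progress: MigrationProgress) => {
    console.log(progress.stage, progress.overallProgress, progress.currentMessage)
  })

  // Hand over source data, then start the migration
  await ipcRenderer.invoke(MigrationIpcChannels.SendReduxData, parsedReduxState)
  await ipcRenderer.invoke(MigrationIpcChannels.DexieExportCompleted, dexieExportPath)
  await ipcRenderer.invoke(MigrationIpcChannels.StartMigration)
}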

View File

@ -0,0 +1,25 @@
/**
* Migration v2 module exports
*/
// Core
export { createMigrationContext, type MigrationContext } from './core/MigrationContext'
export { MigrationEngine, migrationEngine } from './core/MigrationEngine'
export * from './core/types'
// Migrators
export { getAllMigrators } from './migrators'
export { BaseMigrator } from './migrators/BaseMigrator'
// Utils
export { DexieFileReader } from './utils/DexieFileReader'
export { JSONStreamReader } from './utils/JSONStreamReader'
export { ReduxStateReader } from './utils/ReduxStateReader'
// Window management
export {
registerMigrationIpcHandlers,
resetMigrationData,
unregisterMigrationIpcHandlers
} from './window/MigrationIpcHandler'
export { MigrationWindowManager, migrationWindowManager } from './window/MigrationWindowManager'

View File

@ -0,0 +1,67 @@
/**
* Assistant migrator - migrates assistants from Redux to SQLite
*
* TODO: Implement when assistant tables are created
* Data source: Redux assistants slice (not Dexie)
* Target tables: assistant, agent, provider, model
*/
import { loggerService } from '@logger'
import type { ExecuteResult, PrepareResult, ValidateResult } from '../core/types'
import { BaseMigrator } from './BaseMigrator'
const logger = loggerService.withContext('AssistantMigrator')
export class AssistantMigrator extends BaseMigrator {
readonly id = 'assistant'
readonly name = 'Assistant'
readonly description = 'Migrate assistant and model configuration'
readonly order = 2
async prepare(): Promise<PrepareResult> {
logger.info('AssistantMigrator.prepare - placeholder implementation')
// TODO: Implement when assistant tables are created
// 1. Read from _ctx.sources.reduxState.getCategory('assistants')
// 2. Extract assistants, presets, defaultAssistant
// 3. Prepare data for migration
return {
success: true,
itemCount: 0,
warnings: ['AssistantMigrator not yet implemented - waiting for assistant tables']
}
}
async execute(): Promise<ExecuteResult> {
logger.info('AssistantMigrator.execute - placeholder implementation')
// TODO: Implement when assistant tables are created
// 1. Insert assistants into assistant table
// 2. Insert related data (agents, providers, models)
return {
success: true,
processedCount: 0
}
}
async validate(): Promise<ValidateResult> {
logger.info('AssistantMigrator.validate - placeholder implementation')
// TODO: Implement when assistant tables are created
// 1. Count validation
// 2. Sample validation
return {
success: true,
errors: [],
stats: {
sourceCount: 0,
targetCount: 0,
skippedCount: 0
}
}
}
}

View File

@ -0,0 +1,52 @@
/**
* Abstract base class for all migrators
* Each migrator handles migration of a specific business domain
*/
import type { MigrationContext } from '../core/MigrationContext'
import type { ExecuteResult, PrepareResult, ValidateResult } from '../core/types'
export abstract class BaseMigrator {
// Metadata - must be implemented by subclasses
abstract readonly id: string
abstract readonly name: string // Display name for UI
abstract readonly description: string // Display description for UI
abstract readonly order: number // Execution order (lower runs first)
// Progress callback for UI updates
protected onProgress?: (progress: number, message: string) => void
/**
* Set progress callback for reporting progress to UI
*/
setProgressCallback(callback: (progress: number, message: string) => void): void {
this.onProgress = callback
}
/**
* Report progress to UI
* @param progress - Progress percentage (0-100)
* @param message - Progress message
*/
protected reportProgress(progress: number, message: string): void {
this.onProgress?.(progress, message)
}
/**
* Prepare phase - validate source data and count items
* This includes dry-run validation to catch errors early
*/
abstract prepare(ctx: MigrationContext): Promise<PrepareResult>
/**
* Execute phase - perform the actual data migration
* Each migrator manages its own transactions
*/
abstract execute(ctx: MigrationContext): Promise<ExecuteResult>
/**
* Validate phase - verify migrated data integrity
* Must include count validation
*/
abstract validate(ctx: MigrationContext): Promise<ValidateResult>
}

View File

@ -0,0 +1,81 @@
/**
* Chat migrator - migrates topics and messages from Dexie to SQLite
*
* TODO: Implement when chat tables are created
* Data source: Dexie topics table (messages are embedded in topics)
* Target tables: topic, message
*
* Note: This migrator handles the largest amount of data (potentially millions of messages)
* and uses streaming JSON reading with batch inserts for memory efficiency.
*/
import { loggerService } from '@logger'
import type { ExecuteResult, PrepareResult, ValidateResult } from '../core/types'
import { BaseMigrator } from './BaseMigrator'
const logger = loggerService.withContext('ChatMigrator')
export class ChatMigrator extends BaseMigrator {
readonly id = 'chat'
readonly name = 'ChatData'
readonly description = 'Migrate chat data'
readonly order = 4
async prepare(): Promise<PrepareResult> {
logger.info('ChatMigrator.prepare - placeholder implementation')
// TODO: Implement when chat tables are created
// 1. Check if topics.json export file exists
// 2. Validate JSON format with sample read
// 3. Count total topics and estimate message count
// 4. Check for data integrity (e.g., messages have valid topic references)
return {
success: true,
itemCount: 0,
warnings: ['ChatMigrator not yet implemented - waiting for chat tables']
}
}
async execute(): Promise<ExecuteResult> {
logger.info('ChatMigrator.execute - placeholder implementation')
// TODO: Implement when chat tables are created
// Use streaming JSON reader for large message files:
//
// const streamReader = _ctx.sources.dexieExport.createStreamReader('topics')
// await streamReader.readInBatches<OldTopic>(
// BATCH_SIZE,
// async (topics, batchIndex) => {
// // 1. Insert topics
// // 2. Extract and insert messages from each topic
// // 3. Report progress
// }
// )
return {
success: true,
processedCount: 0
}
}
async validate(): Promise<ValidateResult> {
logger.info('ChatMigrator.validate - placeholder implementation')
// TODO: Implement when chat tables are created
// 1. Count validation for topics and messages
// 2. Sample validation (check a few topics have correct message counts)
// 3. Reference integrity validation
return {
success: true,
errors: [],
stats: {
sourceCount: 0,
targetCount: 0,
skippedCount: 0
}
}
}
}
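
The TODO above outlines streaming with batch inserts; below is a slightly fuller, hypothetical sketch of that shape. The topic/message tables and the OldTopic type do not exist in this commit and are placeholders only:

import type { MigrationContext } from '../core/MigrationContext'

interface OldTopic {
  id: string
  messages: { id: string }[]
}

const BATCH_SIZE = 500 // illustrative

async function migrateTopics(ctx: MigrationContext): Promise<number> {
  const streamReader = ctx.sources.dexieExport.createStreamReader('topics')
  return streamReader.readInBatches<OldTopic>(BATCH_SIZE, async (topics, batchIndex) => {
    // One transaction per batch keeps memory use and rollback scope bounded
    await ctx.db.transaction(async (_tx) => {
      // await _tx.insert(topicTable).values(...)    // hypothetical topic table
      // await _tx.insert(messageTable).values(...)  // hypothetical message table
    })
    ctx.logger.info(`Topic batch ${batchIndex} inserted`, { size: topics.length })
  })
}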

View File

@ -0,0 +1,74 @@
/**
* Knowledge migrator - migrates knowledge bases from Redux and Dexie to SQLite
*
* TODO: Implement when knowledge tables are created
* Data sources:
* - Redux knowledge slice (knowledge.bases metadata)
* - Dexie knowledge_notes table
* - Dexie files table (for file references)
* Target tables: knowledge_base, knowledge_note, file
*/
import { loggerService } from '@logger'
import type { ExecuteResult, PrepareResult, ValidateResult } from '../core/types'
import { BaseMigrator } from './BaseMigrator'
const logger = loggerService.withContext('KnowledgeMigrator')
export class KnowledgeMigrator extends BaseMigrator {
readonly id = 'knowledge'
readonly name = 'KnowledgeBase'
readonly description = 'Migrate knowledge base and file data'
readonly order = 3
async prepare(): Promise<PrepareResult> {
logger.info('KnowledgeMigrator.prepare - placeholder implementation')
// TODO: Implement when knowledge tables are created
// 1. Read from _ctx.sources.reduxState.getCategory('knowledge')
// 2. Read from _ctx.sources.dexieExport.readTable('knowledge_notes')
// 3. Read from _ctx.sources.dexieExport.readTable('files')
// 4. Check reference integrity between knowledge items and files
// 5. Prepare data for migration
return {
success: true,
itemCount: 0,
warnings: ['KnowledgeMigrator not yet implemented - waiting for knowledge tables']
}
}
async execute(): Promise<ExecuteResult> {
logger.info('KnowledgeMigrator.execute - placeholder implementation')
// TODO: Implement when knowledge tables are created
// 1. Insert files into file table
// 2. Insert knowledge bases into knowledge_base table
// 3. Insert knowledge notes into knowledge_note table
return {
success: true,
processedCount: 0
}
}
async validate(): Promise<ValidateResult> {
logger.info('KnowledgeMigrator.validate - placeholder implementation')
// TODO: Implement when knowledge tables are created
// 1. Count validation for each table
// 2. Reference integrity validation
// 3. Sample validation
return {
success: true,
errors: [],
stats: {
sourceCount: 0,
targetCount: 0,
skippedCount: 0
}
}
}
}

View File

@ -0,0 +1,248 @@
/**
* Preferences migrator - migrates preferences from ElectronStore and Redux to SQLite
*/
import { preferenceTable } from '@data/db/schemas/preference'
import { loggerService } from '@logger'
import { configManager } from '@main/services/ConfigManager'
import { DefaultPreferences } from '@shared/data/preference/preferenceSchemas'
import { and, eq, sql } from 'drizzle-orm'
import type { MigrationContext } from '../core/MigrationContext'
import type { ExecuteResult, PrepareResult, ValidateResult, ValidationError } from '../core/types'
import { BaseMigrator } from './BaseMigrator'
import { ELECTRON_STORE_MAPPINGS, REDUX_STORE_MAPPINGS } from './mappings/PreferencesMappings'
const logger = loggerService.withContext('PreferencesMigrator')
interface MigrationItem {
originalKey: string
targetKey: string
defaultValue: unknown
source: 'electronStore' | 'redux'
sourceCategory?: string
}
interface PreparedData {
targetKey: string
value: unknown
source: 'electronStore' | 'redux'
originalKey: string
}
export class PreferencesMigrator extends BaseMigrator {
readonly id = 'preferences'
readonly name = 'Preferences'
readonly description = 'Migrate application preferences'
readonly order = 1
private preparedItems: PreparedData[] = []
private skippedCount = 0
async prepare(ctx: MigrationContext): Promise<PrepareResult> {
const warnings: string[] = []
this.preparedItems = []
this.skippedCount = 0
try {
// Load migration items from mappings
const migrationItems = this.loadMigrationItems()
logger.info(`Found ${migrationItems.length} preference items to migrate`)
// Prepare each item
for (const item of migrationItems) {
try {
let originalValue: unknown
// Read from source
if (item.source === 'electronStore') {
originalValue = configManager.get(item.originalKey)
} else if (item.source === 'redux' && item.sourceCategory) {
originalValue = ctx.sources.reduxState.get(item.sourceCategory, item.originalKey)
}
// Determine value to migrate
let valueToMigrate = originalValue
if (originalValue === undefined || originalValue === null) {
if (item.defaultValue !== null && item.defaultValue !== undefined) {
valueToMigrate = item.defaultValue
} else {
this.skippedCount++
continue
}
}
this.preparedItems.push({
targetKey: item.targetKey,
value: valueToMigrate,
source: item.source,
originalKey: item.originalKey
})
} catch (error) {
warnings.push(`Failed to prepare ${item.originalKey}: ${error}`)
}
}
logger.info('Preparation completed', {
itemCount: this.preparedItems.length,
skipped: this.skippedCount
})
return {
success: true,
itemCount: this.preparedItems.length,
warnings: warnings.length > 0 ? warnings : undefined
}
} catch (error) {
logger.error('Preparation failed', error as Error)
return {
success: false,
itemCount: 0,
warnings: [error instanceof Error ? error.message : String(error)]
}
}
}
async execute(ctx: MigrationContext): Promise<ExecuteResult> {
if (this.preparedItems.length === 0) {
return { success: true, processedCount: 0 }
}
try {
const db = ctx.db
const scope = 'default'
const timestamp = Date.now()
// Use transaction for atomic insert
await db.transaction(async (tx) => {
// Batch insert all preferences
const insertValues = this.preparedItems.map((item) => ({
scope,
key: item.targetKey,
value: item.value,
createdAt: timestamp,
updatedAt: timestamp
}))
// Insert in batches to avoid SQL limitations
const BATCH_SIZE = 100
for (let i = 0; i < insertValues.length; i += BATCH_SIZE) {
const batch = insertValues.slice(i, i + BATCH_SIZE)
await tx.insert(preferenceTable).values(batch)
// Report progress
const progress = Math.round(((i + batch.length) / insertValues.length) * 100)
this.reportProgress(progress, `Migrated ${i + batch.length}/${insertValues.length} preference items`)
}
})
logger.info('Execute completed', { processedCount: this.preparedItems.length })
return {
success: true,
processedCount: this.preparedItems.length
}
} catch (error) {
logger.error('Execute failed', error as Error)
return {
success: false,
processedCount: 0,
error: error instanceof Error ? error.message : String(error)
}
}
}
async validate(ctx: MigrationContext): Promise<ValidateResult> {
const errors: ValidationError[] = []
const db = ctx.db
try {
// Count validation
const result = await db
.select({ count: sql<number>`count(*)` })
.from(preferenceTable)
.where(eq(preferenceTable.scope, 'default'))
.get()
const targetCount = result?.count ?? 0
// Sample validation - check critical keys
const criticalKeys = ['app.language', 'ui.theme_mode', 'app.zoom_factor']
for (const key of criticalKeys) {
const record = await db
.select()
.from(preferenceTable)
.where(and(eq(preferenceTable.scope, 'default'), eq(preferenceTable.key, key)))
.get()
if (!record) {
// Not an error if the key wasn't in source data
const wasPrepared = this.preparedItems.some((item) => item.targetKey === key)
if (wasPrepared) {
errors.push({
key,
message: `Critical preference '${key}' not found after migration`
})
}
}
}
return {
success: errors.length === 0,
errors,
stats: {
sourceCount: this.preparedItems.length,
targetCount,
skippedCount: this.skippedCount
}
}
} catch (error) {
logger.error('Validation failed', error as Error)
return {
success: false,
errors: [
{
key: 'validation',
message: error instanceof Error ? error.message : String(error)
}
],
stats: {
sourceCount: this.preparedItems.length,
targetCount: 0,
skippedCount: this.skippedCount
}
}
}
}
private loadMigrationItems(): MigrationItem[] {
const items: MigrationItem[] = []
// Process ElectronStore mappings
for (const mapping of ELECTRON_STORE_MAPPINGS) {
const defaultValue = DefaultPreferences.default[mapping.targetKey] ?? null
items.push({
originalKey: mapping.originalKey,
targetKey: mapping.targetKey,
defaultValue,
source: 'electronStore'
})
}
// Process Redux mappings
for (const [category, mappings] of Object.entries(REDUX_STORE_MAPPINGS)) {
for (const mapping of mappings) {
const defaultValue = DefaultPreferences.default[mapping.targetKey] ?? null
items.push({
originalKey: mapping.originalKey,
targetKey: mapping.targetKey,
sourceCategory: category,
defaultValue,
source: 'redux'
})
}
}
return items
}
}

View File

@ -0,0 +1,21 @@
/**
* Migrator registration and exports
*/
export { BaseMigrator } from './BaseMigrator'
// Import all migrators
import { AssistantMigrator } from './AssistantMigrator'
import { ChatMigrator } from './ChatMigrator'
import { KnowledgeMigrator } from './KnowledgeMigrator'
import { PreferencesMigrator } from './PreferencesMigrator'
// Export migrator classes
export { AssistantMigrator, ChatMigrator, KnowledgeMigrator, PreferencesMigrator }
/**
* Get all registered migrators in execution order
*/
export function getAllMigrators() {
return [new PreferencesMigrator(), new AssistantMigrator(), new KnowledgeMigrator(), new ChatMigrator()]
}

View File

@ -0,0 +1,755 @@
/**
* Auto-generated preference mappings from classification.json
* Generated at: 2025-09-02T06:27:50.213Z
*
* This file contains pure mapping relationships without default values.
* Default values are managed in packages/shared/data/preferences.ts
*
* === AUTO-GENERATED CONTENT START ===
*/
/**
* ElectronStore mapping relationships
*
* ElectronStore values are not nested: each originalKey maps directly to configManager.get(key)
*/
export const ELECTRON_STORE_MAPPINGS = [
{
originalKey: 'ZoomFactor',
targetKey: 'app.zoom_factor'
}
] as const
/**
* Redux Store mapping relationships, grouped by category
*
* Redux Store values may be nested, so an originalKey can contain a dotted path:
* - e.g. "theme" -> reduxData.settings.theme
* - e.g. "codeEditor.enabled" -> reduxData.settings.codeEditor.enabled
* - e.g. "exportMenuOptions.docx" -> reduxData.settings.exportMenuOptions.docx
*/
export const REDUX_STORE_MAPPINGS = {
settings: [
{
originalKey: 'autoCheckUpdate',
targetKey: 'app.dist.auto_update.enabled'
},
{
originalKey: 'clickTrayToShowQuickAssistant',
targetKey: 'feature.quick_assistant.click_tray_to_show'
},
{
originalKey: 'disableHardwareAcceleration',
targetKey: 'app.disable_hardware_acceleration'
},
{
originalKey: 'enableDataCollection',
targetKey: 'app.privacy.data_collection.enabled'
},
{
originalKey: 'enableDeveloperMode',
targetKey: 'app.developer_mode.enabled'
},
{
originalKey: 'enableQuickAssistant',
targetKey: 'feature.quick_assistant.enabled'
},
{
originalKey: 'language',
targetKey: 'app.language'
},
{
originalKey: 'launchToTray',
targetKey: 'app.tray.on_launch'
},
{
originalKey: 'testChannel',
targetKey: 'app.dist.test_plan.channel'
},
{
originalKey: 'testPlan',
targetKey: 'app.dist.test_plan.enabled'
},
{
originalKey: 'theme',
targetKey: 'ui.theme_mode'
},
{
originalKey: 'tray',
targetKey: 'app.tray.enabled'
},
{
originalKey: 'trayOnClose',
targetKey: 'app.tray.on_close'
},
{
originalKey: 'showAssistants',
targetKey: 'assistant.tab.show'
},
{
originalKey: 'showTopics',
targetKey: 'topic.tab.show'
},
{
originalKey: 'assistantsTabSortType',
targetKey: 'assistant.tab.sort_type'
},
{
originalKey: 'sendMessageShortcut',
targetKey: 'chat.input.send_message_shortcut'
},
{
originalKey: 'targetLanguage',
targetKey: 'feature.translate.target_language'
},
{
originalKey: 'proxyMode',
targetKey: 'app.proxy.mode'
},
{
originalKey: 'proxyUrl',
targetKey: 'app.proxy.url'
},
{
originalKey: 'proxyBypassRules',
targetKey: 'app.proxy.bypass_rules'
},
{
originalKey: 'userName',
targetKey: 'app.user.name'
},
{
originalKey: 'userId',
targetKey: 'app.user.id'
},
{
originalKey: 'showPrompt',
targetKey: 'chat.message.show_prompt'
},
{
originalKey: 'showMessageDivider',
targetKey: 'chat.message.show_divider'
},
{
originalKey: 'messageFont',
targetKey: 'chat.message.font'
},
{
originalKey: 'showInputEstimatedTokens',
targetKey: 'chat.input.show_estimated_tokens'
},
{
originalKey: 'launchOnBoot',
targetKey: 'app.launch_on_boot'
},
{
originalKey: 'userTheme.colorPrimary',
targetKey: 'ui.theme_user.color_primary'
},
{
originalKey: 'windowStyle',
targetKey: 'ui.window_style'
},
{
originalKey: 'fontSize',
targetKey: 'chat.message.font_size'
},
{
originalKey: 'topicPosition',
targetKey: 'topic.position'
},
{
originalKey: 'showTopicTime',
targetKey: 'topic.tab.show_time'
},
{
originalKey: 'pinTopicsToTop',
targetKey: 'topic.tab.pin_to_top'
},
{
originalKey: 'assistantIconType',
targetKey: 'assistant.icon_type'
},
{
originalKey: 'pasteLongTextAsFile',
targetKey: 'chat.input.paste_long_text_as_file'
},
{
originalKey: 'pasteLongTextThreshold',
targetKey: 'chat.input.paste_long_text_threshold'
},
{
originalKey: 'clickAssistantToShowTopic',
targetKey: 'assistant.click_to_show_topic'
},
{
originalKey: 'codeExecution.enabled',
targetKey: 'chat.code.execution.enabled'
},
{
originalKey: 'codeExecution.timeoutMinutes',
targetKey: 'chat.code.execution.timeout_minutes'
},
{
originalKey: 'codeEditor.enabled',
targetKey: 'chat.code.editor.enabled'
},
{
originalKey: 'codeEditor.themeLight',
targetKey: 'chat.code.editor.theme_light'
},
{
originalKey: 'codeEditor.themeDark',
targetKey: 'chat.code.editor.theme_dark'
},
{
originalKey: 'codeEditor.highlightActiveLine',
targetKey: 'chat.code.editor.highlight_active_line'
},
{
originalKey: 'codeEditor.foldGutter',
targetKey: 'chat.code.editor.fold_gutter'
},
{
originalKey: 'codeEditor.autocompletion',
targetKey: 'chat.code.editor.autocompletion'
},
{
originalKey: 'codeEditor.keymap',
targetKey: 'chat.code.editor.keymap'
},
{
originalKey: 'codePreview.themeLight',
targetKey: 'chat.code.preview.theme_light'
},
{
originalKey: 'codePreview.themeDark',
targetKey: 'chat.code.preview.theme_dark'
},
{
originalKey: 'codeViewer.themeLight',
targetKey: 'chat.code.viewer.theme_light'
},
{
originalKey: 'codeViewer.themeDark',
targetKey: 'chat.code.viewer.theme_dark'
},
{
originalKey: 'codeShowLineNumbers',
targetKey: 'chat.code.show_line_numbers'
},
{
originalKey: 'codeCollapsible',
targetKey: 'chat.code.collapsible'
},
{
originalKey: 'codeWrappable',
targetKey: 'chat.code.wrappable'
},
{
originalKey: 'codeImageTools',
targetKey: 'chat.code.image_tools'
},
{
originalKey: 'mathEngine',
targetKey: 'chat.message.math_engine'
},
{
originalKey: 'messageStyle',
targetKey: 'chat.message.style'
},
{
originalKey: 'foldDisplayMode',
targetKey: 'chat.message.multi_model.fold_display_mode'
},
{
originalKey: 'gridColumns',
targetKey: 'chat.message.multi_model.grid_columns'
},
{
originalKey: 'gridPopoverTrigger',
targetKey: 'chat.message.multi_model.grid_popover_trigger'
},
{
originalKey: 'messageNavigation',
targetKey: 'chat.message.navigation_mode'
},
{
originalKey: 'skipBackupFile',
targetKey: 'data.backup.general.skip_backup_file'
},
{
originalKey: 'webdavHost',
targetKey: 'data.backup.webdav.host'
},
{
originalKey: 'webdavUser',
targetKey: 'data.backup.webdav.user'
},
{
originalKey: 'webdavPass',
targetKey: 'data.backup.webdav.pass'
},
{
originalKey: 'webdavPath',
targetKey: 'data.backup.webdav.path'
},
{
originalKey: 'webdavAutoSync',
targetKey: 'data.backup.webdav.auto_sync'
},
{
originalKey: 'webdavSyncInterval',
targetKey: 'data.backup.webdav.sync_interval'
},
{
originalKey: 'webdavMaxBackups',
targetKey: 'data.backup.webdav.max_backups'
},
{
originalKey: 'webdavSkipBackupFile',
targetKey: 'data.backup.webdav.skip_backup_file'
},
{
originalKey: 'webdavDisableStream',
targetKey: 'data.backup.webdav.disable_stream'
},
{
originalKey: 'translateModelPrompt',
targetKey: 'feature.translate.model_prompt'
},
{
originalKey: 'autoTranslateWithSpace',
targetKey: 'chat.input.translate.auto_translate_with_space'
},
{
originalKey: 'showTranslateConfirm',
targetKey: 'chat.input.translate.show_confirm'
},
{
originalKey: 'enableTopicNaming',
targetKey: 'topic.naming.enabled'
},
{
originalKey: 'customCss',
targetKey: 'ui.custom_css'
},
{
originalKey: 'topicNamingPrompt',
targetKey: 'topic.naming.prompt'
},
{
originalKey: 'narrowMode',
targetKey: 'chat.narrow_mode'
},
{
originalKey: 'multiModelMessageStyle',
targetKey: 'chat.message.multi_model.style'
},
{
originalKey: 'readClipboardAtStartup',
targetKey: 'feature.quick_assistant.read_clipboard_at_startup'
},
{
originalKey: 'notionDatabaseID',
targetKey: 'data.integration.notion.database_id'
},
{
originalKey: 'notionApiKey',
targetKey: 'data.integration.notion.api_key'
},
{
originalKey: 'notionPageNameKey',
targetKey: 'data.integration.notion.page_name_key'
},
{
originalKey: 'markdownExportPath',
targetKey: 'data.export.markdown.path'
},
{
originalKey: 'forceDollarMathInMarkdown',
targetKey: 'data.export.markdown.force_dollar_math'
},
{
originalKey: 'useTopicNamingForMessageTitle',
targetKey: 'data.export.markdown.use_topic_naming_for_message_title'
},
{
originalKey: 'showModelNameInMarkdown',
targetKey: 'data.export.markdown.show_model_name'
},
{
originalKey: 'showModelProviderInMarkdown',
targetKey: 'data.export.markdown.show_model_provider'
},
{
originalKey: 'thoughtAutoCollapse',
targetKey: 'chat.message.thought.auto_collapse'
},
{
originalKey: 'notionExportReasoning',
targetKey: 'data.integration.notion.export_reasoning'
},
{
originalKey: 'excludeCitationsInExport',
targetKey: 'data.export.markdown.exclude_citations'
},
{
originalKey: 'standardizeCitationsInExport',
targetKey: 'data.export.markdown.standardize_citations'
},
{
originalKey: 'yuqueToken',
targetKey: 'data.integration.yuque.token'
},
{
originalKey: 'yuqueUrl',
targetKey: 'data.integration.yuque.url'
},
{
originalKey: 'yuqueRepoId',
targetKey: 'data.integration.yuque.repo_id'
},
{
originalKey: 'joplinToken',
targetKey: 'data.integration.joplin.token'
},
{
originalKey: 'joplinUrl',
targetKey: 'data.integration.joplin.url'
},
{
originalKey: 'joplinExportReasoning',
targetKey: 'data.integration.joplin.export_reasoning'
},
{
originalKey: 'defaultObsidianVault',
targetKey: 'data.integration.obsidian.default_vault'
},
{
originalKey: 'siyuanApiUrl',
targetKey: 'data.integration.siyuan.api_url'
},
{
originalKey: 'siyuanToken',
targetKey: 'data.integration.siyuan.token'
},
{
originalKey: 'siyuanBoxId',
targetKey: 'data.integration.siyuan.box_id'
},
{
originalKey: 'siyuanRootPath',
targetKey: 'data.integration.siyuan.root_path'
},
{
originalKey: 'maxKeepAliveMinapps',
targetKey: 'feature.minapp.max_keep_alive'
},
{
originalKey: 'showOpenedMinappsInSidebar',
targetKey: 'feature.minapp.show_opened_in_sidebar'
},
{
originalKey: 'minappsOpenLinkExternal',
targetKey: 'feature.minapp.open_link_external'
},
{
originalKey: 'enableSpellCheck',
targetKey: 'app.spell_check.enabled'
},
{
originalKey: 'spellCheckLanguages',
targetKey: 'app.spell_check.languages'
},
{
originalKey: 'enableQuickPanelTriggers',
targetKey: 'chat.input.quick_panel.triggers_enabled'
},
{
originalKey: 'exportMenuOptions.image',
targetKey: 'data.export.menus.image'
},
{
originalKey: 'exportMenuOptions.markdown',
targetKey: 'data.export.menus.markdown'
},
{
originalKey: 'exportMenuOptions.markdown_reason',
targetKey: 'data.export.menus.markdown_reason'
},
{
originalKey: 'exportMenuOptions.notion',
targetKey: 'data.export.menus.notion'
},
{
originalKey: 'exportMenuOptions.yuque',
targetKey: 'data.export.menus.yuque'
},
{
originalKey: 'exportMenuOptions.joplin',
targetKey: 'data.export.menus.joplin'
},
{
originalKey: 'exportMenuOptions.obsidian',
targetKey: 'data.export.menus.obsidian'
},
{
originalKey: 'exportMenuOptions.siyuan',
targetKey: 'data.export.menus.siyuan'
},
{
originalKey: 'exportMenuOptions.docx',
targetKey: 'data.export.menus.docx'
},
{
originalKey: 'exportMenuOptions.plain_text',
targetKey: 'data.export.menus.plain_text'
},
{
originalKey: 'notification.assistant',
targetKey: 'app.notification.assistant.enabled'
},
{
originalKey: 'notification.backup',
targetKey: 'app.notification.backup.enabled'
},
{
originalKey: 'notification.knowledge',
targetKey: 'app.notification.knowledge.enabled'
},
{
originalKey: 'localBackupDir',
targetKey: 'data.backup.local.dir'
},
{
originalKey: 'localBackupAutoSync',
targetKey: 'data.backup.local.auto_sync'
},
{
originalKey: 'localBackupSyncInterval',
targetKey: 'data.backup.local.sync_interval'
},
{
originalKey: 'localBackupMaxBackups',
targetKey: 'data.backup.local.max_backups'
},
{
originalKey: 'localBackupSkipBackupFile',
targetKey: 'data.backup.local.skip_backup_file'
},
{
originalKey: 's3.endpoint',
targetKey: 'data.backup.s3.endpoint'
},
{
originalKey: 's3.region',
targetKey: 'data.backup.s3.region'
},
{
originalKey: 's3.bucket',
targetKey: 'data.backup.s3.bucket'
},
{
originalKey: 's3.accessKeyId',
targetKey: 'data.backup.s3.access_key_id'
},
{
originalKey: 's3.secretAccessKey',
targetKey: 'data.backup.s3.secret_access_key'
},
{
originalKey: 's3.root',
targetKey: 'data.backup.s3.root'
},
{
originalKey: 's3.autoSync',
targetKey: 'data.backup.s3.auto_sync'
},
{
originalKey: 's3.syncInterval',
targetKey: 'data.backup.s3.sync_interval'
},
{
originalKey: 's3.maxBackups',
targetKey: 'data.backup.s3.max_backups'
},
{
originalKey: 's3.skipBackupFile',
targetKey: 'data.backup.s3.skip_backup_file'
},
{
originalKey: 'navbarPosition',
targetKey: 'ui.navbar.position'
},
{
originalKey: 'apiServer.enabled',
targetKey: 'feature.csaas.enabled'
},
{
originalKey: 'apiServer.host',
targetKey: 'feature.csaas.host'
},
{
originalKey: 'apiServer.port',
targetKey: 'feature.csaas.port'
},
{
originalKey: 'apiServer.apiKey',
targetKey: 'feature.csaas.api_key'
}
],
selectionStore: [
{
originalKey: 'selectionEnabled',
targetKey: 'feature.selection.enabled'
},
{
originalKey: 'filterList',
targetKey: 'feature.selection.filter_list'
},
{
originalKey: 'filterMode',
targetKey: 'feature.selection.filter_mode'
},
{
originalKey: 'isFollowToolbar',
targetKey: 'feature.selection.follow_toolbar'
},
{
originalKey: 'isRemeberWinSize',
targetKey: 'feature.selection.remember_win_size'
},
{
originalKey: 'triggerMode',
targetKey: 'feature.selection.trigger_mode'
},
{
originalKey: 'isCompact',
targetKey: 'feature.selection.compact'
},
{
originalKey: 'isAutoClose',
targetKey: 'feature.selection.auto_close'
},
{
originalKey: 'isAutoPin',
targetKey: 'feature.selection.auto_pin'
},
{
originalKey: 'actionWindowOpacity',
targetKey: 'feature.selection.action_window_opacity'
},
{
originalKey: 'actionItems',
targetKey: 'feature.selection.action_items'
}
],
nutstore: [
{
originalKey: 'nutstoreToken',
targetKey: 'data.backup.nutstore.token'
},
{
originalKey: 'nutstorePath',
targetKey: 'data.backup.nutstore.path'
},
{
originalKey: 'nutstoreAutoSync',
targetKey: 'data.backup.nutstore.auto_sync'
},
{
originalKey: 'nutstoreSyncInterval',
targetKey: 'data.backup.nutstore.sync_interval'
},
{
originalKey: 'nutstoreSyncState',
targetKey: 'data.backup.nutstore.sync_state'
},
{
originalKey: 'nutstoreSkipBackupFile',
targetKey: 'data.backup.nutstore.skip_backup_file'
}
],
shortcuts: [
{
originalKey: 'shortcuts.zoom_in',
targetKey: 'shortcut.app.zoom_in'
},
{
originalKey: 'shortcuts.zoom_out',
targetKey: 'shortcut.app.zoom_out'
},
{
originalKey: 'shortcuts.zoom_reset',
targetKey: 'shortcut.app.zoom_reset'
},
{
originalKey: 'shortcuts.show_settings',
targetKey: 'shortcut.app.show_settings'
},
{
originalKey: 'shortcuts.show_app',
targetKey: 'shortcut.app.show_main_window'
},
{
originalKey: 'shortcuts.mini_window',
targetKey: 'shortcut.app.show_mini_window'
},
{
originalKey: 'shortcuts.selection_assistant_toggle',
targetKey: 'shortcut.selection.toggle_enabled'
},
{
originalKey: 'shortcuts.selection_assistant_select_text',
targetKey: 'shortcut.selection.get_text'
},
{
originalKey: 'shortcuts.new_topic',
targetKey: 'shortcut.topic.new'
},
{
originalKey: 'shortcuts.toggle_show_assistants',
targetKey: 'shortcut.app.toggle_show_assistants'
},
{
originalKey: 'shortcuts.copy_last_message',
targetKey: 'shortcut.chat.copy_last_message'
},
{
originalKey: 'shortcuts.search_message_in_chat',
targetKey: 'shortcut.chat.search_message'
},
{
originalKey: 'shortcuts.search_message',
targetKey: 'shortcut.app.search_message'
},
{
originalKey: 'shortcuts.clear_topic',
targetKey: 'shortcut.chat.clear'
},
{
originalKey: 'shortcuts.toggle_new_context',
targetKey: 'shortcut.chat.toggle_new_context'
},
{
originalKey: 'shortcuts.exit_fullscreen',
targetKey: 'shortcut.app.exit_fullscreen'
}
]
} as const
// === AUTO-GENERATED CONTENT END ===
/**
* Summary:
* - ElectronStore items: 1
* - Redux Store items: 175
* - Redux categories: settings, selectionStore, nutstore, shortcuts
* - Total preference items: 176
*
* Usage:
* 1. ElectronStore read: configManager.get(mapping.originalKey)
* 2. Redux read: resolve the (possibly nested) path reduxData[category][originalKey]
* 3. Default values: fall back to DefaultPreferences.default[mapping.targetKey]
*/
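
A short sketch of consuming one mapping group per the usage notes above; the relative import paths are illustrative, and the default fallback mirrors what PreferencesMigrator does:

import { DefaultPreferences } from '@shared/data/preference/preferenceSchemas'
import { ReduxStateReader } from '../../utils/ReduxStateReader'
import { REDUX_STORE_MAPPINGS } from './PreferencesMappings'

function resolveSettingsPreferences(reduxData: Record<string, unknown>) {
  const reader = new ReduxStateReader(reduxData)
  return REDUX_STORE_MAPPINGS.settings.map((mapping) => {
    // Nested keys like 'codeEditor.enabled' are resolved by the reader
    const value = reader.get('settings', mapping.originalKey)
    return {
      key: mapping.targetKey,
      value: value ?? DefaultPreferences.default[mapping.targetKey] ?? null
    }
  })
}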

View File

@ -0,0 +1,68 @@
/**
* Dexie file reader for accessing exported Dexie table data
* Dexie data is exported by Renderer to JSON files
*/
import fs from 'fs/promises'
import path from 'path'
import { JSONStreamReader } from './JSONStreamReader'
export class DexieFileReader {
private exportPath: string
constructor(exportPath: string) {
this.exportPath = exportPath
}
/**
* Get the export path
*/
getExportPath(): string {
return this.exportPath
}
/**
* Read exported table data (for small tables)
* @param tableName - Name of the table to read
*/
async readTable<T>(tableName: string): Promise<T[]> {
const filePath = path.join(this.exportPath, `${tableName}.json`)
const content = await fs.readFile(filePath, 'utf-8')
return JSON.parse(content)
}
/**
* Create stream reader for large tables
* Use this for tables with large amounts of data (e.g., messages)
* @param tableName - Name of the table to stream
*/
createStreamReader(tableName: string): JSONStreamReader {
const filePath = path.join(this.exportPath, `${tableName}.json`)
return new JSONStreamReader(filePath)
}
/**
* Check if a table export file exists
* @param tableName - Name of the table
*/
async tableExists(tableName: string): Promise<boolean> {
const filePath = path.join(this.exportPath, `${tableName}.json`)
try {
await fs.access(filePath)
return true
} catch {
return false
}
}
/**
* Get file size for a table export
* @param tableName - Name of the table
*/
async getTableFileSize(tableName: string): Promise<number> {
const filePath = path.join(this.exportPath, `${tableName}.json`)
const stats = await fs.stat(filePath)
return stats.size
}
}
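
A small usage sketch choosing between the in-memory and streaming readers; the table name and size threshold are illustrative:

import { DexieFileReader } from './DexieFileReader'

const LARGE_EXPORT_BYTES = 50 * 1024 * 1024 // 50 MB, illustrative

async function countExportedRows(exportPath: string, tableName: string): Promise<number> {
  const reader = new DexieFileReader(exportPath)
  if (!(await reader.tableExists(tableName))) return 0

  if ((await reader.getTableFileSize(tableName)) < LARGE_EXPORT_BYTES) {
    // Small export: load the whole array at once
    return (await reader.readTable<unknown>(tableName)).length
  }
  // Large export: stream it to avoid holding the file in memory
  return reader.createStreamReader(tableName).count()
}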

View File

@ -0,0 +1,130 @@
/**
* Streaming JSON reader for processing large JSON array files
* Uses stream-json library to avoid loading entire file into memory
*/
import { createReadStream } from 'fs'
import { parser } from 'stream-json'
import { streamArray } from 'stream-json/streamers/StreamArray'
export class JSONStreamReader {
private filePath: string
constructor(filePath: string) {
this.filePath = filePath
}
/**
* Read JSON array in streaming mode with batch processing
* @param batchSize - Number of items per batch
* @param onBatch - Callback for each batch
* @returns Total number of items processed
*/
async readInBatches<T>(
batchSize: number,
onBatch: (items: T[], batchIndex: number) => Promise<void>
): Promise<number> {
return new Promise((resolve, reject) => {
const pipeline = createReadStream(this.filePath).pipe(parser()).pipe(streamArray())
let batch: T[] = []
let batchIndex = 0
let totalCount = 0
let isPaused = false
const processBatch = async () => {
if (batch.length === 0) return
const currentBatch = batch
batch = []
isPaused = true
pipeline.pause()
try {
await onBatch(currentBatch, batchIndex++)
isPaused = false
pipeline.resume()
} catch (error) {
reject(error)
}
}
pipeline.on('data', async ({ value }: { value: T }) => {
batch.push(value)
totalCount++
if (batch.length >= batchSize && !isPaused) {
await processBatch()
}
})
pipeline.on('end', async () => {
try {
// Process remaining items
if (batch.length > 0) {
await onBatch(batch, batchIndex)
}
resolve(totalCount)
} catch (error) {
reject(error)
}
})
pipeline.on('error', reject)
})
}
/**
* Count total items in the JSON array without loading all data
*/
async count(): Promise<number> {
return new Promise((resolve, reject) => {
const pipeline = createReadStream(this.filePath).pipe(parser()).pipe(streamArray())
let count = 0
pipeline.on('data', () => {
count++
})
pipeline.on('end', () => {
resolve(count)
})
pipeline.on('error', reject)
})
}
/**
* Read first N items for sampling/validation
* @param n - Number of items to read
*/
async readSample<T>(n: number): Promise<T[]> {
return new Promise((resolve, reject) => {
const pipeline = createReadStream(this.filePath).pipe(parser()).pipe(streamArray())
const items: T[] = []
pipeline.on('data', ({ value }: { value: T }) => {
items.push(value)
if (items.length >= n) {
pipeline.destroy()
resolve(items)
}
})
pipeline.on('end', () => {
resolve(items)
})
pipeline.on('error', (error) => {
// Ignore error from destroy()
if (items.length >= n) {
resolve(items)
} else {
reject(error)
}
})
})
}
}
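
A brief prepare-phase-style sketch using the sampling and counting helpers:

import { JSONStreamReader } from './JSONStreamReader'

async function inspectExportFile(filePath: string) {
  const reader = new JSONStreamReader(filePath)

  // Sample a few records to sanity-check the export format
  const sample = await reader.readSample<Record<string, unknown>>(5)
  // Then count the full array without loading it into memory
  const total = await reader.count()

  return { total, sampleKeys: sample.map((item) => Object.keys(item)) }
}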

View File

@ -0,0 +1,65 @@
/**
* Redux state reader for accessing Redux Persist data
* Data is parsed by Renderer before IPC transfer
*/
export class ReduxStateReader {
private data: Record<string, unknown>
constructor(rawData: Record<string, unknown>) {
this.data = rawData
}
/**
* Read value from Redux state with nested path support
* @param category - Top-level category (e.g., 'settings', 'assistants')
* @param key - Key within category, supports dot notation (e.g., 'codeEditor.enabled')
* @returns The value or undefined if not found
* @example
* reader.get('settings', 'codeEditor.enabled')
* reader.get('assistants', 'defaultAssistant')
*/
get<T>(category: string, key: string): T | undefined {
const categoryData = this.data[category]
if (!categoryData) return undefined
// Support nested paths like "codeEditor.enabled"
if (key.includes('.')) {
const keyPath = key.split('.')
let current: unknown = categoryData
for (const segment of keyPath) {
if (current && typeof current === 'object') {
current = (current as Record<string, unknown>)[segment]
} else {
return undefined
}
}
return current as T
}
return (categoryData as Record<string, unknown>)[key] as T
}
/**
* Get entire category data
* @param category - Category name
*/
getCategory<T>(category: string): T | undefined {
return this.data[category] as T | undefined
}
/**
* Check if a category exists
*/
hasCategory(category: string): boolean {
return category in this.data
}
/**
* Get all available categories
*/
getCategories(): string[] {
return Object.keys(this.data)
}
}

View File

@ -0,0 +1,442 @@
/**
* IPC handler for migration communication between Main and Renderer
*/
import { loggerService } from '@logger'
import BackupManager from '@main/services/BackupManager'
import { app, dialog, ipcMain } from 'electron'
import fs from 'fs/promises'
import path from 'path'
import { migrationEngine } from '../core/MigrationEngine'
import { MigrationIpcChannels, type MigrationProgress } from '../core/types'
import { migrationWindowManager } from './MigrationWindowManager'
const logger = loggerService.withContext('MigrationIpcHandler')
// Store for cached data from Renderer
let cachedReduxData: Record<string, unknown> | null = null
let cachedDexieExportPath: string | null = null
const backupManager = new BackupManager()
// Current migration progress
let currentProgress: MigrationProgress = {
stage: 'introduction',
overallProgress: 0,
currentMessage: 'Ready to start data migration',
migrators: []
}
/**
* Register all migration IPC handlers
*/
export function registerMigrationIpcHandlers(): void {
logger.info('Registering migration IPC handlers')
// Get user data path
ipcMain.handle(MigrationIpcChannels.GetUserDataPath, () => {
return app.getPath('userData')
})
// Check if migration is needed
ipcMain.handle(MigrationIpcChannels.CheckNeeded, async () => {
try {
return await migrationEngine.needsMigration()
} catch (error) {
logger.error('Error checking migration needed', error as Error)
throw error
}
})
// Get current progress
ipcMain.handle(MigrationIpcChannels.GetProgress, () => {
return currentProgress
})
// Get last error
ipcMain.handle(MigrationIpcChannels.GetLastError, async () => {
try {
return await migrationEngine.getLastError()
} catch (error) {
logger.error('Error getting last error', error as Error)
throw error
}
})
// Proceed to backup stage
ipcMain.handle(MigrationIpcChannels.ProceedToBackup, async () => {
try {
updateProgress({
stage: 'backup_required',
overallProgress: 0,
currentMessage: 'Data backup is required before migration can proceed',
migrators: []
})
return true
} catch (error) {
logger.error('Error proceeding to backup', error as Error)
throw error
}
})
// Show Backup Dialog
ipcMain.handle(MigrationIpcChannels.ShowBackupDialog, async () => {
try {
logger.info('Opening backup dialog for migration')
// Update progress to indicate backup dialog is opening
updateProgress({
stage: 'backup_progress',
overallProgress: 10,
currentMessage: 'Opening backup dialog...',
migrators: []
})
const result = await dialog.showSaveDialog({
title: 'Save Migration Backup',
defaultPath: `cherry-studio-migration-backup-${new Date().toISOString().split('T')[0]}.zip`,
filters: [
{ name: 'Backup Files', extensions: ['zip'] },
{ name: 'All Files', extensions: ['*'] }
]
})
if (!result.canceled && result.filePath) {
logger.info('User selected backup location', { filePath: result.filePath })
updateProgress({
stage: 'backup_progress',
overallProgress: 10,
currentMessage: 'Creating backup file...',
migrators: []
})
// Perform the actual backup to the selected location
const backupResult = await performBackupToFile(result.filePath)
if (backupResult.success) {
updateProgress({
stage: 'backup_progress',
overallProgress: 100,
currentMessage: 'Backup created successfully!',
migrators: []
})
// Wait a moment to show the success message, then transition to confirmed state
setTimeout(() => {
updateProgress({
stage: 'backup_confirmed',
overallProgress: 100,
currentMessage: 'Backup completed! Ready to start migration. Click "Start Migration" to continue.',
migrators: []
})
}, 1000)
} else {
updateProgress({
stage: 'backup_required',
overallProgress: 0,
currentMessage: `Backup failed: ${backupResult.error}`,
migrators: []
})
}
return backupResult
} else {
logger.info('User cancelled backup dialog')
updateProgress({
stage: 'backup_required',
overallProgress: 0,
currentMessage: 'Backup cancelled. Please create a backup to continue.',
migrators: []
})
return { success: false, error: 'Backup cancelled by user' }
}
} catch (error) {
logger.error('Error showing backup dialog', error as Error)
updateProgress({
stage: 'backup_required',
overallProgress: 0,
currentMessage: 'Backup process failed',
migrators: []
})
throw error
}
})
// Backup completed
ipcMain.handle(MigrationIpcChannels.BackupCompleted, async () => {
try {
updateProgress({
stage: 'backup_confirmed',
overallProgress: 100,
currentMessage: 'Backup completed! Ready to start migration. Click "Start Migration" to continue.',
migrators: []
})
return true
} catch (error) {
logger.error('Error confirming backup', error as Error)
throw error
}
})
// Receive Redux data from Renderer
ipcMain.handle(MigrationIpcChannels.SendReduxData, async (_event, data: Record<string, unknown>) => {
try {
cachedReduxData = data
logger.info('Redux data received', {
categories: Object.keys(data)
})
return true
} catch (error) {
logger.error('Error receiving Redux data', error as Error)
throw error
}
})
// Dexie export completed
ipcMain.handle(MigrationIpcChannels.DexieExportCompleted, async (_event, exportPath: string) => {
try {
cachedDexieExportPath = exportPath
logger.info('Dexie export completed', { exportPath })
return true
} catch (error) {
logger.error('Error receiving Dexie export path', error as Error)
throw error
}
})
// Write export file from Renderer
ipcMain.handle(
MigrationIpcChannels.WriteExportFile,
async (_event, exportPath: string, tableName: string, jsonData: string) => {
try {
// Ensure export directory exists
await fs.mkdir(exportPath, { recursive: true })
// Write table data to file
const filePath = path.join(exportPath, `${tableName}.json`)
await fs.writeFile(filePath, jsonData, 'utf-8')
logger.info('Export file written', { tableName, filePath })
return true
} catch (error) {
logger.error('Error writing export file', error as Error)
throw error
}
}
)
// Start the migration process
ipcMain.handle(MigrationIpcChannels.StartMigration, async () => {
try {
if (!cachedReduxData || !cachedDexieExportPath) {
throw new Error('Migration data not ready. Redux data or Dexie export path missing.')
}
// Set up progress callback
migrationEngine.onProgress((progress) => {
updateProgress(progress)
})
// Run migration
const result = await migrationEngine.run(cachedReduxData, cachedDexieExportPath)
if (result.success) {
updateProgress({
stage: 'completed',
overallProgress: 100,
currentMessage: 'Migration completed successfully! Click restart to continue.',
migrators: currentProgress.migrators.map((m) => ({
...m,
status: 'completed'
}))
})
} else {
updateProgress({
stage: 'error',
overallProgress: currentProgress.overallProgress,
currentMessage: result.error || 'Migration failed',
migrators: currentProgress.migrators,
error: result.error
})
}
return result
} catch (error) {
const errorMessage = error instanceof Error ? error.message : String(error)
logger.error('Error starting migration', error as Error)
updateProgress({
stage: 'error',
overallProgress: currentProgress.overallProgress,
currentMessage: errorMessage,
migrators: currentProgress.migrators,
error: errorMessage
})
throw error
}
})
// Retry migration
ipcMain.handle(MigrationIpcChannels.Retry, async () => {
try {
// Reset to backup confirmed stage
updateProgress({
stage: 'backup_confirmed',
overallProgress: 0,
currentMessage: 'Ready to retry migration',
migrators: []
})
return true
} catch (error) {
logger.error('Error retrying migration', error as Error)
throw error
}
})
// Cancel migration
ipcMain.handle(MigrationIpcChannels.Cancel, async () => {
try {
logger.info('Migration cancelled by user')
migrationWindowManager.close()
return true
} catch (error) {
logger.error('Error cancelling migration', error as Error)
throw error
}
})
// Restart app
ipcMain.handle(MigrationIpcChannels.Restart, async () => {
try {
logger.info('Restarting app after migration')
migrationWindowManager.restartApp()
return true
} catch (error) {
logger.error('Error restarting app', error as Error)
throw error
}
})
}
/**
* Unregister all migration IPC handlers
*/
export function unregisterMigrationIpcHandlers(): void {
logger.info('Unregistering migration IPC handlers')
const channels = Object.values(MigrationIpcChannels)
for (const channel of channels) {
ipcMain.removeHandler(channel)
}
}
/**
* Update progress and broadcast to window
*/
function updateProgress(progress: MigrationProgress): void {
currentProgress = progress
migrationWindowManager.send(MigrationIpcChannels.Progress, progress)
}
/**
* Reset cached data
*/
export function resetMigrationData(): void {
cachedReduxData = null
cachedDexieExportPath = null
currentProgress = {
stage: 'introduction',
overallProgress: 0,
currentMessage: 'Ready to start data migration',
migrators: []
}
}
/**
* Get backup data from the current application
*/
async function getBackupData(): Promise<string> {
try {
const { getDataPath } = await import('@main/utils')
const dataPath = getDataPath()
// Gather basic system information
const data = {
backup: {
timestamp: new Date().toISOString(),
version: app.getVersion(),
type: 'pre-migration-backup',
note: 'This is a safety backup created before data migration'
},
system: {
platform: process.platform,
arch: process.arch,
nodeVersion: process.version
},
// Include basic configuration files if they exist
configs: {} as Record<string, any>
}
// Check if there are any config files we should backup
const configFiles = ['config.json', 'settings.json', 'preferences.json']
for (const configFile of configFiles) {
const configPath = path.join(dataPath, configFile)
try {
// Check if file exists
await fs.access(configPath)
const configContent = await fs.readFile(configPath, 'utf-8')
data.configs[configFile] = JSON.parse(configContent)
} catch (err) {
// Ignore if file doesn't exist or can't be read
}
}
return JSON.stringify(data, null, 2)
} catch (error) {
logger.error('Failed to get backup data:', error as Error)
throw error
}
}
/**
* Perform backup to a specific file location
*/
async function performBackupToFile(filePath: string): Promise<{ success: boolean; error?: string }> {
try {
logger.info('Performing backup to file', { filePath })
// Get backup data
const backupData = await getBackupData()
// Extract directory and filename from the full path
const destinationDir = path.dirname(filePath)
const fileName = path.basename(filePath)
// Use the existing backup manager to create a backup
const backupPath = await backupManager.backup(
null as any, // IpcMainInvokeEvent - we're calling directly so pass null
fileName,
backupData,
destinationDir,
false // Don't skip backup files - full backup for migration safety
)
if (backupPath) {
logger.info('Backup created successfully', { path: backupPath })
return { success: true }
} else {
return {
success: false,
error: 'Backup process did not return a file path'
}
}
} catch (error) {
const errorMessage = error instanceof Error ? error.message : String(error)
logger.error('Backup failed during migration:', error as Error)
return {
success: false,
error: errorMessage
}
}
}
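/*
 * Illustrative sketch (not part of the handlers above): the renderer is expected to
 * feed data through these channels in this order before starting the migration,
 * because the StartMigration handler throws when the cached Redux data or Dexie
 * export path is missing. `ipcRenderer` refers to the bridge exposed by the preload
 * script; see useMigrationActions.startMigration for the real implementation.
 *
 *   await ipcRenderer.invoke(MigrationIpcChannels.SendReduxData, reduxData)
 *   await ipcRenderer.invoke(MigrationIpcChannels.DexieExportCompleted, exportPath)
 *   const result = await ipcRenderer.invoke(MigrationIpcChannels.StartMigration)
 */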

View File

@ -0,0 +1,142 @@
/**
* Migration window manager for creating and managing the migration window
*/
import { loggerService } from '@logger'
import { isDev } from '@main/constant'
import { app, BrowserWindow, dialog } from 'electron'
import { join } from 'path'
const logger = loggerService.withContext('MigrationWindowManager')
export class MigrationWindowManager {
private window: BrowserWindow | null = null
/**
* Check if migration window exists and is not destroyed
*/
hasWindow(): boolean {
return this.window !== null && !this.window.isDestroyed()
}
/**
* Get the current migration window
*/
getWindow(): BrowserWindow | null {
return this.window
}
/**
* Create and show the migration window
*/
create(): BrowserWindow {
if (this.hasWindow()) {
this.window!.show()
return this.window!
}
logger.info('Creating migration window')
this.window = new BrowserWindow({
width: 640,
height: 480,
resizable: false,
maximizable: false,
minimizable: false,
show: false,
frame: false,
autoHideMenuBar: true,
webPreferences: {
preload: join(__dirname, '../preload/simplest.js'),
sandbox: false,
webSecurity: false,
contextIsolation: true
}
})
// Load the migration window
if (isDev && process.env['ELECTRON_RENDERER_URL']) {
this.window.loadURL(process.env['ELECTRON_RENDERER_URL'] + '/migrationV2.html')
} else {
this.window.loadFile(join(__dirname, '../renderer/migrationV2.html'))
}
this.window.once('ready-to-show', () => {
this.window?.show()
logger.info('Migration window shown')
})
this.window.on('closed', () => {
this.window = null
logger.info('Migration window closed')
})
return this.window
}
/**
* Wait for window to be ready
*/
async waitForReady(): Promise<void> {
if (!this.window) return
return new Promise<void>((resolve) => {
if (this.window!.webContents.isLoading()) {
this.window!.webContents.once('did-finish-load', () => resolve())
} else {
resolve()
}
})
}
/**
* Close the migration window
*/
close(): void {
if (this.hasWindow()) {
this.window!.close()
this.window = null
}
}
/**
* Send message to the migration window
*/
send(channel: string, ...args: unknown[]): void {
if (this.hasWindow()) {
this.window!.webContents.send(channel, ...args)
}
}
/**
* Restart the application
*/
async restartApp(): Promise<void> {
logger.info('Restarting application after migration')
// In development mode, relaunch might not work properly
if (isDev || !app.isPackaged) {
logger.warn('Development mode detected - showing restart instruction instead of auto-restart')
await dialog.showMessageBox({
type: 'info',
title: 'Migration Complete - Restart Required',
message:
'Data migration completed successfully!\n\nSince you are in development mode, please manually restart the application to continue.',
buttons: ['Close App'],
defaultId: 0
})
this.close()
app.quit()
} else {
// Production mode - clean up first, then relaunch
this.close()
app.relaunch()
app.exit(0)
}
}
}
// Export singleton instance
export const migrationWindowManager = new MigrationWindowManager()
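/*
 * Minimal usage sketch, assuming it runs in the Main process during startup before
 * any other window is created. MigrationIpcChannels and `progress` are assumed to
 * come from the migration IPC module; this is not the authoritative startup flow.
 *
 *   migrationWindowManager.create()
 *   await migrationWindowManager.waitForReady()
 *   migrationWindowManager.send(MigrationIpcChannels.Progress, progress)
 *   // ...and once the migration has succeeded:
 *   await migrationWindowManager.restartApp()
 */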

View File

@ -34,7 +34,13 @@ import { registerShortcuts } from './services/ShortcutService'
import { TrayService } from './services/TrayService'
import { versionService } from './services/VersionService'
import { windowService } from './services/WindowService'
import { dataRefactorMigrateService } from './data/migrate/dataRefactor/DataRefactorMigrateService'
import {
getAllMigrators,
migrationEngine,
migrationWindowManager,
registerMigrationIpcHandlers,
unregisterMigrationIpcHandlers
} from '@data/migration/v2'
import { dataApiService } from '@data/DataApiService'
import { cacheService } from '@data/CacheService'
import { initWebviewHotkeys } from './services/WebviewService'
@ -121,24 +127,36 @@ if (!app.requestSingleInstanceLock()) {
await dbService.migrateDb()
await dbService.migrateSeed('preference')
// Data Refactor Migration
// Data Migration v2
// Check if data migration is needed BEFORE creating any windows
try {
logger.info('Checking if data refactor migration is needed')
const isMigrated = await dataRefactorMigrateService.isMigrated()
logger.info('Migration status check result', { isMigrated })
logger.info('Checking if data migration v2 is needed')
if (!isMigrated) {
logger.info('Data Refactor Migration needed, starting migration process')
// Register migration IPC handlers
registerMigrationIpcHandlers()
// Register migrators
migrationEngine.registerMigrators(getAllMigrators())
const needsMigration = await migrationEngine.needsMigration()
logger.info('Migration status check result', { needsMigration })
if (needsMigration) {
logger.info('Data Migration v2 needed, starting migration process')
try {
await dataRefactorMigrateService.runMigration()
// Create and show migration window
migrationWindowManager.create()
await migrationWindowManager.waitForReady()
logger.info('Migration window created successfully')
// Migration service will handle the migration flow, no need to continue startup
// Migration window will handle the flow, no need to continue startup
return
} catch (migrationError) {
logger.error('Failed to start migration process', migrationError as Error)
// Cleanup IPC handlers on failure
unregisterMigrationIpcHandlers()
// Migration is required for this version - show error and exit
await dialog.showErrorBox(
'Migration Required - Application Cannot Start',

View File

@ -0,0 +1,61 @@
<!doctype html>
<html>
<head>
<meta charset="UTF-8" />
<title>Cherry Studio - Data Migration</title>
<meta
http-equiv="Content-Security-Policy"
content="default-src 'self'; script-src 'self' 'unsafe-inline'; style-src 'self' 'unsafe-inline'; img-src 'self' data: blob:;" />
<meta name="viewport" content="width=device-width, initial-scale=1.0" />
</head>
<body id="root" theme-mode="light">
<script type="module" src="/src/windows/migrationV2/entryPoint.tsx"></script>
<style>
html {
margin: 0;
padding: 0;
box-sizing: border-box;
}
body {
width: 100vw;
height: 100vh;
margin: 0;
padding: 0;
box-sizing: border-box;
}
#root {
margin: 0;
padding: 0;
width: 100%;
height: 100%;
box-sizing: border-box;
}
/* Custom button styles */
.ant-btn-primary {
background-color: var(--color-primary);
border-color: var(--color-primary);
}
.ant-btn-primary:hover {
background-color: var(--color-primary-soft) !important;
border-color: var(--color-primary-soft) !important;
}
.ant-btn-primary:active,
.ant-btn-primary:focus {
background-color: var(--color-primary) !important;
border-color: var(--color-primary) !important;
}
/* Non-primary button hover styles */
.ant-btn:not(.ant-btn-primary):hover {
border-color: var(--color-primary-soft) !important;
color: var(--color-primary) !important;
}
</style>
</body>
</html>

View File

@ -0,0 +1,451 @@
import { Button } from '@cherrystudio/ui'
import { AppLogo } from '@renderer/config/env'
import { loggerService } from '@renderer/services/LoggerService'
import { Progress, Space, Steps } from 'antd'
import { AlertTriangle, CheckCircle, CheckCircle2, Database, Loader2, Rocket } from 'lucide-react'
import React, { useMemo, useState } from 'react'
import styled from 'styled-components'
import { MigratorProgressList } from './components'
import { DexieExporter, ReduxExporter } from './exporters'
import { useMigrationActions, useMigrationProgress } from './hooks/useMigrationProgress'
import { MigrationIpcChannels } from './types'
const logger = loggerService.withContext('MigrationApp')
const MigrationApp: React.FC = () => {
const { progress, lastError } = useMigrationProgress()
const actions = useMigrationActions()
const [isLoading, setIsLoading] = useState(false)
const handleStartMigration = async () => {
setIsLoading(true)
try {
logger.info('Starting migration process...')
// Export Redux data
const reduxExporter = new ReduxExporter()
const reduxResult = reduxExporter.export()
logger.info('Redux data exported', {
slicesFound: reduxResult.slicesFound,
slicesMissing: reduxResult.slicesMissing
})
// Export Dexie data
const userDataPath = await window.electron.ipcRenderer.invoke(MigrationIpcChannels.GetUserDataPath)
const exportPath = `${userDataPath}/migration_temp/dexie_export`
const dexieExporter = new DexieExporter(exportPath)
await dexieExporter.exportAll((p) => {
logger.info('Dexie export progress', p)
})
logger.info('Dexie data exported', { exportPath })
// Start migration with exported data
await actions.startMigration(reduxResult.data, exportPath)
} catch (error) {
logger.error('Failed to start migration', error as Error)
} finally {
setIsLoading(false)
}
}
const currentStep = useMemo(() => {
switch (progress.stage) {
case 'introduction':
return 0
case 'backup_required':
case 'backup_progress':
case 'backup_confirmed':
return 1
case 'migration':
return 2
case 'completed':
return 3
case 'error':
return -1
default:
return 0
}
}, [progress.stage])
const stepStatus = useMemo(() => {
if (progress.stage === 'error') {
return 'error'
}
return 'process'
}, [progress.stage])
const getProgressColor = () => {
switch (progress.stage) {
case 'completed':
return 'var(--color-primary)'
case 'error':
return '#ff4d4f'
default:
return 'var(--color-primary)'
}
}
const getCurrentStepIcon = () => {
switch (progress.stage) {
case 'introduction':
return <Rocket size={48} color="var(--color-primary)" />
case 'backup_required':
case 'backup_progress':
return <Database size={48} color="var(--color-primary)" />
case 'backup_confirmed':
return <CheckCircle size={48} color="var(--color-primary)" />
case 'migration':
return (
<SpinningIcon>
<Loader2 size={48} color="var(--color-primary)" />
</SpinningIcon>
)
case 'completed':
return <CheckCircle2 size={48} color="var(--color-primary)" />
case 'error':
return <AlertTriangle size={48} color="#ff4d4f" />
default:
return <Rocket size={48} color="var(--color-primary)" />
}
}
const renderActionButtons = () => {
switch (progress.stage) {
case 'introduction':
return (
<>
<Button onClick={actions.cancel}>Cancel</Button>
<Spacer />
<Button onClick={actions.proceedToBackup}>Next</Button>
</>
)
case 'backup_required':
return (
<>
<Button onClick={actions.cancel}>Cancel</Button>
<Spacer />
<Space>
<Button onClick={actions.showBackupDialog}>Create Backup</Button>
<Button onClick={actions.confirmBackup}>I Have a Backup</Button>
</Space>
</>
)
case 'backup_progress':
return (
<ButtonRow>
<div></div>
<Button disabled loading>
Backing up...
</Button>
</ButtonRow>
)
case 'backup_confirmed':
return (
<ButtonRow>
<Button onClick={actions.cancel}>Cancel</Button>
<Space>
<Button onClick={handleStartMigration} loading={isLoading}>
Start Migration
</Button>
</Space>
</ButtonRow>
)
case 'migration':
return (
<ButtonRow>
<div></div>
<Button disabled>Migrating...</Button>
</ButtonRow>
)
case 'completed':
return (
<ButtonRow>
<div></div>
<Button onClick={actions.restart}>Restart App</Button>
</ButtonRow>
)
case 'error':
return (
<ButtonRow>
<Button onClick={actions.cancel}>Exit</Button>
<Space>
<Button onClick={actions.retry}>Retry</Button>
</Space>
</ButtonRow>
)
default:
return null
}
}
return (
<Container>
<Header>
<HeaderLogo src={AppLogo} />
<HeaderTitle>Data Migration</HeaderTitle>
</Header>
<MainContent>
<LeftSidebar>
<StepsContainer>
<Steps
direction="vertical"
current={currentStep}
status={stepStatus}
size="small"
items={[{ title: 'Introduction' }, { title: 'Backup' }, { title: 'Migration' }, { title: 'Done' }]}
/>
</StepsContainer>
</LeftSidebar>
<RightContent>
<ContentArea>
<InfoIcon>{getCurrentStepIcon()}</InfoIcon>
{progress.stage === 'introduction' && (
<InfoCard>
<InfoTitle>Data Migration Required</InfoTitle>
<InfoDescription>
Cherry Studio has significantly restructured the way it stores and uses your data.
<br />
<br />
Before this version can be used, your existing data needs to be migrated to the new format.
<br />
<br />
This wizard will walk you through backing up your data and then migrating it.
</InfoDescription>
</InfoCard>
)}
{progress.stage === 'backup_required' && (
<InfoCard variant="warning">
<InfoTitle>Back Up Your Data First</InfoTitle>
<InfoDescription>
To protect against data loss, please create a full backup before starting the migration, or confirm that you already have a recent backup.
</InfoDescription>
</InfoCard>
)}
{progress.stage === 'backup_progress' && (
<InfoCard variant="warning">
<InfoTitle>Creating Backup</InfoTitle>
<InfoDescription>Please wait while the backup is being created. Do not close the application.</InfoDescription>
</InfoCard>
)}
{progress.stage === 'backup_confirmed' && (
<InfoCard variant="success">
<InfoTitle>Backup Complete</InfoTitle>
<InfoDescription>Your backup has been created. You can now start the data migration.</InfoDescription>
</InfoCard>
)}
{progress.stage === 'migration' && (
<div style={{ width: '100%', maxWidth: '600px', margin: '0 auto' }}>
<InfoCard>
<InfoTitle>Migrating Data...</InfoTitle>
<InfoDescription>{progress.currentMessage}</InfoDescription>
</InfoCard>
<ProgressContainer>
<Progress
percent={Math.round(progress.overallProgress)}
strokeColor={getProgressColor()}
trailColor="#f0f0f0"
/>
</ProgressContainer>
<div style={{ marginTop: '20px', height: '200px', overflowY: 'auto' }}>
<MigratorProgressList migrators={progress.migrators} overallProgress={progress.overallProgress} />
</div>
</div>
)}
{progress.stage === 'completed' && (
<InfoCard variant="success">
<InfoTitle>Migration Complete</InfoTitle>
<InfoDescription>Your data has been migrated successfully. Restart the application to continue using Cherry Studio.</InfoDescription>
</InfoCard>
)}
{progress.stage === 'error' && (
<InfoCard variant="error">
<InfoTitle>Migration Failed</InfoTitle>
<InfoDescription>
An error occurred during migration. You can retry, or exit and keep using your current data.
<br />
<br />
{lastError || progress.error || 'An unknown error occurred'}
</InfoDescription>
</InfoCard>
)}
</ContentArea>
</RightContent>
</MainContent>
<Footer>{renderActionButtons()}</Footer>
</Container>
)
}
const Container = styled.div`
width: 100%;
height: 100vh;
display: flex;
flex-direction: column;
background: #fff;
`
const Header = styled.div`
height: 48px;
background: rgb(240, 240, 240);
display: flex;
align-items: center;
justify-content: center;
z-index: 10;
-webkit-app-region: drag;
user-select: none;
`
const HeaderTitle = styled.div`
font-size: 16px;
font-weight: 600;
color: black;
margin-left: 12px;
`
const HeaderLogo = styled.img`
width: 24px;
height: 24px;
border-radius: 6px;
`
const MainContent = styled.div`
flex: 1;
display: flex;
overflow: hidden;
`
const LeftSidebar = styled.div`
width: 150px;
background: #fff;
border-right: 1px solid #f0f0f0;
display: flex;
flex-direction: column;
`
const StepsContainer = styled.div`
padding: 32px 24px;
flex: 1;
.ant-steps-item-process .ant-steps-item-icon {
background-color: var(--color-primary);
border-color: var(--color-primary-soft);
}
.ant-steps-item-finish .ant-steps-item-icon {
background-color: var(--color-primary-mute);
border-color: var(--color-primary-mute);
}
.ant-steps-item-finish .ant-steps-item-icon > .ant-steps-icon {
color: var(--color-primary);
}
.ant-steps-item-process .ant-steps-item-icon > .ant-steps-icon {
color: #fff;
}
.ant-steps-item-wait .ant-steps-item-icon {
border-color: #d9d9d9;
}
`
const RightContent = styled.div`
flex: 1;
display: flex;
flex-direction: column;
`
const ContentArea = styled.div`
flex: 1;
display: flex;
flex-direction: column;
width: 100%;
padding: 24px;
`
const Footer = styled.div`
display: flex;
flex-direction: row;
align-items: center;
justify-content: center;
background: rgb(250, 250, 250);
height: 64px;
padding: 0 24px;
gap: 16px;
`
const Spacer = styled.div`
flex: 1;
`
const ProgressContainer = styled.div`
margin: 32px 0;
width: 100%;
`
const ButtonRow = styled.div`
display: flex;
justify-content: space-between;
align-items: center;
width: 100%;
min-width: 300px;
`
const InfoIcon = styled.div`
display: flex;
justify-content: center;
align-items: center;
margin-top: 12px;
`
const InfoCard = styled.div<{ variant?: 'info' | 'warning' | 'success' | 'error' }>`
width: 100%;
`
const InfoTitle = styled.div`
margin-bottom: 32px;
margin-top: 32px;
font-size: 16px;
font-weight: 600;
color: var(--color-primary);
line-height: 1.4;
text-align: center;
`
const InfoDescription = styled.p`
margin: 0;
color: rgba(0, 0, 0, 0.68);
line-height: 1.8;
max-width: 420px;
margin: 0 auto;
text-align: center;
`
const SpinningIcon = styled.div`
display: inline-block;
animation: spin 2s linear infinite;
@keyframes spin {
from {
transform: rotate(0deg);
}
to {
transform: rotate(360deg);
}
}
`
export default MigrationApp

View File

@ -0,0 +1,101 @@
/**
* Action buttons component for migration flow
*/
import { Button } from '@cherrystudio/ui'
import React from 'react'
import type { MigrationStage } from '../types'
interface Props {
stage: MigrationStage
onProceedToBackup: () => void
onConfirmBackup: () => void
onStartMigration: () => void
onRetry: () => void
onCancel: () => void
onRestart: () => void
isLoading?: boolean
}
export const ActionButtons: React.FC<Props> = ({
stage,
onProceedToBackup,
onConfirmBackup,
onStartMigration,
onRetry,
onCancel,
onRestart,
isLoading = false
}) => {
switch (stage) {
case 'introduction':
return (
<div className="flex justify-end gap-3">
<Button variant="ghost" onClick={onCancel}>
</Button>
<Button variant="default" onClick={onProceedToBackup}>
</Button>
</div>
)
case 'backup_required':
return (
<div className="flex justify-end gap-3">
<Button variant="ghost" onClick={onCancel}>
</Button>
<Button variant="default" onClick={onConfirmBackup}>
</Button>
</div>
)
case 'backup_confirmed':
return (
<div className="flex justify-end gap-3">
<Button variant="ghost" onClick={onCancel}>
</Button>
<Button variant="default" onClick={onStartMigration} loading={isLoading}>
</Button>
</div>
)
case 'migration':
return (
<div className="flex justify-end gap-3">
<Button variant="default" disabled loading>
Migrating...
</Button>
</div>
)
case 'completed':
return (
<div className="flex justify-end gap-3">
<Button variant="default" onClick={onRestart} className="bg-green-600 hover:bg-green-700">
</Button>
</div>
)
case 'error':
return (
<div className="flex justify-end gap-3">
<Button variant="ghost" onClick={onCancel}>
Exit
</Button>
<Button variant="default" onClick={onRetry}>
Retry
</Button>
</div>
)
default:
return null
}
}

View File

@ -0,0 +1,113 @@
/**
* Migrator progress list component
* Shows the status of each migrator
*/
import { CheckCircle2, Circle, Loader2, XCircle } from 'lucide-react'
import React from 'react'
import styled, { keyframes } from 'styled-components'
import type { MigratorProgress as MigratorProgressType, MigratorStatus } from '../types'
interface Props {
migrators: MigratorProgressType[]
overallProgress: number
}
const spin = keyframes`
from { transform: rotate(0deg); }
to { transform: rotate(360deg); }
`
const StatusIcon: React.FC<{ status: MigratorStatus }> = ({ status }) => {
switch (status) {
case 'completed':
return <CheckCircle2 size={20} color="#52c41a" /> // Antd success color
case 'running':
return (
<SpinningIcon>
<Loader2 size={20} color="var(--color-primary)" />
</SpinningIcon>
)
case 'failed':
return <XCircle size={20} color="#ff4d4f" /> // Antd error color
default:
return <Circle size={20} color="#d9d9d9" />
}
}
const SpinningIcon = styled.div`
display: flex;
animation: ${spin} 1s linear infinite;
`
const statusTextMap: Record<MigratorStatus, string> = {
pending: 'Pending',
running: 'Running',
completed: 'Completed',
failed: 'Failed'
}
export const MigratorProgressList: React.FC<Props> = ({ migrators }) => {
return (
<Container>
<List>
{migrators.map((migrator) => (
<ListItem key={migrator.id}>
<ItemLeft>
<StatusIcon status={migrator.status} />
<ItemName>{migrator.name}</ItemName>
</ItemLeft>
<ItemStatus status={migrator.status}>{migrator.error || statusTextMap[migrator.status]}</ItemStatus>
</ListItem>
))}
</List>
</Container>
)
}
const Container = styled.div`
width: 100%;
`
const List = styled.div`
display: flex;
flex-direction: column;
gap: 8px;
`
const ListItem = styled.div`
display: flex;
align-items: center;
justify-content: space-between;
padding: 12px;
background-color: rgba(0, 0, 0, 0.04);
border-radius: 8px;
`
const ItemLeft = styled.div`
display: flex;
align-items: center;
gap: 12px;
`
const ItemName = styled.span`
font-weight: 500;
color: rgba(0, 0, 0, 0.88);
`
const ItemStatus = styled.span<{ status: MigratorStatus }>`
font-size: 14px;
color: ${({ status }) => {
switch (status) {
case 'failed':
return '#ff4d4f'
case 'completed':
return '#52c41a'
case 'running':
return 'var(--color-primary)'
default:
return 'rgba(0, 0, 0, 0.45)'
}
}};
`

View File

@ -0,0 +1,92 @@
/**
* Stage indicator component
* Shows the current migration stage in a stepper format
*/
import { CheckCircle2, Database, FileArchive, Rocket } from 'lucide-react'
import React from 'react'
import type { MigrationStage } from '../types'
interface Props {
stage: MigrationStage
}
interface StepInfo {
id: string
label: string
icon: React.ReactNode
}
const steps: StepInfo[] = [
{ id: 'intro', label: 'Start', icon: <Rocket className="h-4 w-4" /> },
{ id: 'backup', label: 'Backup', icon: <FileArchive className="h-4 w-4" /> },
{ id: 'migrate', label: 'Migrate', icon: <Database className="h-4 w-4" /> },
{ id: 'complete', label: 'Done', icon: <CheckCircle2 className="h-4 w-4" /> }
]
function getStepIndex(stage: MigrationStage): number {
switch (stage) {
case 'introduction':
return 0
case 'backup_required':
case 'backup_progress':
case 'backup_confirmed':
return 1
case 'migration':
return 2
case 'completed':
return 3
case 'error':
return -1
default:
return 0
}
}
export const StageIndicator: React.FC<Props> = ({ stage }) => {
const currentIndex = getStepIndex(stage)
const isError = stage === 'error'
return (
<div className="mb-8 flex w-full items-center justify-between">
{steps.map((step, index) => {
const isCompleted = index < currentIndex
const isCurrent = index === currentIndex
const isPending = index > currentIndex
return (
<React.Fragment key={step.id}>
{/* Step indicator */}
<div className="flex flex-col items-center">
<div
className={`flex h-10 w-10 items-center justify-center rounded-full border-2 transition-colors ${isCompleted ? 'border-green-600 bg-green-600 text-white dark:border-green-400 dark:bg-green-400' : ''}
${isCurrent && !isError ? 'border-primary bg-primary text-white' : ''}
${isCurrent && isError ? 'border-red-600 bg-red-600 text-white dark:border-red-400 dark:bg-red-400' : ''}
${isPending ? 'border-border bg-secondary text-muted-foreground' : ''}
`}>
{isCompleted ? <CheckCircle2 className="h-5 w-5" /> : step.icon}
</div>
<span
className={`mt-2 font-medium text-sm ${isCompleted ? 'text-green-600 dark:text-green-400' : ''}
${isCurrent && !isError ? 'text-primary' : ''}
${isCurrent && isError ? 'text-red-600 dark:text-red-400' : ''}
${isPending ? 'text-muted-foreground' : ''}
`}>
{step.label}
</span>
</div>
{/* Connector line */}
{index < steps.length - 1 && (
<div
className={`mx-2 h-0.5 flex-1 transition-colors ${index < currentIndex ? 'bg-green-600 dark:bg-green-400' : 'bg-border'}
`}
/>
)}
</React.Fragment>
)
})}
</div>
)
}

View File

@ -0,0 +1,3 @@
export { ActionButtons } from './ActionButtons'
export { MigratorProgressList } from './MigratorProgress'
export { StageIndicator } from './StageIndicator'

View File

@ -0,0 +1,19 @@
/**
* Entry point for the migration v2 window
* Initializes the migration UI with @cherrystudio/ui components
*/
import '@renderer/assets/styles/index.css'
import '@renderer/assets/styles/tailwind.css'
import '@ant-design/v5-patch-for-react-19'
import { loggerService } from '@logger'
import { createRoot } from 'react-dom/client'
import MigrationApp from './MigrationApp'
// Initialize logger for this window
loggerService.initWindowSource('MigrationV2')
const root = createRoot(document.getElementById('root') as HTMLElement)
root.render(<MigrationApp />)

View File

@ -0,0 +1,95 @@
/**
* Dexie database exporter for migration
* Exports IndexedDB tables to JSON files for Main process to read
*/
import { db } from '@renderer/databases'
// Required tables that must exist
const REQUIRED_TABLES = [
'topics', // Contains messages embedded within each topic
'files', // File metadata
'knowledge_notes', // Individual knowledge note items
'message_blocks' // Message block data
]
// Optional tables that may not exist in older versions
const OPTIONAL_TABLES = ['settings', 'translate_history', 'quick_phrases', 'translate_languages']
export interface ExportProgress {
table: string
progress: number
total: number
}
export class DexieExporter {
private exportPath: string
constructor(exportPath: string) {
this.exportPath = exportPath
}
/**
* Export all Dexie tables to JSON files
* @param onProgress - Progress callback
* @returns Export path
*/
async exportAll(onProgress?: (progress: ExportProgress) => void): Promise<string> {
// Validate required tables exist
const existingTables = db.tables.map((t) => t.name)
const missingTables = REQUIRED_TABLES.filter((t) => !existingTables.includes(t))
if (missingTables.length > 0) {
throw new Error(
`Required Dexie tables not found: ${missingTables.join(', ')}. ` +
`This may indicate an incompatible database version.`
)
}
// Determine which tables to export
const tablesToExport = [...REQUIRED_TABLES, ...OPTIONAL_TABLES.filter((t) => existingTables.includes(t))]
// Export each table
for (let i = 0; i < tablesToExport.length; i++) {
const tableName = tablesToExport[i]
onProgress?.({
table: tableName,
progress: i,
total: tablesToExport.length
})
const data = await db.table(tableName).toArray()
// Send data to Main process for writing
// Uses IPC invoke with migration channel
await window.electron.ipcRenderer.invoke(
'migration:write-export-file',
this.exportPath,
tableName,
JSON.stringify(data)
)
onProgress?.({
table: tableName,
progress: i + 1,
total: tablesToExport.length
})
}
return this.exportPath
}
/**
* Get table counts for validation
*/
async getTableCounts(): Promise<Record<string, number>> {
const counts: Record<string, number> = {}
for (const table of db.tables) {
counts[table.name] = await table.count()
}
return counts
}
}
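/*
 * Usage sketch (illustrative; the real flow lives in MigrationApp.handleStartMigration):
 *
 *   const userDataPath = await window.electron.ipcRenderer.invoke(MigrationIpcChannels.GetUserDataPath)
 *   const exporter = new DexieExporter(`${userDataPath}/migration_temp/dexie_export`)
 *   const exportPath = await exporter.exportAll(({ table, progress, total }) => {
 *     console.debug(`Exported ${progress}/${total} tables, current: ${table}`)
 *   })
 */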

View File

@ -0,0 +1,112 @@
/**
* Redux Persist data exporter for migration
* Extracts persisted Redux state from localStorage and parses it for Main process
*/
const PERSIST_KEY = 'persist:cherry-studio'
// Redux slices that need to be migrated
const SLICES_TO_EXPORT = [
'settings', // App settings and preferences
'assistants', // Assistant configurations
'knowledge', // Knowledge base metadata
'llm', // LLM provider and model configurations
'note', // Note-related settings
'selectionStore' // Selection assistant settings
]
export interface ReduxExportResult {
data: Record<string, unknown>
slicesFound: string[]
slicesMissing: string[]
}
export class ReduxExporter {
/**
* Export Redux Persist data from localStorage
* Parses the nested JSON structure and returns clean data
*/
export(): ReduxExportResult {
const rawData = localStorage.getItem(PERSIST_KEY)
if (!rawData) {
throw new Error(`Redux Persist data not found in localStorage (key: ${PERSIST_KEY})`)
}
// Parse the outer JSON
let persistedState: Record<string, string>
try {
persistedState = JSON.parse(rawData)
} catch (error) {
throw new Error(`Failed to parse Redux Persist root data: ${error}`)
}
// Parse each slice (Redux Persist stores each slice as a JSON string)
const result: Record<string, unknown> = {}
const slicesFound: string[] = []
const slicesMissing: string[] = []
for (const sliceName of SLICES_TO_EXPORT) {
const sliceData = persistedState[sliceName]
if (sliceData === undefined) {
slicesMissing.push(sliceName)
continue
}
try {
// Each slice is stored as a JSON string
result[sliceName] = JSON.parse(sliceData)
slicesFound.push(sliceName)
} catch (error) {
// If parsing fails, store as-is (might be a primitive)
result[sliceName] = sliceData
slicesFound.push(sliceName)
}
}
// Also include _persist metadata if present
if (persistedState._persist) {
try {
result._persist = JSON.parse(persistedState._persist)
} catch {
result._persist = persistedState._persist
}
}
return {
data: result,
slicesFound,
slicesMissing
}
}
/**
* Get raw Redux Persist data for debugging
*/
getRawData(): string | null {
return localStorage.getItem(PERSIST_KEY)
}
/**
* Check if Redux Persist data exists
*/
hasData(): boolean {
return localStorage.getItem(PERSIST_KEY) !== null
}
/**
* Get list of all persisted slices
*/
getPersistedSlices(): string[] {
const rawData = localStorage.getItem(PERSIST_KEY)
if (!rawData) return []
try {
const persistedState = JSON.parse(rawData)
return Object.keys(persistedState).filter((key) => key !== '_persist')
} catch {
return []
}
}
}
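/*
 * Usage sketch (illustrative):
 *
 *   const exporter = new ReduxExporter()
 *   if (exporter.hasData()) {
 *     const { data, slicesFound, slicesMissing } = exporter.export()
 *     // slicesMissing lists slices that were never persisted on this install;
 *     // MigrationApp only logs it for diagnostics before handing `data` to Main.
 *   }
 */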

View File

@ -0,0 +1,6 @@
/**
* Migration exporters
*/
export { DexieExporter, type ExportProgress } from './DexieExporter'
export { ReduxExporter, type ReduxExportResult } from './ReduxExporter'

View File

@ -0,0 +1,5 @@
/**
* Migration hooks
*/
export { useMigrationActions, useMigrationProgress } from './useMigrationProgress'

View File

@ -0,0 +1,123 @@
/**
* Hook for subscribing to migration progress updates
*/
import { useCallback, useEffect, useState } from 'react'
import { MigrationIpcChannels, type MigrationProgress, type MigrationStage, type MigratorStatus } from '../types'
// Re-export types for convenience
export type { MigrationProgress, MigrationStage, MigratorStatus }
const initialProgress: MigrationProgress = {
stage: 'introduction',
overallProgress: 0,
currentMessage: 'Ready to start data migration',
migrators: []
}
export function useMigrationProgress() {
const [progress, setProgress] = useState<MigrationProgress>(initialProgress)
const [lastError, setLastError] = useState<string | null>(null)
useEffect(() => {
// Listen for progress updates from Main process
const handleProgress = (_: unknown, progressData: MigrationProgress) => {
setProgress(progressData)
if (progressData.error) {
setLastError(progressData.error)
}
}
window.electron.ipcRenderer.on(MigrationIpcChannels.Progress, handleProgress)
// Request initial progress
window.electron.ipcRenderer
.invoke(MigrationIpcChannels.GetProgress)
.then((storedProgress: MigrationProgress) => {
if (storedProgress) {
setProgress(storedProgress)
}
})
.catch(console.error)
// Check for last error
window.electron.ipcRenderer
.invoke(MigrationIpcChannels.GetLastError)
.then((error: string | null) => {
if (error) {
setLastError(error)
}
})
.catch(console.error)
return () => {
window.electron.ipcRenderer.removeAllListeners(MigrationIpcChannels.Progress)
}
}, [])
// Stage helpers
const isInProgress = progress.stage === 'migration'
const isCompleted = progress.stage === 'completed'
const isError = progress.stage === 'error'
const canCancel = progress.stage === 'introduction' || progress.stage === 'backup_required'
return {
progress,
lastError,
isInProgress,
isCompleted,
isError,
canCancel
}
}
/**
* Hook for migration actions
*/
export function useMigrationActions() {
const proceedToBackup = useCallback(() => {
return window.electron.ipcRenderer.invoke(MigrationIpcChannels.ProceedToBackup)
}, [])
const confirmBackup = useCallback(() => {
return window.electron.ipcRenderer.invoke(MigrationIpcChannels.BackupCompleted)
}, [])
const showBackupDialog = useCallback(() => {
return window.electron.ipcRenderer.invoke(MigrationIpcChannels.ShowBackupDialog)
}, [])
const startMigration = useCallback(async (reduxData: Record<string, unknown>, dexieExportPath: string) => {
// Send Redux data
await window.electron.ipcRenderer.invoke(MigrationIpcChannels.SendReduxData, reduxData)
// Send Dexie export path
await window.electron.ipcRenderer.invoke(MigrationIpcChannels.DexieExportCompleted, dexieExportPath)
// Start migration
return window.electron.ipcRenderer.invoke(MigrationIpcChannels.StartMigration)
}, [])
const retry = useCallback(() => {
return window.electron.ipcRenderer.invoke(MigrationIpcChannels.Retry)
}, [])
const cancel = useCallback(() => {
return window.electron.ipcRenderer.invoke(MigrationIpcChannels.Cancel)
}, [])
const restart = useCallback(() => {
return window.electron.ipcRenderer.invoke(MigrationIpcChannels.Restart)
}, [])
return {
proceedToBackup,
confirmBackup,
showBackupDialog,
startMigration,
retry,
cancel,
restart
}
}
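/*
 * Usage sketch (illustrative component, not part of the migration window UI):
 *
 *   const StatusLine: React.FC = () => {
 *     const { progress, isError } = useMigrationProgress()
 *     const { retry } = useMigrationActions()
 *     return (
 *       <div>
 *         {progress.currentMessage}
 *         {isError && <button onClick={retry}>Retry</button>}
 *       </div>
 *     )
 *   }
 */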

View File

@ -0,0 +1,50 @@
/**
* Migration types for Renderer process
* Duplicated from Main to avoid cross-process imports
*/
export type MigrationStage =
| 'introduction'
| 'backup_required'
| 'backup_progress'
| 'backup_confirmed'
| 'migration'
| 'completed'
| 'error'
export type MigratorStatus = 'pending' | 'running' | 'completed' | 'failed'
export interface MigratorProgress {
id: string
name: string
status: MigratorStatus
error?: string
}
export interface MigrationProgress {
stage: MigrationStage
overallProgress: number
currentMessage: string
migrators: MigratorProgress[]
error?: string
}
// IPC channel names
export const MigrationIpcChannels = {
CheckNeeded: 'migration:check-needed',
GetProgress: 'migration:get-progress',
GetLastError: 'migration:get-last-error',
GetUserDataPath: 'migration:get-user-data-path',
Start: 'migration:start',
ProceedToBackup: 'migration:proceed-to-backup',
ShowBackupDialog: 'migration:show-backup-dialog',
BackupCompleted: 'migration:backup-completed',
StartMigration: 'migration:start-migration',
Retry: 'migration:retry',
Cancel: 'migration:cancel',
Restart: 'migration:restart',
SendReduxData: 'migration:send-redux-data',
DexieExportCompleted: 'migration:dexie-export-completed',
Progress: 'migration:progress',
ExportProgress: 'migration:export-progress'
} as const
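/*
 * Because the channel map is declared `as const`, a union of the literal channel
 * names can be derived if a typed invoke wrapper is ever wanted. A possible helper
 * (not used by the current code):
 *
 *   export type MigrationIpcChannel = (typeof MigrationIpcChannels)[keyof typeof MigrationIpcChannels]
 *   // e.g. 'migration:check-needed' | 'migration:get-progress' | ...
 */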

View File

@ -12440,6 +12440,25 @@ __metadata:
languageName: node
linkType: hard
"@types/stream-chain@npm:*":
version: 2.1.0
resolution: "@types/stream-chain@npm:2.1.0"
dependencies:
"@types/node": "npm:*"
checksum: 10c0/b9acf2740068c20defed8a0066040375d7b75ce3f51100b61b888c9425818d92ef794110468350f51109d9c1aa890a645953abe8fcb25a57ee4e86a19559bda4
languageName: node
linkType: hard
"@types/stream-json@npm:^1":
version: 1.7.8
resolution: "@types/stream-json@npm:1.7.8"
dependencies:
"@types/node": "npm:*"
"@types/stream-chain": "npm:*"
checksum: 10c0/c88f4dca65332d3847b37b16a60dc9d9a7401a6c47b0553e5a424e35f989757afeede32397b1a9ee9c1e5ba4a013832d2938fa60590e2b1dffa848185ac33b93
languageName: node
linkType: hard
"@types/styled-components@npm:^5.1.34":
version: 5.1.34
resolution: "@types/styled-components@npm:5.1.34"
@ -13719,6 +13738,7 @@ __metadata:
"@types/react-infinite-scroll-component": "npm:^5.0.0"
"@types/react-transition-group": "npm:^4.4.12"
"@types/react-window": "npm:^1"
"@types/stream-json": "npm:^1"
"@types/swagger-jsdoc": "npm:^6"
"@types/swagger-ui-express": "npm:^4.1.8"
"@types/tinycolor2": "npm:^1"
@ -13862,6 +13882,7 @@ __metadata:
sharp: "npm:^0.34.3"
shiki: "npm:^3.12.0"
socket.io: "npm:^4.8.1"
stream-json: "npm:^1.9.1"
strict-url-sanitise: "npm:^0.0.1"
string-width: "npm:^7.2.0"
striptags: "npm:^3.2.0"
@ -27674,6 +27695,13 @@ __metadata:
languageName: node
linkType: hard
"stream-chain@npm:^2.2.5":
version: 2.2.5
resolution: "stream-chain@npm:2.2.5"
checksum: 10c0/c512f50190d7c92d688fa64e7af540c51b661f9c2b775fc72bca38ea9bca515c64c22c2197b1be463741daacbaaa2dde8a8ea24ebda46f08391224f15249121a
languageName: node
linkType: hard
"stream-head@npm:^3.0.0":
version: 3.0.0
resolution: "stream-head@npm:3.0.0"
@ -27683,6 +27711,15 @@ __metadata:
languageName: node
linkType: hard
"stream-json@npm:^1.9.1":
version: 1.9.1
resolution: "stream-json@npm:1.9.1"
dependencies:
stream-chain: "npm:^2.2.5"
checksum: 10c0/0521e5cb3fb6b0e2561d715975e891bd81fa77d0239c8d0b1756846392bc3c7cdd7f1ddb0fe7ed77e6fdef58daab9e665d3b39f7d677bd0859e65a2bff59b92c
languageName: node
linkType: hard
"stream-mime-type@npm:^2.0.0":
version: 2.0.0
resolution: "stream-mime-type@npm:2.0.0"