feat: add batch upload and file watcher control functionality

This commit is contained in:
suyao 2025-11-20 17:55:18 +08:00
parent a95e776699
commit 5c724a03a6
No known key found for this signature in database
7 changed files with 259 additions and 10 deletions

View File

@@ -196,6 +196,9 @@ export enum IpcChannel {
File_ValidateNotesDirectory = 'file:validateNotesDirectory',
File_StartWatcher = 'file:startWatcher',
File_StopWatcher = 'file:stopWatcher',
File_PauseWatcher = 'file:pauseWatcher',
File_ResumeWatcher = 'file:resumeWatcher',
File_BatchUploadMarkdown = 'file:batchUploadMarkdown',
File_ShowInFolder = 'file:showInFolder',
// file service

View File

@@ -10,7 +10,7 @@ export type LoaderReturn = {
messageSource?: 'preprocess' | 'embedding' | 'validation'
}
-export type FileChangeEventType = 'add' | 'change' | 'unlink' | 'addDir' | 'unlinkDir'
+export type FileChangeEventType = 'add' | 'change' | 'unlink' | 'addDir' | 'unlinkDir' | 'refresh'
export type FileChangeEvent = {
eventType: FileChangeEventType

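The new 'refresh' variant is emitted once by the main process after a bulk operation (see resumeFileWatcher below), so listeners can reload their state in one pass instead of reacting to every per-file event. A minimal sketch of a renderer-side consumer, assuming a hypothetical reloadTree callback (the NotesPage change later in this diff handles it the same way):

window.api.file.onFileChange((event: FileChangeEvent) => {
  if (event.eventType === 'refresh') {
    // one synthetic event after the batch finishes; reload the whole tree once
    void reloadTree()
  }
})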
View File

@@ -557,6 +557,9 @@ export function registerIpc(mainWindow: BrowserWindow, app: Electron.App) {
ipcMain.handle(IpcChannel.File_ValidateNotesDirectory, fileManager.validateNotesDirectory.bind(fileManager))
ipcMain.handle(IpcChannel.File_StartWatcher, fileManager.startFileWatcher.bind(fileManager))
ipcMain.handle(IpcChannel.File_StopWatcher, fileManager.stopFileWatcher.bind(fileManager))
ipcMain.handle(IpcChannel.File_PauseWatcher, fileManager.pauseFileWatcher.bind(fileManager))
ipcMain.handle(IpcChannel.File_ResumeWatcher, fileManager.resumeFileWatcher.bind(fileManager))
ipcMain.handle(IpcChannel.File_BatchUploadMarkdown, fileManager.batchUploadMarkdownFiles.bind(fileManager))
ipcMain.handle(IpcChannel.File_ShowInFolder, fileManager.showInFolder.bind(fileManager))
// file service

View File

@@ -1605,6 +1605,164 @@ class FileStorage {
logger.error('Failed to show item in folder:', error as Error)
}
}
/**
* Batch upload markdown files given their absolute source paths
* (collected from native File objects in the renderer).
* All I/O runs in the Main process to avoid blocking the Renderer.
*/
public batchUploadMarkdownFiles = async (
_: Electron.IpcMainInvokeEvent,
filePaths: string[],
targetPath: string
): Promise<{
fileCount: number
folderCount: number
skippedFiles: number
}> => {
try {
logger.info('Starting batch upload', { fileCount: filePaths.length, targetPath })
const basePath = path.resolve(targetPath)
const MARKDOWN_EXTS = ['.md', '.markdown']
// Filter markdown files
const markdownFiles = filePaths.filter((filePath) => {
const ext = path.extname(filePath).toLowerCase()
return MARKDOWN_EXTS.includes(ext)
})
const skippedFiles = filePaths.length - markdownFiles.length
if (markdownFiles.length === 0) {
return { fileCount: 0, folderCount: 0, skippedFiles }
}
// Collect unique folders needed
const foldersSet = new Set<string>()
const fileOperations: Array<{ sourcePath: string; targetPath: string }> = []
for (const filePath of markdownFiles) {
try {
// Get relative path if file is from a directory upload
const fileName = path.basename(filePath)
const relativePath = path.dirname(filePath)
// Determine target directory structure
let targetDir = basePath
const folderParts: string[] = []
// Extract folder structure from file path for nested uploads
// This is a simplified version; in a real scenario we'd need the original directory structure
if (relativePath && relativePath !== '.') {
const parts = relativePath.split(path.sep)
// Get the last few parts that represent the folder structure within upload
const relevantParts = parts.slice(Math.max(0, parts.length - 3))
folderParts.push(...relevantParts)
}
// Build target directory path
for (const part of folderParts) {
targetDir = path.join(targetDir, part)
foldersSet.add(targetDir)
}
// Determine final file name
const nameWithoutExt = fileName.endsWith('.md')
? fileName.slice(0, -3)
: fileName.endsWith('.markdown')
? fileName.slice(0, -9)
: fileName
const { safeName } = await this.fileNameGuard(_, targetDir, nameWithoutExt, true)
const finalPath = path.join(targetDir, safeName + '.md')
fileOperations.push({ sourcePath: filePath, targetPath: finalPath })
} catch (error) {
logger.error('Failed to prepare file operation:', error as Error, { filePath })
}
}
// Create folders in order (shallow to deep)
const sortedFolders = Array.from(foldersSet).sort((a, b) => a.length - b.length)
for (const folder of sortedFolders) {
try {
if (!fs.existsSync(folder)) {
await fs.promises.mkdir(folder, { recursive: true })
}
} catch (error) {
logger.debug('Folder already exists or creation failed', { folder, error: (error as Error).message })
}
}
// Process files in batches
const BATCH_SIZE = 10 // Higher batch size since we're in Main process
let successCount = 0
for (let i = 0; i < fileOperations.length; i += BATCH_SIZE) {
const batch = fileOperations.slice(i, i + BATCH_SIZE)
const results = await Promise.allSettled(
batch.map(async (op) => {
// Read from source and write to target in Main process
const content = await fs.promises.readFile(op.sourcePath, 'utf-8')
await fs.promises.writeFile(op.targetPath, content, 'utf-8')
return true
})
)
results.forEach((result, index) => {
if (result.status === 'fulfilled') {
successCount++
} else {
logger.error('Failed to upload file:', result.reason, {
file: batch[index].sourcePath
})
}
})
}
logger.info('Batch upload completed', {
successCount,
folderCount: foldersSet.size,
skippedFiles
})
return {
fileCount: successCount,
folderCount: foldersSet.size,
skippedFiles
}
} catch (error) {
logger.error('Batch upload failed:', error as Error)
throw error
}
}
/**
* Pause file watcher to prevent events during batch operations
*/
public pauseFileWatcher = async (): Promise<void> => {
if (this.watcher) {
logger.debug('Pausing file watcher')
// Chokidar doesn't have a pause API, so we approximate one
// by clearing the pending debounce timer before the batch runs
if (this.debounceTimer) {
clearTimeout(this.debounceTimer)
this.debounceTimer = undefined
}
}
}
/**
* Resume file watcher and trigger a refresh
*/
public resumeFileWatcher = async (): Promise<void> => {
if (this.watcher && this.currentWatchPath) {
logger.debug('Resuming file watcher')
// Send a synthetic refresh event to trigger tree reload
this.notifyChange('refresh', this.currentWatchPath)
}
}
}
export const fileStorage = new FileStorage()
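
Taken together, the three new methods are meant to bracket a bulk operation: pause the watcher, do the batch work in the main process, then resume to emit a single synthetic refresh. A minimal sketch of the intended renderer-side call sequence, mirroring the uploadNotes change later in this diff (filePaths and targetDir stand in for the caller's values):

await window.api.file.pauseFileWatcher() // drop the pending debounce so per-file events don't fire
try {
  const { fileCount, folderCount, skippedFiles } = await window.api.file.batchUploadMarkdown(filePaths, targetDir)
  console.log(`uploaded ${fileCount} files into ${folderCount} folders, skipped ${skippedFiles}`)
} finally {
  await window.api.file.resumeFileWatcher() // emits one 'refresh' event for the watched path
}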

View File

@@ -220,6 +220,10 @@ const api = {
startFileWatcher: (dirPath: string, config?: any) =>
ipcRenderer.invoke(IpcChannel.File_StartWatcher, dirPath, config),
stopFileWatcher: () => ipcRenderer.invoke(IpcChannel.File_StopWatcher),
pauseFileWatcher: () => ipcRenderer.invoke(IpcChannel.File_PauseWatcher),
resumeFileWatcher: () => ipcRenderer.invoke(IpcChannel.File_ResumeWatcher),
batchUploadMarkdown: (filePaths: string[], targetPath: string) =>
ipcRenderer.invoke(IpcChannel.File_BatchUploadMarkdown, filePaths, targetPath),
onFileChange: (callback: (data: FileChangeEvent) => void) => {
const listener = (_event: Electron.IpcRendererEvent, data: any) => {
if (data && typeof data === 'object') {

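For reference, the renderer-facing surface these preload additions expose looks roughly like the following (return types inferred from the main-process handlers earlier in this diff; the actual type declaration file is not part of this commit):

interface FileApiAdditions {
  pauseFileWatcher: () => Promise<void>
  resumeFileWatcher: () => Promise<void>
  batchUploadMarkdown: (
    filePaths: string[],
    targetPath: string
  ) => Promise<{ fileCount: number; folderCount: number; skippedFiles: number }>
}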
View File

@@ -295,6 +295,16 @@ const NotesPage: FC = () => {
break
}
case 'refresh': {
// Single refresh after a batch operation completes
logger.debug('Received refresh event, triggering tree refresh')
const refresh = refreshTreeRef.current
if (refresh) {
await refresh()
}
break
}
case 'add':
case 'addDir':
case 'unlink':
@@ -621,17 +631,26 @@ const NotesPage: FC = () => {
throw new Error('No folder path selected')
}
-// Show loading toast for multiple files to indicate processing
-let loadingToast: number | string | undefined
-if (files.length > 5) {
-loadingToast = window.toast.loading(t('notes.uploading_files', { count: files.length }))
+// Validate uploadNotes function is available
+if (typeof uploadNotes !== 'function') {
+logger.error('uploadNotes function is not available', { uploadNotes })
+window.toast.error(t('notes.upload_failed'))
+return
}
-const result = await uploadNotes(files, targetFolderPath)
+let result: Awaited<ReturnType<typeof uploadNotes>>
+try {
+result = await uploadNotes(files, targetFolderPath)
+} catch (uploadError) {
+logger.error('Upload operation failed:', uploadError as Error)
+throw uploadError
+}
-// Dismiss loading toast if shown
-if (loadingToast) {
-window.toast.dismiss(loadingToast)
+// Validate result object
+if (!result || typeof result !== 'object') {
+logger.error('Invalid upload result:', { result })
+window.toast.error(t('notes.upload_failed'))
+return
}
// Check the upload result

View File

@@ -83,6 +83,68 @@ export async function renameNode(node: NotesTreeNode, newName: string): Promise<
}
export async function uploadNotes(files: File[], targetPath: string): Promise<UploadResult> {
const basePath = normalizePath(targetPath)
const totalFiles = files.length
if (files.length === 0) {
return {
uploadedNodes: [],
totalFiles: 0,
skippedFiles: 0,
fileCount: 0,
folderCount: 0
}
}
try {
// Get file paths from the File objects
// Browser File objects from drag-and-drop would need FileReader and temporary files,
// but files from directory uploads already carry a path
const filePaths: string[] = []
for (const file of files) {
// @ts-ignore - .path is a non-standard Electron addition to the DOM File type
if (file.path) {
// @ts-ignore - Electron File objects have .path property
filePaths.push(file.path)
} else {
// For browser File API, we'd need to use FileReader and create temp files
// For now, fall back to the old method for these cases
logger.warn('File without path detected, using fallback method')
return uploadNotesLegacy(files, targetPath)
}
}
// Pause the file watcher so the batch doesn't trigger one refresh event per file
await window.api.file.pauseFileWatcher()
try {
// Use the new optimized batch upload API that runs in Main process
const result = await window.api.file.batchUploadMarkdown(filePaths, basePath)
return {
uploadedNodes: [],
totalFiles,
skippedFiles: result.skippedFiles,
fileCount: result.fileCount,
folderCount: result.folderCount
}
} finally {
// Resume watcher and trigger single refresh
await window.api.file.resumeFileWatcher()
}
} catch (error) {
logger.error('Batch upload failed, falling back to legacy method:', error as Error)
// Fall back to old method if new method fails
return uploadNotesLegacy(files, targetPath)
}
}
/**
* Legacy upload method using Renderer process
* Kept as fallback for browser File API files without paths
*/
async function uploadNotesLegacy(files: File[], targetPath: string): Promise<UploadResult> {
const basePath = normalizePath(targetPath)
const markdownFiles = filterMarkdown(files)
const skippedFiles = files.length - markdownFiles.length
@@ -106,7 +168,7 @@ export async function uploadNotes(files: File[], targetPath: string): Promise<Up
// Process files in batches to avoid blocking the UI thread
for (let i = 0; i < markdownFiles.length; i += BATCH_SIZE) {
const batch = markdownFiles.slice(i, i + BATCH_SIZE)
// Process current batch in parallel
const results = await Promise.allSettled(
batch.map(async (file) => {