diff --git a/packages/shared/IpcChannel.ts b/packages/shared/IpcChannel.ts index 167721a7f0..27d208a2a8 100644 --- a/packages/shared/IpcChannel.ts +++ b/packages/shared/IpcChannel.ts @@ -198,7 +198,10 @@ export enum IpcChannel { File_StopWatcher = 'file:stopWatcher', File_PauseWatcher = 'file:pauseWatcher', File_ResumeWatcher = 'file:resumeWatcher', - File_BatchUploadMarkdown = 'file:batchUploadMarkdown', + File_BatchUpload = 'file:batchUpload', + File_UploadFolder = 'file:uploadFolder', // Upload entire folder with recursive structure + File_UploadEntry = 'file:uploadEntry', // Single entry upload for drag-and-drop + File_BatchUploadEntries = 'file:batchUploadEntries', // Batch entry upload (performance-optimized) File_ShowInFolder = 'file:showInFolder', // file service diff --git a/src/main/ipc.ts b/src/main/ipc.ts index f91e61eaa4..26cdbb5403 100644 --- a/src/main/ipc.ts +++ b/src/main/ipc.ts @@ -597,7 +597,10 @@ export function registerIpc(mainWindow: BrowserWindow, app: Electron.App) { ipcMain.handle(IpcChannel.File_StopWatcher, fileManager.stopFileWatcher.bind(fileManager)) ipcMain.handle(IpcChannel.File_PauseWatcher, fileManager.pauseFileWatcher.bind(fileManager)) ipcMain.handle(IpcChannel.File_ResumeWatcher, fileManager.resumeFileWatcher.bind(fileManager)) - ipcMain.handle(IpcChannel.File_BatchUploadMarkdown, fileManager.batchUploadMarkdownFiles.bind(fileManager)) + ipcMain.handle(IpcChannel.File_BatchUpload, fileManager.batchUpload.bind(fileManager)) + ipcMain.handle(IpcChannel.File_UploadFolder, fileManager.uploadFolder.bind(fileManager)) + ipcMain.handle(IpcChannel.File_UploadEntry, fileManager.uploadFileEntry.bind(fileManager)) + ipcMain.handle(IpcChannel.File_BatchUploadEntries, fileManager.batchUploadEntries.bind(fileManager)) ipcMain.handle(IpcChannel.File_ShowInFolder, fileManager.showInFolder.bind(fileManager)) // file service diff --git a/src/main/services/FileStorage.ts b/src/main/services/FileStorage.ts index 81f5c15bd9..5179c1cf2d 100644 --- a/src/main/services/FileStorage.ts +++ b/src/main/services/FileStorage.ts @@ -1,6 +1,7 @@ import { loggerService } from '@logger' import { checkName, + findCommonRoot, getFilesDir, getFileType, getName, @@ -23,6 +24,7 @@ import { writeFileSync } from 'fs' import { readFile } from 'fs/promises' import { isBinaryFile } from 'isbinaryfile' import officeParser from 'officeparser' +import PQueue from 'p-queue' import * as path from 'path' import { PDFDocument } from 'pdf-lib' import { chdir } from 'process' @@ -1645,13 +1647,23 @@ class FileStorage { } /** - * Batch upload markdown files from native File objects - * This handles all I/O operations in the Main process to avoid blocking Renderer + * Generic batch upload files with structure preservation (VS Code-inspired) + * Handles all I/O operations in Main process to avoid blocking Renderer + * + * @param filePaths - Array of source file paths to upload + * @param targetPath - Destination directory + * @param options - Upload options + * @param options.fileFilter - Filter function (path => boolean). + * @param options.fileNameTransform - Transform function (basename => newBasename). 
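+ * @example
+ * // Renderer-side sketch via the preload bridge (illustrative paths; result counts depend on the inputs):
+ * // const result = await window.api.file.batchUpload(
+ * //   ['/src/docs/a.md', '/src/docs/sub/b.md'],
+ * //   '/notes',
+ * //   { allowedExtensions: ['.md', '.markdown'] }
+ * // )
+ * // → copies to /notes/a.md and /notes/sub/b.md (structure preserved relative to the common root /src/docs)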
*/ - public batchUploadMarkdownFiles = async ( + public batchUpload = async ( _: Electron.IpcMainInvokeEvent, filePaths: string[], - targetPath: string + targetPath: string, + options?: { + allowedExtensions?: string[] + fileNameTransform?: (fileName: string) => string + } ): Promise<{ fileCount: number folderCount: number @@ -1661,58 +1673,59 @@ class FileStorage { logger.info('Starting batch upload', { fileCount: filePaths.length, targetPath }) const basePath = path.resolve(targetPath) - const MARKDOWN_EXTS = ['.md', '.markdown'] - // Filter markdown files - const markdownFiles = filePaths.filter((filePath) => { - const ext = path.extname(filePath).toLowerCase() - return MARKDOWN_EXTS.includes(ext) - }) + const allowedExtensions = options?.allowedExtensions + const fileFilter = (filePath: string): boolean => { + if (!allowedExtensions || allowedExtensions.length === 0) return true + const lowerPath = filePath.toLowerCase() + return allowedExtensions.some((ext) => lowerPath.endsWith(ext.toLowerCase())) + } - const skippedFiles = filePaths.length - markdownFiles.length + const fileNameTransform = options?.fileNameTransform || ((name) => name) - if (markdownFiles.length === 0) { + // Filter files using custom or default filter + const validFiles = filePaths.filter(fileFilter) + const skippedFiles = filePaths.length - validFiles.length + + if (validFiles.length === 0) { return { fileCount: 0, folderCount: 0, skippedFiles } } + // Find common root for file paths to preserve relative structure + const commonRoot = findCommonRoot(validFiles) + logger.debug('Calculated common root:', { commonRoot }) + // Collect unique folders needed const foldersSet = new Set() const fileOperations: Array<{ sourcePath: string; targetPath: string }> = [] - for (const filePath of markdownFiles) { + for (const filePath of validFiles) { try { - // Get relative path if file is from a directory upload - const fileName = path.basename(filePath) - const relativePath = path.dirname(filePath) + // Calculate relative path from common root + const relativePath = path.relative(commonRoot, filePath) + const fileName = path.basename(relativePath) + const relativeDir = path.dirname(relativePath) - // Determine target directory structure + // Build target directory path preserving structure let targetDir = basePath - const folderParts: string[] = [] + if (relativeDir && relativeDir !== '.') { + targetDir = path.join(basePath, relativeDir) - // Extract folder structure from file path for nested uploads - // This is a simplified version - in real scenario we'd need the original directory structure - if (relativePath && relativePath !== '.') { - const parts = relativePath.split(path.sep) - // Get the last few parts that represent the folder structure within upload - const relevantParts = parts.slice(Math.max(0, parts.length - 3)) - folderParts.push(...relevantParts) + // Collect all parent directories + let currentDir = basePath + const dirParts = relativeDir.split(path.sep) + for (const part of dirParts) { + currentDir = path.join(currentDir, part) + foldersSet.add(currentDir) + } } - // Build target directory path - for (const part of folderParts) { - targetDir = path.join(targetDir, part) - foldersSet.add(targetDir) - } - - // Determine final file name - const nameWithoutExt = fileName.endsWith('.md') - ? fileName.slice(0, -3) - : fileName.endsWith('.markdown') - ? 
fileName.slice(0, -9) - : fileName + const transformedFileName = fileNameTransform(fileName) + const nameWithoutExt = path.parse(transformedFileName).name const { safeName } = await this.fileNameGuard(_, targetDir, nameWithoutExt, true) - const finalPath = path.join(targetDir, safeName + '.md') + const finalExt = path.extname(transformedFileName) || '.md' + const finalPath = path.join(targetDir, safeName + finalExt) fileOperations.push({ sourcePath: filePath, targetPath: finalPath }) } catch (error) { @@ -1720,6 +1733,8 @@ class FileStorage { } } + // No special folder root handling needed for simplified batchUpload + // Create folders in order (shallow to deep) const sortedFolders = Array.from(foldersSet).sort((a, b) => a.length - b.length) for (const folder of sortedFolders) { @@ -1802,6 +1817,320 @@ class FileStorage { this.notifyChange('refresh', this.currentWatchPath) } } + + /** + * This method handles drag-and-drop file uploads by preserving the directory structure + * using the FileSystemEntry.fullPath property, which contains the relative path from + * the drag operation root. + * + * @param _ - IPC event (unused) + * @param entryData - File entry information from FileSystemEntry API + * @param entryData.fullPath - Relative path from drag root (e.g., "/tmp/xxx/file.md") + * @param entryData.isFile - Whether this is a file + * @param entryData.isDirectory - Whether this is a directory + * @param entryData.systemPath - Absolute file system path (from webUtils.getPathForFile) + * @param targetBasePath - Target directory where files should be uploaded + * @returns Promise resolving to upload result with created path + * + * @example + * // Drag ~/Users/me/tmp/xxx to Notes + * // Entry: { fullPath: "/tmp/xxx/file.md", systemPath: "/Users/me/tmp/xxx/file.md" } + * // Target: "/notes" + * // Result: Creates /notes/tmp/xxx/file.md + */ + public uploadFileEntry = async ( + _: Electron.IpcMainInvokeEvent, + entryData: { + fullPath: string + isFile: boolean + isDirectory: boolean + systemPath: string + }, + targetBasePath: string + ): Promise<{ success: boolean; targetPath: string }> => { + try { + // Normalize target base path + const normalizedBasePath = targetBasePath.replace(/\\/g, '/') + + // Build target path by joining base path with entry's relative fullPath + // Remove leading slash from fullPath to avoid path.join issues + const relativeFullPath = entryData.fullPath.startsWith('/') ? 
entryData.fullPath.slice(1) : entryData.fullPath + + const targetPath = path.join(normalizedBasePath, relativeFullPath) + + logger.debug('Uploading file entry:', { + fullPath: entryData.fullPath, + systemPath: entryData.systemPath, + targetBasePath: normalizedBasePath, + targetPath, + isFile: entryData.isFile, + isDirectory: entryData.isDirectory + }) + + if (entryData.isFile) { + // Ensure parent directory exists + const targetDir = path.dirname(targetPath) + if (!fs.existsSync(targetDir)) { + await fs.promises.mkdir(targetDir, { recursive: true }) + } + + // Use fileNameGuard to ensure unique filename + const originalFileName = path.basename(targetPath) + const nameWithoutExt = path.parse(originalFileName).name + const originalExt = path.parse(originalFileName).ext + const { safeName } = await this.fileNameGuard(_, targetDir, nameWithoutExt, true) + const finalExt = originalExt || '.md' // Only default to .md if no extension + const finalPath = path.join(targetDir, safeName + finalExt) + + // Copy file directly (works for both text and binary files) + await fs.promises.copyFile(entryData.systemPath, finalPath) + + logger.info('File uploaded successfully:', { source: entryData.systemPath, target: finalPath }) + + return { success: true, targetPath: finalPath } + } else if (entryData.isDirectory) { + // Create directory + if (!fs.existsSync(targetPath)) { + await fs.promises.mkdir(targetPath, { recursive: true }) + logger.info('Directory created:', { targetPath }) + } + + return { success: true, targetPath } + } else { + throw new Error('Entry is neither a file nor a directory') + } + } catch (error) { + logger.error('Failed to upload file entry:', error as Error, { + fullPath: entryData.fullPath, + systemPath: entryData.systemPath + }) + throw error + } + } + + /** + * Upload entire folder with recursive structure preservation + * + * This is a VS Code-inspired approach that handles all recursion in the Main process, + * providing better performance and cleaner architecture than the Renderer-based approach. 
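+ *
+ * Renderer-side sketch via the preload bridge (illustrative paths; the source folder name itself is preserved):
+ * // await window.api.file.uploadFolder('/User/tmp', '/notes', { allowedExtensions: ['.md', '.markdown'] })
+ * // → markdown files from /User/tmp are recreated under /notes/tmp/...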
+ * + * @param _ - IPC event (unused) + * @param folderPath - Source folder path to upload + * @param targetPath - Destination directory + * @param options - Upload options + * @returns Promise resolving to upload result with statistics + */ + public uploadFolder = async ( + _: Electron.IpcMainInvokeEvent, + folderPath: string, + targetPath: string, + options?: { + allowedExtensions?: string[] + } + ): Promise<{ + fileCount: number + folderCount: number + skippedFiles: number + }> => { + try { + logger.info('Starting folder upload', { folderPath, targetPath }) + + // Use existing listDirectory to get all files recursively + const allFiles = await this.listDirectory(_, folderPath, { + recursive: true, + includeFiles: true, + includeDirectories: false, + includeHidden: false + }) + + const allowedExtensions = options?.allowedExtensions || ['.md', '.markdown'] + + // Filter by allowed extensions + const validFiles = allFiles.filter((filePath: string) => { + const ext = path.extname(filePath).toLowerCase() + return allowedExtensions.includes(ext) + }) + + const skippedFiles = allFiles.length - validFiles.length + + if (validFiles.length === 0) { + logger.warn('No valid files found in folder', { folderPath, allowedExtensions }) + return { fileCount: 0, folderCount: 0, skippedFiles: allFiles.length } + } + + logger.info('Found valid files in folder', { + folderPath, + totalFiles: allFiles.length, + validFiles: validFiles.length, + skippedFiles + }) + + // Upload files with folder name preservation (VS Code behavior) + // Example: /User/tmp → target/tmp/... + const folderName = path.basename(folderPath) + const targetFolderRoot = path.join(targetPath, folderName) + + // Create target root folder + if (!fs.existsSync(targetFolderRoot)) { + await fs.promises.mkdir(targetFolderRoot, { recursive: true }) + } + + const result = await this.batchUpload(_, validFiles, targetFolderRoot, { + allowedExtensions + }) + + logger.info('Folder upload completed', { + folderPath, + targetPath, + result + }) + + return result + } catch (error) { + logger.error('Folder upload failed:', error as Error, { folderPath, targetPath }) + throw error + } + } + + /** + * Batch upload file entries with dynamic parallel processing (p-queue optimized) + * + * This method uses p-queue for dynamic task scheduling, ensuring maximum throughput + * by maintaining a constant level of concurrency. Unlike static batching, tasks are + * processed as soon as a slot becomes available, preventing blocking by slow files. 
+ * + * @param _ - IPC event (unused) + * @param entryDataList - Array of file entry information from FileSystemEntry API + * @param targetBasePath - Target directory where files should be uploaded + * @returns Promise resolving to batch upload result with statistics + * + * @example + * // Upload 100 files with dynamic scheduling (up to 20 concurrent) + * const result = await batchUploadEntries(_, entries, '/notes') + * // Result: { fileCount: 95, folderCount: 5, skippedFiles: 0 } + */ + public batchUploadEntries = async ( + _: Electron.IpcMainInvokeEvent, + entryDataList: Array<{ + fullPath: string + isFile: boolean + isDirectory: boolean + systemPath: string + }>, + targetBasePath: string, + options?: { + allowedExtensions?: string[] + } + ): Promise<{ + fileCount: number + folderCount: number + skippedFiles: number + }> => { + const CONCURRENCY = 20 // Maximum concurrent uploads (matching VS Code's approach) + let fileCount = 0 + let folderCount = 0 + let skippedFiles = 0 + + logger.info('Starting batch upload of entries with p-queue:', { + totalEntries: entryDataList.length, + targetBasePath, + concurrency: CONCURRENCY + }) + + try { + // Create queue with dynamic scheduling + const queue = new PQueue({ concurrency: CONCURRENCY }) + + // Track progress + let completed = 0 + const total = entryDataList.length + + const results = await Promise.allSettled( + entryDataList.map((entryData) => + queue.add( + async (): Promise< + | { status: 'skipped'; reason: string } + | { status: 'success'; isFile: boolean; isDirectory: boolean; targetPath: string } + > => { + try { + // Filter: only upload allowed files + if (entryData.isFile) { + const allowedExtensions = options?.allowedExtensions + if (allowedExtensions && allowedExtensions.length > 0) { + const lowerPath = entryData.fullPath.toLowerCase() + const isAllowed = allowedExtensions.some((ext) => lowerPath.endsWith(ext.toLowerCase())) + if (!isAllowed) { + return { status: 'skipped' as const, reason: 'extension not allowed' } + } + } else { + // Fallback to default filter if no options provided (backward compatibility) + const lowerPath = entryData.fullPath.toLowerCase() + const isMarkdown = lowerPath.endsWith('.md') || lowerPath.endsWith('.markdown') + const isImage = imageExts.some((ext) => lowerPath.endsWith(ext)) + + if (!isMarkdown && !isImage) { + return { status: 'skipped' as const, reason: 'not markdown or image' } + } + } + } + + // Upload the entry using the existing single-entry method + const result = await this.uploadFileEntry(_, entryData, targetBasePath) + + return { + status: 'success' as const, + isFile: entryData.isFile, + isDirectory: entryData.isDirectory, + targetPath: result.targetPath + } + } finally { + // Send progress update (throttled to avoid flooding) + completed++ + if (completed % 5 === 0 || completed === total) { + _.sender.send('file-upload-progress', { + completed, + total, + percentage: Math.round((completed / total) * 100) + }) + } + } + } + ) + ) + ) + + // Count results + results.forEach((result) => { + if (result.status === 'fulfilled' && result.value) { + const value = result.value + if (value.status === 'skipped') { + skippedFiles++ + } else if (value.status === 'success') { + if (value.isFile) { + fileCount++ + } else if (value.isDirectory) { + folderCount++ + } + } + } else if (result.status === 'rejected') { + logger.error('Failed to upload entry:', result.reason) + skippedFiles++ + } + }) + + logger.info('Batch upload completed:', { + totalProcessed: entryDataList.length, + fileCount, + 
folderCount, + skippedFiles + }) + + return { fileCount, folderCount, skippedFiles } + } catch (error) { + logger.error('Batch upload entries failed:', error as Error) + throw error + } + } } export const fileStorage = new FileStorage() diff --git a/src/main/services/__tests__/FileStorage.test.ts b/src/main/services/__tests__/FileStorage.test.ts new file mode 100644 index 0000000000..8921e38476 --- /dev/null +++ b/src/main/services/__tests__/FileStorage.test.ts @@ -0,0 +1,133 @@ +import * as fs from 'fs' +import { beforeEach, describe, expect, it, vi } from 'vitest' + +import { fileStorage } from '../FileStorage' + +describe('FileStorage', () => { + beforeEach(() => { + // Reset mocks before each test + vi.clearAllMocks() + // Setup default fs mocks to prevent directory creation during tests + vi.mocked(fs.existsSync).mockReturnValue(true) + vi.mocked(fs.mkdirSync).mockReturnValue(undefined) + }) + + describe('batchUpload', () => { + const mockEvent = {} as Electron.IpcMainInvokeEvent + + beforeEach(() => { + // Setup fs mocks + vi.mocked(fs.promises.mkdir).mockResolvedValue(undefined) + vi.mocked(fs.promises.readFile).mockResolvedValue('# Test content') + vi.mocked(fs.promises.writeFile).mockResolvedValue() + vi.mocked(fs.existsSync).mockReturnValue(false) + }) + + it('should allow all files by default', async () => { + const filePaths = ['/src/test.md', '/src/image.png', '/src/doc.markdown', '/src/script.js'] + + const result = await fileStorage.batchUpload(mockEvent, filePaths, '/target') + + expect(result.fileCount).toBe(4) + expect(result.skippedFiles).toBe(0) + }) + + it('should filter by allowed extensions', async () => { + const filePaths = ['/src/a.txt', '/src/b.txt', '/src/c.md'] + + const result = await fileStorage.batchUpload(mockEvent, filePaths, '/target', { + allowedExtensions: ['.txt'], + fileNameTransform: (name) => name // Keep original name + }) + + expect(result.fileCount).toBe(2) // Only .txt files + expect(result.skippedFiles).toBe(1) + }) + + it('should preserve folder structure', async () => { + const filePaths = ['/source/a.md', '/source/sub/b.md', '/source/sub/deep/c.md'] + + await fileStorage.batchUpload(mockEvent, filePaths, '/target') + + // Check mkdir was called for subdirectories + expect(fs.promises.mkdir).toHaveBeenCalled() + }) + + // preserveFolderRoot functionality has been moved to uploadFolder API + + it('should handle empty file list', async () => { + const result = await fileStorage.batchUpload(mockEvent, [], '/target') + + expect(result.fileCount).toBe(0) + expect(result.folderCount).toBe(0) + expect(result.skippedFiles).toBe(0) + }) + + it('should skip all files if allowed extensions do not match', async () => { + const filePaths = ['/src/a.md', '/src/b.md'] + + const result = await fileStorage.batchUpload(mockEvent, filePaths, '/target', { + allowedExtensions: ['.txt'] + }) + + expect(result.fileCount).toBe(0) + expect(result.skippedFiles).toBe(2) + }) + + it('should transform filenames', async () => { + const filePaths = ['/src/test.txt'] + + await fileStorage.batchUpload(mockEvent, filePaths, '/target', { + fileNameTransform: (name) => name.replace('.txt', '.md') + }) + + // Check that writeFile was called with .md extension + expect(fs.promises.writeFile).toHaveBeenCalled() + const calls = vi.mocked(fs.promises.writeFile).mock.calls + const targetPath = calls[0][0] as string + expect(targetPath).toMatch(/\.md$/) + }) + + it('should handle single file upload', async () => { + const filePaths = ['/source/single.md'] + + const result = await 
fileStorage.batchUpload(mockEvent, filePaths, '/target') + + expect(result.fileCount).toBe(1) + expect(result.folderCount).toBe(0) + expect(result.skippedFiles).toBe(0) + }) + + it('should create nested directories', async () => { + // Use multiple files at different depths to force nested directory creation + const filePaths = ['/source/a/b/c/deep.md', '/source/shallow.md'] + + await fileStorage.batchUpload(mockEvent, filePaths, '/target') + + // Should create nested directories (a, a/b, a/b/c) + expect(fs.promises.mkdir).toHaveBeenCalled() + expect(fs.promises.writeFile).toHaveBeenCalled() + }) + + it('should handle file read/write errors gracefully', async () => { + const filePaths = ['/source/test.md'] + + vi.spyOn(fs.promises, 'readFile').mockRejectedValueOnce(new Error('Read failed')) + + const result = await fileStorage.batchUpload(mockEvent, filePaths, '/target') + + // Should not throw, but report 0 successful uploads + expect(result.fileCount).toBe(0) + }) + + it('should process files in batches', async () => { + // Create 25 files (more than BATCH_SIZE of 10) + const filePaths = Array.from({ length: 25 }, (_, i) => `/source/file${i}.md`) + + await fileStorage.batchUpload(mockEvent, filePaths, '/target') + + // All files should be processed + expect(fs.promises.writeFile).toHaveBeenCalledTimes(25) + }) + }) +}) diff --git a/src/main/utils/__tests__/file.test.ts b/src/main/utils/__tests__/file.test.ts index f6f6d2c40e..5514c61df3 100644 --- a/src/main/utils/__tests__/file.test.ts +++ b/src/main/utils/__tests__/file.test.ts @@ -10,6 +10,7 @@ import { afterEach, beforeEach, describe, expect, it, vi } from 'vitest' import { readTextFileWithAutoEncoding } from '../file' import { + findCommonRoot, getAllFiles, getAppConfigDir, getConfigDir, @@ -480,4 +481,80 @@ describe('file', () => { ) }) }) + + describe('findCommonRoot', () => { + beforeEach(() => { + // Mock path module for findCommonRoot tests + vi.mocked(path.dirname).mockImplementation((filePath) => { + const parts = filePath.split('/') + parts.pop() + return parts.join('/') || '/' + }) + + // Mock path.sep as '/' for Unix tests + Object.defineProperty(path, 'sep', { value: '/', writable: true, configurable: true }) + }) + + it('should return empty string for empty array', () => { + const result = findCommonRoot([]) + expect(result).toBe('') + }) + + it('should return parent directory for single file', () => { + const result = findCommonRoot(['/User/tmp/file.md']) + expect(result).toBe('/User/tmp') + }) + + it('should find common root for files in same directory', () => { + const files = ['/User/tmp/a.md', '/User/tmp/b.md', '/User/tmp/c.md'] + const result = findCommonRoot(files) + expect(result).toBe('/User/tmp') + }) + + it('should find common root for nested files', () => { + const files = ['/User/tmp/sub/1.md', '/User/tmp/2.md', '/User/tmp/sub/deep/3.md'] + const result = findCommonRoot(files) + expect(result).toBe('/User/tmp') + }) + + it('should find common root for files in different branches', () => { + const files = ['/User/tmp/a/1.md', '/User/tmp/b/2.md', '/User/tmp/c/d/3.md'] + const result = findCommonRoot(files) + expect(result).toBe('/User/tmp') + }) + + it('should handle files with different directory depths', () => { + const files = ['/a/b/c/d/e/file.md', '/a/b/x.md'] + const result = findCommonRoot(files) + expect(result).toBe('/a/b') + }) + + it('should handle root level files', () => { + const files = ['/a.md', '/b.md'] + const result = findCommonRoot(files) + expect(result).toBe('/') + }) + + it('should 
handle Windows paths', () => { + // Skip on non-Windows platforms as path.sep differs + if (process.platform !== 'win32') { + // On Unix, test Unix paths instead + const files = ['/C/Users/tmp/a.md', '/C/Users/tmp/b.md'] + const result = findCommonRoot(files) + expect(result).toBe('/C/Users/tmp') + } else { + // Mock for Windows + Object.defineProperty(path, 'sep', { value: '\\', writable: true }) + vi.mocked(path.dirname).mockImplementation((filePath) => { + const parts = filePath.split('\\') + parts.pop() + return parts.join('\\') || 'C:\\' + }) + + const files = ['C:\\Users\\tmp\\a.md', 'C:\\Users\\tmp\\b.md'] + const result = findCommonRoot(files) + expect(result).toBe('C:\\Users\\tmp') + } + }) + }) }) diff --git a/src/main/utils/file.ts b/src/main/utils/file.ts index 1432dccc8a..2612562401 100644 --- a/src/main/utils/file.ts +++ b/src/main/utils/file.ts @@ -5,6 +5,7 @@ import os from 'node:os' import path from 'node:path' import { loggerService } from '@logger' +import { isWin } from '@main/constant' import { audioExts, documentExts, HOME_CHERRY_DIR, imageExts, MB, textExts, videoExts } from '@shared/config/constant' import type { FileMetadata, NotesTreeNode } from '@types' import { FileTypes } from '@types' @@ -437,3 +438,49 @@ export function sanitizeFilename(fileName: string, replacement = '_'): string { return sanitized } + +/** + * Find the common root directory of multiple file paths + * + * Examples: + * - [/a/b/c/1.md, /a/b/c/2.md] => /a/b/c + * - [/a/b/c/1.md, /a/b/d/2.md] => /a/b + * - [/a/b/c/sub/1.md, /a/b/c/2.md] => /a/b/c + */ +export function findCommonRoot(filePaths: string[]): string { + if (filePaths.length === 0) { + return '' + } + + if (filePaths.length === 1) { + // Single file: use its parent directory + return path.dirname(filePaths[0]) + } + + // Get all parent directories + const allDirs = filePaths.map((p) => path.dirname(p)) + + // Split into path components + const pathComponents = allDirs.map((dir) => dir.split(path.sep)) + + // Find common prefix + const commonParts: string[] = [] + const minLength = Math.min(...pathComponents.map((parts) => parts.length)) + + for (let i = 0; i < minLength; i++) { + const part = pathComponents[0][i] + const allMatch = pathComponents.every((parts) => parts[i] === part) + + if (allMatch) { + commonParts.push(part) + } else { + break + } + } + + // Join back to path + const commonRoot = commonParts.join(path.sep) + + // Ensure we return at least the root directory + return commonRoot || (isWin ? 
pathComponents[0][0] : '/') +} diff --git a/src/preload/index.ts b/src/preload/index.ts index 25b1064d49..0a3b141110 100644 --- a/src/preload/index.ts +++ b/src/preload/index.ts @@ -223,8 +223,44 @@ const api = { stopFileWatcher: () => ipcRenderer.invoke(IpcChannel.File_StopWatcher), pauseFileWatcher: () => ipcRenderer.invoke(IpcChannel.File_PauseWatcher), resumeFileWatcher: () => ipcRenderer.invoke(IpcChannel.File_ResumeWatcher), - batchUploadMarkdown: (filePaths: string[], targetPath: string) => - ipcRenderer.invoke(IpcChannel.File_BatchUploadMarkdown, filePaths, targetPath), + batchUpload: ( + filePaths: string[], + targetPath: string, + options?: { + allowedExtensions?: string[] + fileNameTransform?: (fileName: string) => string + } + ) => ipcRenderer.invoke(IpcChannel.File_BatchUpload, filePaths, targetPath, options), + uploadFolder: ( + folderPath: string, + targetPath: string, + options?: { + allowedExtensions?: string[] + } + ) => ipcRenderer.invoke(IpcChannel.File_UploadFolder, folderPath, targetPath, options), + uploadEntry: ( + entryData: { + fullPath: string + isFile: boolean + isDirectory: boolean + systemPath: string + }, + targetBasePath: string + ): Promise<{ success: boolean; targetPath: string }> => + ipcRenderer.invoke(IpcChannel.File_UploadEntry, entryData, targetBasePath), + batchUploadEntries: ( + entryDataList: Array<{ + fullPath: string + isFile: boolean + isDirectory: boolean + systemPath: string + }>, + targetBasePath: string, + options?: { + allowedExtensions?: string[] + } + ): Promise<{ fileCount: number; folderCount: number; skippedFiles: number }> => + ipcRenderer.invoke(IpcChannel.File_BatchUploadEntries, entryDataList, targetBasePath, options), onFileChange: (callback: (data: FileChangeEvent) => void) => { const listener = (_event: Electron.IpcRendererEvent, data: any) => { if (data && typeof data === 'object') { @@ -234,6 +270,15 @@ const api = { ipcRenderer.on('file-change', listener) return () => ipcRenderer.off('file-change', listener) }, + onUploadProgress: (callback: (data: { completed: number; total: number; percentage: number }) => void) => { + const listener = (_event: Electron.IpcRendererEvent, data: any) => { + if (data && typeof data === 'object') { + callback(data) + } + } + ipcRenderer.on('file-upload-progress', listener) + return () => ipcRenderer.off('file-upload-progress', listener) + }, showInFolder: (path: string): Promise => ipcRenderer.invoke(IpcChannel.File_ShowInFolder, path) }, fs: { diff --git a/src/renderer/src/components/RichEditor/extensions/relative-image-resolver.ts b/src/renderer/src/components/RichEditor/extensions/relative-image-resolver.ts new file mode 100644 index 0000000000..81380271f6 --- /dev/null +++ b/src/renderer/src/components/RichEditor/extensions/relative-image-resolver.ts @@ -0,0 +1,127 @@ +import { Extension } from '@tiptap/core' +import { Plugin } from '@tiptap/pm/state' + +/** + * Resolves relative image paths to absolute file:// URLs dynamically during rendering + * This keeps markdown files portable while allowing proper image display + */ +export const RelativeImageResolver = Extension.create({ + name: 'relativeImageResolver', + + addOptions() { + return { + // Current markdown file path for resolving relative paths + currentFilePath: undefined as string | undefined + } + }, + + addProseMirrorPlugins() { + const { currentFilePath } = this.options + + if (!currentFilePath) { + return [] + } + + return [ + new Plugin({ + // Apply view plugin for post-render processing + view(view) { + const resolveImages = 
() => { + const dom = view.dom + const images = dom.querySelectorAll('img[src]') + + images.forEach((img) => { + if (img instanceof HTMLImageElement) { + const src = img.getAttribute('src') + if (src && isRelativePath(src)) { + const resolvedSrc = resolveRelativePath(src, currentFilePath) + img.setAttribute('src', resolvedSrc) + } + } + }) + } + + // Initial resolution + setTimeout(resolveImages, 0) + + // Set up a mutation observer to handle dynamically added images + const observer = new MutationObserver((mutations) => { + let shouldResolve = false + for (const mutation of mutations) { + if (mutation.type === 'childList' && mutation.addedNodes.length > 0) { + // Check if any added nodes contain images + for (const node of mutation.addedNodes) { + if (node.nodeType === Node.ELEMENT_NODE) { + const element = node as Element + if (element.tagName === 'IMG' || element.querySelector('img')) { + shouldResolve = true + break + } + } + } + } + } + if (shouldResolve) { + setTimeout(resolveImages, 0) + } + }) + + observer.observe(view.dom, { + childList: true, + subtree: true + }) + + return { + destroy: () => { + observer.disconnect() + } + } + } + }) + ] + } +}) + +/** + * Checks if a path is relative (not starting with http://, https://, file://, or /) + */ +function isRelativePath(path: string): boolean { + return !path.match(/^[a-zA-Z][a-zA-Z0-9+.-]*:\/\//) && !path.startsWith('/') +} + +/** + * Resolves a relative path against a base directory to create an absolute file:// URL + */ +function resolveRelativePath(relativePath: string, baseFilePath: string): string { + // Remove any './' prefix and normalize path separators + const normalizedRelative = relativePath.replace(/^\.\//, '').replace(/\\/g, '/') + + // Get the directory of the current file + const baseDirectory = baseFilePath ? baseFilePath.substring(0, baseFilePath.lastIndexOf('/')) : '' + + if (!baseDirectory) { + return relativePath + } + + // Combine base directory with relative path + const combinedPath = baseDirectory + '/' + normalizedRelative + + // Handle '..' 
segments + const pathSegments = combinedPath.split('/') + const resolvedSegments: string[] = [] + + for (const segment of pathSegments) { + if (segment === '..') { + resolvedSegments.pop() // Remove the previous segment + } else if (segment !== '') { + resolvedSegments.push(segment) + } + } + + // Reconstruct the path + const resolvedPath = '/' + resolvedSegments.join('/') + + // Convert to file:// URL with proper URL encoding + const encodedPath = encodeURI(resolvedPath) + return 'file://' + encodedPath +} diff --git a/src/renderer/src/components/RichEditor/index.tsx b/src/renderer/src/components/RichEditor/index.tsx index 793ccda1ae..76ebf19b57 100644 --- a/src/renderer/src/components/RichEditor/index.tsx +++ b/src/renderer/src/components/RichEditor/index.tsx @@ -200,7 +200,8 @@ const RichEditor = ({ isFullWidth = false, fontFamily = 'default', fontSize = 16, - enableSpellCheck = false + enableSpellCheck = false, + currentFilePath // toolbarItems: _toolbarItems // TODO: Implement custom toolbar items }: RichEditorProps & { ref?: React.RefObject }) => { // Use the rich editor hook for complete editor management @@ -225,6 +226,7 @@ const RichEditor = ({ editable, enableSpellCheck, scrollParent: () => scrollContainerRef.current, + currentFilePath, onShowTableActionMenu: ({ position, actions }) => { const iconMap: Record = { insertRowBefore: , diff --git a/src/renderer/src/components/RichEditor/styles.ts b/src/renderer/src/components/RichEditor/styles.ts index 6acb943528..7155393168 100644 --- a/src/renderer/src/components/RichEditor/styles.ts +++ b/src/renderer/src/components/RichEditor/styles.ts @@ -55,13 +55,13 @@ export const ToolbarWrapper = styled.div` gap: 4px; padding: 4px 8px; border-bottom: 1px solid var(--color-border); - background: var(--color-background-soft); + background: var(--rich-editor-toolbar-bg, var(--color-background-soft)); overflow-x: auto; overflow-y: hidden; white-space: nowrap; &::-webkit-scrollbar-track { - background: var(--color-background-soft); + background: var(--rich-editor-toolbar-bg, var(--color-background-soft)); } &::-webkit-scrollbar-thumb { diff --git a/src/renderer/src/components/RichEditor/types.ts b/src/renderer/src/components/RichEditor/types.ts index 8dcadbb664..e47d445411 100644 --- a/src/renderer/src/components/RichEditor/types.ts +++ b/src/renderer/src/components/RichEditor/types.ts @@ -52,6 +52,8 @@ export interface RichEditorProps { fontSize?: number /** Whether to enable spell check */ enableSpellCheck?: boolean + /** Current markdown file path for resolving relative image paths */ + currentFilePath?: string } export interface ToolbarItem { diff --git a/src/renderer/src/components/RichEditor/useRichEditor.ts b/src/renderer/src/components/RichEditor/useRichEditor.ts index 576162fac7..547ad1815d 100644 --- a/src/renderer/src/components/RichEditor/useRichEditor.ts +++ b/src/renderer/src/components/RichEditor/useRichEditor.ts @@ -33,6 +33,7 @@ import { EnhancedImage } from './extensions/enhanced-image' import { EnhancedLink } from './extensions/enhanced-link' import { EnhancedMath } from './extensions/enhanced-math' import { Placeholder } from './extensions/placeholder' +import { RelativeImageResolver } from './extensions/relative-image-resolver' import { YamlFrontMatter } from './extensions/yaml-front-matter' import { blobToArrayBuffer, compressImage, shouldCompressImage } from './helpers/imageUtils' @@ -94,6 +95,8 @@ export interface UseRichEditorOptions { actions: { id: string; label: string; action: () => void }[] }) => void scrollParent?: 
() => HTMLElement | null + /** Current markdown file path for resolving relative image paths */ + currentFilePath?: string } export interface UseRichEditorReturn { @@ -157,7 +160,8 @@ export const useRichEditor = (options: UseRichEditorOptions = {}): UseRichEditor editable = true, enableSpellCheck = false, onShowTableActionMenu, - scrollParent + scrollParent, + currentFilePath } = options const [markdown, setMarkdownState] = useState(initialContent) @@ -236,6 +240,9 @@ export const useRichEditor = (options: UseRichEditorOptions = {}): UseRichEditor onLinkHoverEnd: handleLinkHoverEnd, editable: editable }), + RelativeImageResolver.configure({ + currentFilePath + }), TableOfContents.configure({ getIndex: getHierarchicalIndexes, onUpdate(content) { @@ -383,7 +390,7 @@ export const useRichEditor = (options: UseRichEditorOptions = {}): UseRichEditor }) ], // eslint-disable-next-line react-hooks/exhaustive-deps - [placeholder, activeShikiTheme, handleLinkHover, handleLinkHoverEnd] + [placeholder, activeShikiTheme, handleLinkHover, handleLinkHoverEnd, currentFilePath] ) const editor = useEditor({ diff --git a/src/renderer/src/components/VirtualList/dynamic.tsx b/src/renderer/src/components/VirtualList/dynamic.tsx index d1244ecedb..3f75e12584 100644 --- a/src/renderer/src/components/VirtualList/dynamic.tsx +++ b/src/renderer/src/components/VirtualList/dynamic.tsx @@ -97,6 +97,12 @@ export interface DynamicVirtualListProps extends InheritedVirtualizerOptions * Additional CSS class name for the container */ className?: string + + /** + * Additional DOM attributes/handlers for the scroll container + * e.g. onClick, onContextMenu for parent wrappers like antd Dropdown + */ + containerProps?: React.HTMLAttributes } function DynamicVirtualList(props: DynamicVirtualListProps) { @@ -114,9 +120,13 @@ function DynamicVirtualList(props: DynamicVirtualListProps) { autoHideScrollbar = false, header, className, + containerProps, ...restOptions } = props + // Forward provided container props to the scroll container + const domEventHandlers = containerProps ?? 
({} as React.DOMAttributes) + const [showScrollbar, setShowScrollbar] = useState(!autoHideScrollbar) const timeoutRef = useRef(null) const internalScrollerRef = useRef(null) @@ -240,6 +250,7 @@ function DynamicVirtualList(props: DynamicVirtualListProps) { return ( + style={{ + justifyContent: 'flex-start', + borderBottom: '0.5px solid var(--color-border)', + borderTopLeftRadius: 10, + borderTopRightRadius: 10 + }}> {showWorkspace && ( diff --git a/src/renderer/src/pages/notes/NotesEditor.tsx b/src/renderer/src/pages/notes/NotesEditor.tsx index 5ec76441ab..9a805effa6 100644 --- a/src/renderer/src/pages/notes/NotesEditor.tsx +++ b/src/renderer/src/pages/notes/NotesEditor.tsx @@ -24,10 +24,11 @@ interface NotesEditorProps { editorRef: RefObject codeEditorRef: RefObject onMarkdownChange: (content: string) => void + currentFilePath?: string } const NotesEditor: FC = memo( - ({ activeNodeId, currentContent, tokenCount, onMarkdownChange, editorRef, codeEditorRef }) => { + ({ activeNodeId, currentContent, tokenCount, onMarkdownChange, editorRef, codeEditorRef, currentFilePath }) => { const { t } = useTranslation() const dispatch = useAppDispatch() const { settings } = useNotesSettings() @@ -90,6 +91,7 @@ const NotesEditor: FC = memo( fontFamily={settings.fontFamily} fontSize={settings.fontSize} enableSpellCheck={enableSpellCheck} + currentFilePath={currentFilePath} /> )} @@ -156,6 +158,8 @@ const RichEditorContainer = styled.div` transition: opacity 0.2s ease-in-out; .notes-rich-editor { + /* Set RichEditor toolbar background for Notes context */ + --rich-editor-toolbar-bg: var(--color-background); border: none; border-radius: 0; flex: 1; diff --git a/src/renderer/src/pages/notes/NotesPage.tsx b/src/renderer/src/pages/notes/NotesPage.tsx index 7692aa9975..f88fa34a23 100644 --- a/src/renderer/src/pages/notes/NotesPage.tsx +++ b/src/renderer/src/pages/notes/NotesPage.tsx @@ -75,7 +75,11 @@ const NotesPage: FC = () => { const [tokenCount, setTokenCount] = useState(0) const [selectedFolderId, setSelectedFolderId] = useState(null) + const [uploadProgress, setUploadProgress] = useState<{ completed: number; total: number; percentage: number } | null>( + null + ) const watcherRef = useRef<(() => void) | null>(null) + const uploadProgressListenerRef = useRef<(() => void) | null>(null) const lastContentRef = useRef('') const lastFilePathRef = useRef(undefined) const isRenamingRef = useRef(false) @@ -164,6 +168,34 @@ const NotesPage: FC = () => { refreshTree() }, [refreshTree]) + // Setup upload progress listener + useEffect(() => { + // Clean up previous listener + if (uploadProgressListenerRef.current) { + uploadProgressListenerRef.current() + uploadProgressListenerRef.current = null + } + + // Set up new listener + uploadProgressListenerRef.current = window.api.file.onUploadProgress((data) => { + setUploadProgress(data) + + // Auto-hide progress after completion + if (data.completed === data.total) { + setTimeout(() => { + setUploadProgress(null) + }, 1500) + } + }) + + return () => { + if (uploadProgressListenerRef.current) { + uploadProgressListenerRef.current() + uploadProgressListenerRef.current = null + } + } + }, []) + // Re-merge tree state when starred or expanded paths change useEffect(() => { if (notesTree.length > 0) { @@ -534,6 +566,7 @@ const NotesPage: FC = () => { logger.error('Failed to load note:', error as Error) } } else if (node.type === 'folder') { + // 点击文件夹时不切换/清空当前笔记,仅折叠/展开并记录所选文件夹 setSelectedFolderId(node.id) handleToggleExpanded(node.id) } @@ -541,6 +574,12 @@ const NotesPage: FC 
= () => { [dispatch, handleToggleExpanded, invalidateFileContent] ) + // 选中根(清空选择) + const handleSelectRoot = useCallback(() => { + dispatch(setActiveFilePath(undefined)) + setSelectedFolderId(null) + }, [dispatch]) + // 删除节点 const handleDeleteNode = useCallback( async (nodeId: string) => { @@ -875,7 +914,17 @@ const NotesPage: FC = () => { {t('notes.title')} - + {uploadProgress && ( + + + {t('notes.uploading')} {uploadProgress.completed} / {uploadProgress.total} ({uploadProgress.percentage}%) + + + + + + )} + {showWorkspace && ( { animate={{ width: 250, opacity: 1 }} exit={{ width: 0, opacity: 0 }} transition={{ duration: 0.3, ease: 'easeInOut' }} - style={{ overflow: 'hidden' }}> + style={{ overflow: 'hidden', marginRight: 8, borderRadius: 10 }}> { onMoveNode={handleMoveNode} onSortNodes={handleSortNodes} onUploadFiles={handleUploadFiles} + notesPath={notesPath} + refreshTree={refreshTree} /> )} @@ -916,6 +968,7 @@ const NotesPage: FC = () => { onMarkdownChange={handleMarkdownChange} editorRef={editorRef} codeEditorRef={codeEditorRef} + currentFilePath={activeFilePath} /> @@ -930,6 +983,35 @@ const Container = styled.div` width: 100%; ` +const UploadProgressBar = styled.div` + background: var(--color-background-soft); + border-bottom: 1px solid var(--color-border); + padding: 8px 16px; + display: flex; + flex-direction: column; + gap: 6px; +` + +const ProgressText = styled.div` + font-size: 12px; + color: var(--color-text-2); +` + +const ProgressBarContainer = styled.div` + width: 100%; + height: 4px; + background: var(--color-background-mute); + border-radius: 2px; + overflow: hidden; +` + +const ProgressBarFill = styled.div` + height: 100%; + background: var(--color-primary); + transition: width 0.2s ease; + border-radius: 2px; +` + const ContentContainer = styled.div` display: flex; flex: 1; @@ -948,6 +1030,8 @@ const EditorWrapper = styled.div` overflow: hidden; min-height: 0; min-width: 0; + border-radius: 10px; + background: var(--color-background); ` export default NotesPage diff --git a/src/renderer/src/pages/notes/NotesSidebar.tsx b/src/renderer/src/pages/notes/NotesSidebar.tsx index 6ed144dd7e..94ccd59033 100644 --- a/src/renderer/src/pages/notes/NotesSidebar.tsx +++ b/src/renderer/src/pages/notes/NotesSidebar.tsx @@ -1,6 +1,7 @@ import { DynamicVirtualList } from '@renderer/components/VirtualList' import { useActiveNode } from '@renderer/hooks/useNotesQuery' import NotesSidebarHeader from '@renderer/pages/notes/NotesSidebarHeader' +import { findNode } from '@renderer/services/NotesService' import { useAppSelector } from '@renderer/store' import { selectSortType } from '@renderer/store/note' import type { NotesSortType, NotesTreeNode } from '@renderer/types/note' @@ -31,6 +32,7 @@ interface NotesSidebarProps { onCreateFolder: (name: string, targetFolderId?: string) => void onCreateNote: (name: string, targetFolderId?: string) => void onSelectNode: (node: NotesTreeNode) => void + onSelectRoot: () => void onDeleteNode: (nodeId: string) => void onRenameNode: (nodeId: string, newName: string) => void onToggleExpanded: (nodeId: string) => void @@ -40,12 +42,15 @@ interface NotesSidebarProps { onUploadFiles: (files: File[]) => void notesTree: NotesTreeNode[] selectedFolderId?: string | null + notesPath?: string + refreshTree?: () => Promise } const NotesSidebar: FC = ({ onCreateFolder, onCreateNote, onSelectNode, + onSelectRoot, onDeleteNode, onRenameNode, onToggleExpanded, @@ -54,10 +59,13 @@ const NotesSidebar: FC = ({ onSortNodes, onUploadFiles, notesTree, - selectedFolderId + 
selectedFolderId, + notesPath, + refreshTree }) => { const { t } = useTranslation() const { activeNode } = useActiveNode(notesTree) + const isRootSelected = !selectedFolderId && !activeNode?.id const sortType = useAppSelector(selectSortType) const [isShowStarred, setIsShowStarred] = useState(false) @@ -87,7 +95,17 @@ const NotesSidebar: FC = ({ const { handleDropFiles, handleSelectFiles, handleSelectFolder } = useNotesFileUpload({ onUploadFiles, - setIsDragOverSidebar + setIsDragOverSidebar, + getTargetFolderPath: () => { + if (selectedFolderId) { + const selectedNode = findNode(notesTree, selectedFolderId) + if (selectedNode && selectedNode.type === 'folder') { + return selectedNode.externalPath + } + } + return notesPath || '' + }, + refreshTree }) const { getMenuItems } = useNotesMenu({ @@ -243,7 +261,24 @@ const NotesSidebar: FC = ({ return filteredNodes.map((node) => ({ node, depth: 0 })) } - return flattenForVirtualization(notesTree) + const normalNodes = flattenForVirtualization(notesTree) + + // Add hint-node to the end + return [ + ...normalNodes, + { + node: { + id: 'hint-node', + name: '', + type: 'hint' as const, + treePath: '', + externalPath: '', + createdAt: '', + updatedAt: '' + }, + depth: 0 + } + ] }, [notesTree, isShowStarred, isShowSearch, hasSearchKeyword, searchResults]) // Scroll to active node @@ -349,6 +384,7 @@ const NotesSidebar: FC = ({ { e.preventDefault() if (!draggedNodeId) { @@ -403,32 +439,38 @@ const NotesSidebar: FC = ({ 28} + scrollerStyle={{ flex: 1, minHeight: 0, height: 'auto' }} itemContainerStyle={{ padding: '8px 8px 0 8px' }} overscan={10} isSticky={isSticky} - getItemDepth={getItemDepth}> - {({ node, depth }) => } + getItemDepth={getItemDepth} + containerProps={{ + onContextMenu: (e) => { + const target = e.target as HTMLElement + if (!target.closest('[data-index]')) { + onSelectRoot() + setOpenDropdownKey('empty-area') + } + }, + onClick: (e) => { + const target = e.target as HTMLElement + if (!target.closest('[data-index]')) { + onSelectRoot() + } + } + }}> + {({ node, depth }) => ( + + )} - {!isShowStarred && !isShowSearch && ( -
- -
- )} {isDragOverSidebar && } @@ -442,16 +484,19 @@ const NotesSidebar: FC = ({ ) } -export const SidebarContainer = styled.div` +export const SidebarContainer = styled.div<{ $rootSelected?: boolean }>` width: 250px; min-width: 250px; - height: calc(100vh - var(--navbar-height)); + height: 100%; background-color: var(--color-background); border-right: 0.5px solid var(--color-border); - border-top-left-radius: 10px; + border-bottom: 0.5px solid var(--color-border); + border-radius: 10px; display: flex; flex-direction: column; position: relative; + box-shadow: ${({ $rootSelected }) => ($rootSelected ? '0 0 0 2px var(--color-primary) inset' : 'none')}; + transition: box-shadow 0.15s ease; ` export const NotesTreeContainer = styled.div` @@ -459,7 +504,7 @@ export const NotesTreeContainer = styled.div` overflow: hidden; display: flex; flex-direction: column; - height: calc(100vh - var(--navbar-height) - 45px); + min-height: 0; ` export const DragOverIndicator = styled.div` diff --git a/src/renderer/src/pages/notes/hooks/useNotesFileUpload.ts b/src/renderer/src/pages/notes/hooks/useNotesFileUpload.ts index aba1a90992..4eec1c615b 100644 --- a/src/renderer/src/pages/notes/hooks/useNotesFileUpload.ts +++ b/src/renderer/src/pages/notes/hooks/useNotesFileUpload.ts @@ -1,49 +1,110 @@ +import { loggerService } from '@logger' import { useCallback } from 'react' +import { useTranslation } from 'react-i18next' + +const logger = loggerService.withContext('useNotesFileUpload') interface UseNotesFileUploadProps { onUploadFiles: (files: File[]) => void setIsDragOverSidebar: (isDragOver: boolean) => void + getTargetFolderPath?: () => string | null + refreshTree?: () => Promise } -export const useNotesFileUpload = ({ onUploadFiles, setIsDragOverSidebar }: UseNotesFileUploadProps) => { +export const useNotesFileUpload = ({ + onUploadFiles, + setIsDragOverSidebar, + getTargetFolderPath, + refreshTree +}: UseNotesFileUploadProps) => { + const { t } = useTranslation() + + /** + * Handle drag-and-drop file uploads (VS Code-inspired approach) + * Uses FileSystemEntry.fullPath to preserve the complete directory structure + * This ensures dragging ~/Users/me/tmp/xxx creates target/tmp/xxx + */ const handleDropFiles = useCallback( async (e: React.DragEvent) => { e.preventDefault() setIsDragOverSidebar(false) - // 处理文件夹拖拽:从 dataTransfer.items 获取完整文件路径信息 const items = Array.from(e.dataTransfer.items) - const files: File[] = [] + if (items.length === 0) return - const processEntry = async (entry: FileSystemEntry, path: string = '') => { + // Collect all entries with their fullPath preserved + const entryDataList: Array<{ + fullPath: string + isFile: boolean + isDirectory: boolean + systemPath: string + }> = [] + + const processEntry = async (entry: FileSystemEntry): Promise => { if (entry.isFile) { const fileEntry = entry as FileSystemFileEntry return new Promise((resolve) => { - fileEntry.file((file) => { - // 手动设置 webkitRelativePath 以保持文件夹结构 - Object.defineProperty(file, 'webkitRelativePath', { - value: path + file.name, - writable: false - }) - files.push(file) + fileEntry.file(async (file) => { + // Get real system path using Electron's webUtils + const systemPath = window.api.file.getPathForFile(file) + if (systemPath) { + entryDataList.push({ + fullPath: entry.fullPath, // e.g., "/tmp/xxx/subfolder/file.md" + isFile: true, + isDirectory: false, + systemPath + }) + } resolve() }) }) } else if (entry.isDirectory) { const dirEntry = entry as FileSystemDirectoryEntry const reader = dirEntry.createReader() - return new 
Promise((resolve) => { - reader.readEntries(async (entries) => { - const promises = entries.map((subEntry) => processEntry(subEntry, path + entry.name + '/')) - await Promise.all(promises) - resolve() - }) + + // Add directory entry + entryDataList.push({ + fullPath: entry.fullPath, + isFile: false, + isDirectory: true, + systemPath: '' // Directories don't have systemPath (will be created) + }) + + // IMPORTANT: readEntries() has a browser limit of ~100 entries per call + // We need to call it repeatedly until it returns an empty array + return new Promise((resolve, reject) => { + const readAllEntries = () => { + reader.readEntries( + async (entries) => { + if (entries.length === 0) { + // No more entries, we're done + resolve() + return + } + + try { + // Process current batch + const promises = entries.map((subEntry) => processEntry(subEntry)) + await Promise.all(promises) + + // Read next batch + readAllEntries() + } catch (error) { + reject(error) + } + }, + (error) => { + reject(error) + } + ) + } + + readAllEntries() }) } } - // 如果支持 DataTransferItem API(文件夹拖拽) - if (items.length > 0 && items[0].webkitGetAsEntry()) { + if (items[0]?.webkitGetAsEntry()) { const promises = items.map((item) => { const entry = item.webkitGetAsEntry() return entry ? processEntry(entry) : Promise.resolve() @@ -51,10 +112,12 @@ export const useNotesFileUpload = ({ onUploadFiles, setIsDragOverSidebar }: UseN await Promise.all(promises) - if (files.length > 0) { - onUploadFiles(files) + if (entryDataList.length > 0) { + // Pass entry data list to parent for recursive upload + onUploadFiles(entryDataList as any) } } else { + // Fallback for browsers without FileSystemEntry API const regularFiles = Array.from(e.dataTransfer.files) if (regularFiles.length > 0) { onUploadFiles(regularFiles) @@ -64,45 +127,118 @@ export const useNotesFileUpload = ({ onUploadFiles, setIsDragOverSidebar }: UseN [onUploadFiles, setIsDragOverSidebar] ) - const handleSelectFiles = useCallback(() => { - const fileInput = document.createElement('input') - fileInput.type = 'file' - fileInput.multiple = true - fileInput.accept = '.md,.markdown' - fileInput.webkitdirectory = false - - fileInput.onchange = (e) => { - const target = e.target as HTMLInputElement - if (target.files && target.files.length > 0) { - const selectedFiles = Array.from(target.files) - onUploadFiles(selectedFiles) + /** + * Handle file selection via native Electron dialog + * Uses dialog.showOpenDialog in Main process for better UX and cross-platform consistency + * Direct upload using file paths - no unnecessary File object conversion + */ + const handleSelectFiles = useCallback(async () => { + try { + // Get target folder path from parent context + const targetFolderPath = getTargetFolderPath?.() || '' + if (!targetFolderPath) { + throw new Error('No target folder path available') } - fileInput.remove() - } - fileInput.click() - }, [onUploadFiles]) + // Use Electron native dialog for better UX + const files = await window.api.file.select({ + title: t('notes.select_files_to_upload'), + properties: ['openFile', 'multiSelections'], + filters: [ + { name: 'Markdown', extensions: ['md', 'markdown'] }, + { name: 'Images', extensions: ['jpg', 'jpeg', 'png', 'gif', 'webp', 'bmp', 'svg'] }, + { name: 'All Files', extensions: ['*'] } + ] + }) - const handleSelectFolder = useCallback(() => { - const folderInput = document.createElement('input') - folderInput.type = 'file' - // @ts-ignore - webkitdirectory is a non-standard attribute - folderInput.webkitdirectory = true - // 
@ts-ignore - directory is a non-standard attribute - folderInput.directory = true - folderInput.multiple = true + if (files && files.length > 0) { + // Extract file paths directly from FileMetadata + const filePaths = files.map((fileMetadata) => fileMetadata.path) - folderInput.onchange = (e) => { - const target = e.target as HTMLInputElement - if (target.files && target.files.length > 0) { - const selectedFiles = Array.from(target.files) - onUploadFiles(selectedFiles) + // Pause file watcher to prevent multiple refresh events + await window.api.file.pauseFileWatcher() + + try { + // Use batchUpload with file paths (Main process handles everything) + const result = await window.api.file.batchUpload(filePaths, targetFolderPath, { + allowedExtensions: ['.md', '.markdown', '.jpg', '.jpeg', '.png', '.gif', '.webp', '.bmp', '.svg'] + }) + + logger.info('File selection upload completed:', result) + + // Show success message + if (result.fileCount > 0) { + window.toast.success(t('notes.upload_success')) + + // Trigger tree refresh if callback provided + if (refreshTree) { + await refreshTree() + } + } else { + window.toast.warning(t('notes.no_valid_files')) + } + } finally { + // Resume watcher and trigger single refresh + await window.api.file.resumeFileWatcher() + } } - folderInput.remove() + } catch (error) { + logger.error('Failed to select files:', error as Error) + window.toast.error(t('notes.failed_to_select_files')) } + }, [t, getTargetFolderPath, refreshTree]) - folderInput.click() - }, [onUploadFiles]) + /** + * Handle folder selection via native Electron dialog + * Recursively lists all markdown files in the selected folder using Main process + * This provides better performance and avoids non-standard webkitdirectory API + * + * Important: We need to preserve the folder name itself (VS Code behavior) + * Example: Selecting /User/tmp should create targetPath/tmp/... 
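+ *
+ * Sketch of the resulting bridge call (illustrative paths; the actual extension filter is set below):
+ * // const result = await window.api.file.uploadFolder('/User/tmp', '/notes', { allowedExtensions: ['.md'] })
+ * // → { fileCount, folderCount, skippedFiles }, with uploaded files recreated under /notes/tmp/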
diff --git a/src/renderer/src/services/NotesService.ts b/src/renderer/src/services/NotesService.ts
index 4b71941fe8..3a34d46723 100644
--- a/src/renderer/src/services/NotesService.ts
+++ b/src/renderer/src/services/NotesService.ts
@@ -5,6 +5,7 @@ import { getFileDirectory } from '@renderer/utils'
 const logger = loggerService.withContext('NotesService')
 
 const MARKDOWN_EXT = '.md'
+const IMAGE_EXTS = ['.png', '.jpg', '.jpeg', '.gif', '.webp', '.svg', '.bmp']
 
 export interface UploadResult {
   uploadedNodes: NotesTreeNode[]
@@ -82,9 +83,11 @@ export async function renameNode(node: NotesTreeNode, newName: string): Promise<
   return { path: `${parentDir}/${safeName}`, name: safeName }
 }
 
-export async function uploadNotes(files: File[], targetPath: string): Promise<UploadResult> {
+export async function uploadNotes(
+  files: File[] | Array<{ fullPath: string; isFile: boolean; isDirectory: boolean; systemPath: string }>,
+  targetPath: string
+): Promise<UploadResult> {
   const basePath = normalizePath(targetPath)
-  const totalFiles = files.length
 
   if (files.length === 0) {
     return {
@@ -96,35 +99,93 @@ export async function uploadNotes(files: File[], targetPath: string): Promise<UploadResult> {
+    return uploadNotesRecursive(entries, targetPath)
+  }
+
+  // Legacy approach: File objects (for browser File API compatibility)
+  const fileList = files as File[]
+  const totalFiles = fileList.length
+
   try {
-    // Get file paths from File objects
-    // For browser File objects from drag-and-drop, we need to use FileReader to save temporarily
-    // However, for directory uploads, the files already have paths
     const filePaths: string[] = []
-    for (const file of files) {
-      // @ts-ignore - webkitRelativePath exists on File objects from directory uploads
-      if (file.path) {
-        // @ts-ignore - Electron File objects have .path property
-        filePaths.push(file.path)
+    for (const file of fileList) {
+      const filePath = window.api.file.getPathForFile(file)
+
+      if (filePath) {
+        filePaths.push(filePath)
       } else {
-        // For browser File API, we'd need to use FileReader and create temp files
-        // For now, fall back to the old method for these cases
-        logger.warn('File without path detected, using fallback method')
-        return uploadNotesLegacy(files, targetPath)
+        logger.warn('Failed to get system path for uploaded file:', { fileName: file.name })
+        window.toast.warning(`Failed to get system path for file: ${file.name}`)
+      }
+    }
+
+    if (filePaths.length === 0) {
+      return {
+        uploadedNodes: [],
+        totalFiles,
+        skippedFiles: totalFiles,
+        fileCount: 0,
+        folderCount: 0
       }
     }
 
     // Pause file watcher to prevent N refresh events
     await window.api.file.pauseFileWatcher()
 
+    // Use simplified batchUpload for File objects
+    const result = await window.api.file.batchUpload(filePaths, basePath, {
+      allowedExtensions: [MARKDOWN_EXT, ...IMAGE_EXTS]
+    })
+
+    return {
+      uploadedNodes: [],
+      totalFiles,
+      skippedFiles: result.skippedFiles,
+      fileCount: result.fileCount,
+      folderCount: result.folderCount
+    }
+  } catch (error) {
+    logger.error('Legacy file upload failed:', error as Error)
+    return {
+      uploadedNodes: [],
+      totalFiles,
+      skippedFiles: totalFiles,
+      fileCount: 0,
+      folderCount: 0
+    }
+  }
+}
+
+/**
+ * Recursive upload for drag-and-drop with fullPath preserved (VS Code approach)
+ * Uses batch processing for better performance
+ */
+async function uploadNotesRecursive(
+  entryDataList: Array<{ fullPath: string; isFile: boolean; isDirectory: boolean; systemPath: string }>,
+  targetPath: string
+): Promise<UploadResult> {
+  const basePath = normalizePath(targetPath)
+
+  try {
+    // Pause file watcher to prevent N refresh events
+    await window.api.file.pauseFileWatcher()
+
     try {
-      // Use the new optimized batch upload API that runs in Main process
-      const result = await window.api.file.batchUploadMarkdown(filePaths, basePath)
+      // Use batch upload API for better performance (parallel processing in Main process)
+      const result = await window.api.file.batchUploadEntries(entryDataList, basePath, {
+        allowedExtensions: [MARKDOWN_EXT, ...IMAGE_EXTS]
+      })
 
       return {
         uploadedNodes: [],
-        totalFiles,
+        totalFiles: result.fileCount + result.skippedFiles,
         skippedFiles: result.skippedFiles,
         fileCount: result.fileCount,
         folderCount: result.folderCount
@@ -134,75 +195,8 @@ export async function uploadNotes(files: File[], targetPath: string): Promise<UploadResult> {
-  const basePath = normalizePath(targetPath)
-  const markdownFiles = filterMarkdown(files)
-  const skippedFiles = files.length - markdownFiles.length
-
-  if (markdownFiles.length === 0) {
-    return {
-      uploadedNodes: [],
-      totalFiles: files.length,
-      skippedFiles,
-      fileCount: 0,
-      folderCount: 0
-    }
-  }
-
-  const folders = collectFolders(markdownFiles, basePath)
-  await createFolders(folders)
-
-  let fileCount = 0
-  const BATCH_SIZE = 5 // Process 5 files concurrently to balance performance and responsiveness
-
-  // Process files in batches to avoid blocking the UI thread
-  for (let i = 0; i < markdownFiles.length; i += BATCH_SIZE) {
-    const batch = markdownFiles.slice(i, i + BATCH_SIZE)
-
-    // Process current batch in parallel
-    const results = await Promise.allSettled(
-      batch.map(async (file) => {
-        const { dir, name } = resolveFileTarget(file, basePath)
-        const { safeName } = await window.api.file.checkFileName(dir, name, true)
-        const finalPath = `${dir}/${safeName}${MARKDOWN_EXT}`
-
-        const content = await file.text()
-        await window.api.file.write(finalPath, content)
-        return true
-      })
-    )
-
-    // Count successful uploads
-    results.forEach((result) => {
-      if (result.status === 'fulfilled') {
-        fileCount += 1
-      } else {
-        logger.error('Failed to write uploaded file:', result.reason)
-      }
-    })
-
-    // Yield to the event loop between batches to keep UI responsive
-    if (i + BATCH_SIZE < markdownFiles.length) {
-      await new Promise((resolve) => setTimeout(resolve, 0))
-    }
-  }
-
-  return {
-    uploadedNodes: [],
-    totalFiles: files.length,
-    skippedFiles,
-    fileCount,
-    folderCount: folders.size
+    logger.error('Recursive upload failed:', error as Error)
+    throw error
   }
 }
 
@@ -233,57 +227,15 @@ function normalizePath(value: string): string {
   return value.replace(/\\/g, '/')
 }
 
-function filterMarkdown(files: File[]): File[] {
-  return files.filter((file) => file.name.toLowerCase().endsWith(MARKDOWN_EXT))
-}
-
-function collectFolders(files: File[], basePath: string): Set<string> {
-  const folders = new Set<string>()
-
-  files.forEach((file) => {
-    const relativePath = file.webkitRelativePath || ''
-    if (!relativePath.includes('/')) {
-      return
+export const findNode = (nodes: NotesTreeNode[], nodeId: string): NotesTreeNode | null => {
+  for (const node of nodes) {
+    if (node.id === nodeId) {
+      return node
     }
-
-    const parts = relativePath.split('/')
-    parts.pop()
-
-    let current = basePath
-    for (const part of parts) {
-      current = `${current}/${part}`
-      folders.add(current)
-    }
-  })
-
-  return folders
-}
-
-async function createFolders(folders: Set<string>): Promise<void> {
-  const ordered = Array.from(folders).sort((a, b) => a.length - b.length)
-
-  for (const folder of ordered) {
-    try {
-      await window.api.file.mkdir(folder)
-    } catch (error) {
-      logger.debug('Skip existing folder while uploading notes', {
-        folder,
-        error: (error as Error).message
-      })
+    if (node.children) {
+      const found = findNode(node.children, nodeId)
+      if (found) return found
     }
   }
-}
-
-function resolveFileTarget(file: File, basePath: string): { dir: string; name: string } {
-  if (!file.webkitRelativePath || !file.webkitRelativePath.includes('/')) {
-    const nameWithoutExt = file.name.endsWith(MARKDOWN_EXT) ? file.name.slice(0, -MARKDOWN_EXT.length) : file.name
-    return { dir: basePath, name: nameWithoutExt }
-  }
-
-  const parts = file.webkitRelativePath.split('/')
-  const fileName = parts.pop() || file.name
-  const dirPath = `${basePath}/${parts.join('/')}`
-  const nameWithoutExt = fileName.endsWith(MARKDOWN_EXT) ? fileName.slice(0, -MARKDOWN_EXT.length) : fileName
-
-  return { dir: dirPath, name: nameWithoutExt }
+  return null
 }
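The entry-data shape that `uploadNotesRecursive` forwards to `window.api.file.batchUploadEntries` only appears above as an inline type. A rough sketch of how a drag-and-drop handler could assemble it, assuming a flat, files-only drop (a real directory drop would also need to walk `FileSystemDirectoryEntry.createReader()`); the `UploadEntryData` name and the `collectDroppedEntries` helper are hypothetical, not part of this PR:

```ts
interface UploadEntryData {
  fullPath: string // path inside the dropped tree, e.g. '/docs/idea.md'
  isFile: boolean
  isDirectory: boolean
  systemPath: string // absolute OS path, resolved via getPathForFile
}

// Hypothetical helper: builds the entry list for a flat (files-only) drop.
function collectDroppedEntries(event: DragEvent): UploadEntryData[] {
  const entries: UploadEntryData[] = []
  for (const item of Array.from(event.dataTransfer?.items ?? [])) {
    const entry = item.webkitGetAsEntry()
    const file = item.getAsFile()
    if (!entry || !file) continue
    entries.push({
      fullPath: entry.fullPath,
      isFile: entry.isFile,
      isDirectory: entry.isDirectory,
      systemPath: window.api.file.getPathForFile(file)
    })
  }
  return entries
}
```

Passing such an array to `uploadNotes()` routes it into `uploadNotesRecursive`, which ships the whole list across a single `batchUploadEntries` IPC call instead of issuing one write per file.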
diff --git a/src/renderer/src/utils/markdownConverter.ts b/src/renderer/src/utils/markdownConverter.ts
index ac55d16ed4..b8783b6838 100644
--- a/src/renderer/src/utils/markdownConverter.ts
+++ b/src/renderer/src/utils/markdownConverter.ts
@@ -843,7 +843,6 @@ export const htmlToMarkdown = (html: string | null | undefined): string => {
 /**
  * Converts Markdown content to HTML
  * @param markdown - Markdown string to convert
- * @param options - Task list options
  * @returns HTML string
  */
 export const markdownToHtml = (markdown: string | null | undefined): string => {
@@ -951,3 +950,7 @@ export const isMarkdownContent = (content: string): boolean => {
 
   return markdownPatterns.some((pattern) => pattern.test(content))
 }
+
+/**
+
+*/
diff --git a/tests/main.setup.ts b/tests/main.setup.ts
index 5cadb89d02..524a239884 100644
--- a/tests/main.setup.ts
+++ b/tests/main.setup.ts
@@ -115,25 +115,30 @@ vi.mock('node:path', async () => {
   }
 })
 
-vi.mock('node:fs', () => ({
-  promises: {
-    access: vi.fn(),
-    readFile: vi.fn(),
-    writeFile: vi.fn(),
-    mkdir: vi.fn(),
-    readdir: vi.fn(),
-    stat: vi.fn(),
-    unlink: vi.fn(),
-    rmdir: vi.fn()
-  },
-  existsSync: vi.fn(),
-  readFileSync: vi.fn(),
-  writeFileSync: vi.fn(),
-  mkdirSync: vi.fn(),
-  readdirSync: vi.fn(),
-  statSync: vi.fn(),
-  unlinkSync: vi.fn(),
-  rmdirSync: vi.fn(),
-  createReadStream: vi.fn(),
-  createWriteStream: vi.fn()
-}))
+vi.mock('node:fs', async () => {
+  const actual = await vi.importActual('node:fs')
+  return {
+    ...actual,
+    default: actual,
+    promises: {
+      access: vi.fn(),
+      readFile: vi.fn(),
+      writeFile: vi.fn(),
+      mkdir: vi.fn(),
+      readdir: vi.fn(),
+      stat: vi.fn(),
+      unlink: vi.fn(),
+      rmdir: vi.fn()
+    },
+    existsSync: vi.fn(),
+    readFileSync: vi.fn(),
+    writeFileSync: vi.fn(),
+    mkdirSync: vi.fn(),
+    readdirSync: vi.fn(),
+    statSync: vi.fn(),
+    unlinkSync: vi.fn(),
+    rmdirSync: vi.fn(),
+    createReadStream: vi.fn(),
+    createWriteStream: vi.fn()
+  }
+})
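On the test side, the rewritten `node:fs` mock now spreads the real module before stubbing, so constants and anything not explicitly overridden keep working. A small illustrative test, not part of this PR, of how that plays out under Vitest, assuming the setup file above is registered for the suite:

```ts
import { constants, existsSync } from 'node:fs'
import { describe, expect, it, vi } from 'vitest'

describe('node:fs mock from tests/main.setup.ts', () => {
  it('keeps real constants while letting tests script the stubs', () => {
    // Real values survive because the factory spreads `actual` into the mock.
    expect(constants.F_OK).toBeDefined()

    // Stubbed functions are plain vi.fn()s, so each test can set their behaviour.
    vi.mocked(existsSync).mockReturnValue(true)
    expect(existsSync('/any/path')).toBe(true)
  })
})
```

The `default: actual` entry covers code that imports the module via `import fs from 'node:fs'` rather than through named imports.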