From f58378daa06c60c16a8364d824208a14f14dfd55 Mon Sep 17 00:00:00 2001 From: Jason Young <44939412+farion1231@users.noreply.github.com> Date: Tue, 1 Jul 2025 23:37:44 +0800 Subject: [PATCH 01/21] test: add comprehensive tests for CopyButton component (#7719) * test: add comprehensive tests for CopyButton component - Add tests for basic rendering and functionality - Add clipboard API mocking and error handling - Add tests for custom props (size, tooltip, label) - Add edge case testing (empty text, special characters) - Improve component test coverage Signed-off-by: Jason Young * fix: resolve linting issues in CopyButton tests - Sort imports alphabetically - Remove trailing whitespace - Add final newline Signed-off-by: Jason Young * refactor: consolidate similar test cases in CopyButton tests - Merge 'should render copy icon' and 'should render with basic structure' - Merge 'should apply custom size to icon' and 'should apply custom size to label' - Reduce test duplication while maintaining full coverage - Address maintainer feedback for better test organization Signed-off-by: Jason Young --------- Signed-off-by: Jason Young --- .../components/__tests__/CopyButton.test.tsx | 164 ++++++++++++++++++ 1 file changed, 164 insertions(+) create mode 100644 src/renderer/src/components/__tests__/CopyButton.test.tsx diff --git a/src/renderer/src/components/__tests__/CopyButton.test.tsx b/src/renderer/src/components/__tests__/CopyButton.test.tsx new file mode 100644 index 0000000000..dabe863d32 --- /dev/null +++ b/src/renderer/src/components/__tests__/CopyButton.test.tsx @@ -0,0 +1,164 @@ +import { render, screen } from '@testing-library/react' +import userEvent from '@testing-library/user-event' +import { beforeEach, describe, expect, it, vi } from 'vitest' + +import CopyButton from '../CopyButton' + +// Mock navigator.clipboard +const mockWriteText = vi.fn() +const mockClipboard = { + writeText: mockWriteText +} + +// Mock window.message +const mockMessage = { + success: vi.fn(), + error: vi.fn() +} + +// Mock useTranslation +vi.mock('react-i18next', () => ({ + useTranslation: () => ({ + t: (key: string) => { + const translations: Record = { + 'message.copy.success': '复制成功', + 'message.copy.failed': '复制失败' + } + return translations[key] || key + } + }) +})) + +describe('CopyButton', () => { + beforeEach(() => { + // Setup mocks + Object.assign(navigator, { clipboard: mockClipboard }) + Object.assign(window, { message: mockMessage }) + + // Clear all mocks + vi.clearAllMocks() + }) + + it('should render with basic structure and copy icon', () => { + render() + + // Should have basic clickable container + const container = document.querySelector('div') + expect(container).toBeInTheDocument() + + // Should render copy icon + const copyIcon = document.querySelector('.copy-icon') + expect(copyIcon).toBeInTheDocument() + }) + + it('should render label when provided', () => { + const labelText = 'Copy to clipboard' + render() + + expect(screen.getByText(labelText)).toBeInTheDocument() + }) + + it('should render tooltip when provided', async () => { + const tooltipText = 'Click to copy' + render() + + // Check that the component structure includes tooltip + const container = document.querySelector('div') + expect(container).toBeInTheDocument() + + // The tooltip should be rendered when hovered + const copyIcon = document.querySelector('.copy-icon') + expect(copyIcon).toBeInTheDocument() + }) + + it('should not render tooltip when not provided', () => { + render() + + // Should not have tooltip wrapper + 
expect(document.querySelector('.ant-tooltip')).not.toBeInTheDocument() + }) + + it('should copy text to clipboard on click', async () => { + const textToCopy = 'Hello World' + mockWriteText.mockResolvedValue(undefined) + + render() + + // Find the clickable element by using the copy icon as reference + const copyIcon = document.querySelector('.copy-icon') + const clickableElement = copyIcon?.parentElement + expect(clickableElement).toBeInTheDocument() + + await userEvent.click(clickableElement!) + + expect(mockWriteText).toHaveBeenCalledWith(textToCopy) + }) + + it('should show success message when copy succeeds', async () => { + mockWriteText.mockResolvedValue(undefined) + + render() + + const copyIcon = document.querySelector('.copy-icon') + const clickableElement = copyIcon?.parentElement + await userEvent.click(clickableElement!) + + expect(mockMessage.success).toHaveBeenCalledWith('复制成功') + expect(mockMessage.error).not.toHaveBeenCalled() + }) + + it('should show error message when copy fails', async () => { + mockWriteText.mockRejectedValue(new Error('Clipboard access denied')) + + render() + + const copyIcon = document.querySelector('.copy-icon') + const clickableElement = copyIcon?.parentElement + await userEvent.click(clickableElement!) + + expect(mockMessage.error).toHaveBeenCalledWith('复制失败') + expect(mockMessage.success).not.toHaveBeenCalled() + }) + + it('should apply custom size to icon and label', () => { + const customSize = 20 + const labelText = 'Copy' + + render() + + // Should apply custom size to icon + const copyIcon = document.querySelector('.copy-icon') + expect(copyIcon).toHaveAttribute('width', customSize.toString()) + expect(copyIcon).toHaveAttribute('height', customSize.toString()) + + // Should apply custom size to label + const label = screen.getByText(labelText) + expect(label).toHaveStyle({ fontSize: `${customSize}px` }) + }) + + it('should handle empty text', async () => { + const emptyText = '' + mockWriteText.mockResolvedValue(undefined) + + render() + + const copyIcon = document.querySelector('.copy-icon') + const clickableElement = copyIcon?.parentElement + await userEvent.click(clickableElement!) + + expect(mockWriteText).toHaveBeenCalledWith(emptyText) + }) + + it('should handle special characters', async () => { + const specialText = '特殊字符 🎉 @#$%^&*()' + mockWriteText.mockResolvedValue(undefined) + + render() + + const copyIcon = document.querySelector('.copy-icon') + const clickableElement = copyIcon?.parentElement + await userEvent.click(clickableElement!) + + expect(mockWriteText).toHaveBeenCalledWith(specialText) + }) +}) From 83f36f5e77322ef34c55ae82f3624c3e0faf06d7 Mon Sep 17 00:00:00 2001 From: SuYao Date: Wed, 2 Jul 2025 03:03:03 +0800 Subject: [PATCH 02/21] refactor(WebSearchMiddleware, linkConverter): enhance link processing and buffering logic (#7724) - Updated WebSearchMiddleware to utilize the new smartLinkConverter structure, allowing for better handling of buffered content and fallback logic. - Introduced flushLinkConverterBuffer function to clear remaining buffered content at stream end. - Modified convertLinks and smartLinkConverter functions to return structured results indicating whether content was buffered. - Enhanced unit tests to cover new functionality and edge cases for link conversion and buffering behavior. 
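A minimal usage sketch of the API described above, using only the signatures this patch introduces (smartLinkConverter returning { text, hasBufferedContent } and flushLinkConverterBuffer); the processStream wrapper, chunks array, and emit callback are hypothetical stand-ins for the middleware's stream plumbing, not part of the change itself:

import { flushLinkConverterBuffer, smartLinkConverter } from '@renderer/utils/linkConverter'

// Illustrative only: `chunks` and `emit` stand in for the TransformStream used by WebSearchMiddleware.
function processStream(chunks: string[], emit: (text: string) => void): void {
  let isFirstChunk = true
  for (const chunk of chunks) {
    // resetCounter is true only for the first chunk of a response
    const result = smartLinkConverter(chunk, 'openai', isFirstChunk)
    isFirstChunk = false
    if (result.hasBufferedContent) {
      // Part of the chunk is held back waiting for the rest of a link;
      // emit only what is already safe (may be empty), never the raw chunk.
      if (result.text) emit(result.text)
    } else {
      // Nothing buffered: fall back to the original chunk if conversion returned nothing.
      emit(result.text || chunk)
    }
  }
  // At stream end, flush whatever is still buffered (e.g. a dangling "[label](").
  const remaining = flushLinkConverterBuffer()
  if (remaining) emit(remaining)
}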
--- .../middleware/core/WebSearchMiddleware.ts | 38 +++- .../src/utils/__tests__/linkConverter.test.ts | 184 +++++++++++++++++- src/renderer/src/utils/linkConverter.ts | 53 ++++- 3 files changed, 256 insertions(+), 19 deletions(-) diff --git a/src/renderer/src/aiCore/middleware/core/WebSearchMiddleware.ts b/src/renderer/src/aiCore/middleware/core/WebSearchMiddleware.ts index 97261e3d52..70915abffa 100644 --- a/src/renderer/src/aiCore/middleware/core/WebSearchMiddleware.ts +++ b/src/renderer/src/aiCore/middleware/core/WebSearchMiddleware.ts @@ -1,5 +1,5 @@ import { ChunkType } from '@renderer/types/chunk' -import { smartLinkConverter } from '@renderer/utils/linkConverter' +import { flushLinkConverterBuffer, smartLinkConverter } from '@renderer/utils/linkConverter' import { CompletionsParams, CompletionsResult, GenericChunk } from '../schemas' import { CompletionsContext, CompletionsMiddleware } from '../types' @@ -42,20 +42,46 @@ export const WebSearchMiddleware: CompletionsMiddleware = const providerType = model.provider || 'openai' // 使用当前可用的Web搜索结果进行链接转换 const text = chunk.text - const processedText = smartLinkConverter(text, providerType, isFirstChunk) + const result = smartLinkConverter(text, providerType, isFirstChunk) if (isFirstChunk) { isFirstChunk = false } - controller.enqueue({ - ...chunk, - text: processedText - }) + + // - 如果有内容被缓冲,说明convertLinks正在等待后续chunk,不使用原文本避免重复 + // - 如果没有内容被缓冲且结果为空,可能是其他处理问题,使用原文本作为安全回退 + let finalText: string + if (result.hasBufferedContent) { + // 有内容被缓冲,使用处理后的结果(可能为空,等待后续chunk) + finalText = result.text + } else { + // 没有内容被缓冲,可以安全使用回退逻辑 + finalText = result.text || text + } + + // 只有当finalText不为空时才发送chunk + if (finalText) { + controller.enqueue({ + ...chunk, + text: finalText + }) + } } else if (chunk.type === ChunkType.LLM_WEB_SEARCH_COMPLETE) { // 暂存Web搜索结果用于链接完善 ctx._internal.webSearchState!.results = chunk.llm_web_search // 将Web搜索完成事件继续传递下去 controller.enqueue(chunk) + } else if (chunk.type === ChunkType.LLM_RESPONSE_COMPLETE) { + // 流结束时,清空链接转换器的buffer并处理剩余内容 + const remainingText = flushLinkConverterBuffer() + if (remainingText) { + controller.enqueue({ + type: ChunkType.TEXT_DELTA, + text: remainingText + }) + } + // 继续传递LLM_RESPONSE_COMPLETE事件 + controller.enqueue(chunk) } else { controller.enqueue(chunk) } diff --git a/src/renderer/src/utils/__tests__/linkConverter.test.ts b/src/renderer/src/utils/__tests__/linkConverter.test.ts index eaecc3ca1f..eabca8e284 100644 --- a/src/renderer/src/utils/__tests__/linkConverter.test.ts +++ b/src/renderer/src/utils/__tests__/linkConverter.test.ts @@ -7,7 +7,8 @@ import { convertLinksToHunyuan, convertLinksToOpenRouter, convertLinksToZhipu, - extractUrlsFromMarkdown + extractUrlsFromMarkdown, + flushLinkConverterBuffer } from '../linkConverter' describe('linkConverter', () => { @@ -90,22 +91,197 @@ describe('linkConverter', () => { it('should convert links with domain-like text to numbered links', () => { const input = '查看这个网站 [example.com](https://example.com)' const result = convertLinks(input, true) - expect(result).toBe('查看这个网站 [1](https://example.com)') + expect(result.text).toBe('查看这个网站 [1](https://example.com)') + expect(result.hasBufferedContent).toBe(false) }) it('should handle parenthesized link format ([host](url))', () => { const input = '这里有链接 ([example.com](https://example.com))' const result = convertLinks(input, true) - expect(result).toBe('这里有链接 [1](https://example.com)') + expect(result.text).toBe('这里有链接 [1](https://example.com)') + expect(result.hasBufferedContent).toBe(false) }) 
it('should use the same counter for duplicate URLs', () => { const input = '第一个链接 [example.com](https://example.com) 和第二个相同链接 [subdomain.example.com](https://example.com)' const result = convertLinks(input, true) - expect(result).toBe( + expect(result.text).toBe( '第一个链接 [1](https://example.com) 和第二个相同链接 [1](https://example.com)' ) + expect(result.hasBufferedContent).toBe(false) + }) + + it('should not misinterpret code placeholders as incomplete links', () => { + const input = + 'The most common reason for a `404` error is that the repository specified in the `owner` and `repo`' + const result = convertLinks(input, true) + expect(result.text).toBe( + 'The most common reason for a `404` error is that the repository specified in the `owner` and `repo`' + ) + expect(result.hasBufferedContent).toBe(false) + }) + + it('should handle text with square brackets that are not links', () => { + const input = 'Use [owner] and [repo] placeholders in your configuration [file]' + const result = convertLinks(input, true) + expect(result.text).toBe('Use [owner] and [repo] placeholders in your configuration [file]') + expect(result.hasBufferedContent).toBe(false) + }) + + it('should handle markdown code blocks with square brackets', () => { + const input = 'In the code: `const config = { [key]: value }` you can see [brackets]' + const result = convertLinks(input, true) + expect(result.text).toBe('In the code: `const config = { [key]: value }` you can see [brackets]') + expect(result.hasBufferedContent).toBe(false) + }) + + it('should properly handle partial markdown link patterns', () => { + // 这种情况下,[text] 后面没有紧跟 (,所以不应该被当作潜在链接 + const input = 'Check the [documentation] for more details' + const result = convertLinks(input, true) + expect(result.text).toBe('Check the [documentation] for more details') + expect(result.hasBufferedContent).toBe(false) + }) + + it('should correctly identify and handle real incomplete links', () => { + // 第一个块包含真正的不完整链接模式 + const chunk1 = 'Visit [example.com](' + const result1 = convertLinks(chunk1, true) + expect(result1.text).toBe('Visit ') + expect(result1.hasBufferedContent).toBe(true) + + // 第二个块完成该链接 + const chunk2 = 'https://example.com) for more info' + const result2 = convertLinks(chunk2, false) + expect(result2.text).toBe('[1](https://example.com) for more info') + expect(result2.hasBufferedContent).toBe(false) + }) + + it('should handle mixed content with real links and placeholders', () => { + const input = 'Configure [owner] and [repo] in [GitHub](https://github.com) settings' + const result = convertLinks(input, true) + expect(result.text).toBe('Configure [owner] and [repo] in GitHub [1](https://github.com) settings') + expect(result.hasBufferedContent).toBe(false) + }) + + it('should handle empty text', () => { + const input = '' + const result = convertLinks(input, true) + expect(result.text).toBe('') + expect(result.hasBufferedContent).toBe(false) + }) + + it('should handle text with only square brackets', () => { + const input = '[][][]' + const result = convertLinks(input, true) + expect(result.text).toBe('[][][]') + expect(result.hasBufferedContent).toBe(false) + }) + + describe('streaming small chunks simulation', () => { + it('should handle non-link placeholders in small chunks without buffering', () => { + // 模拟用户遇到的问题:包含方括号占位符的文本被分成小chunks + const chunks = [ + 'The most common reason for a `404` error is that the repository specified in the `', + 'owner` and `', + 'repo` parameters are incorrect.' 
+ ] + + let accumulatedText = '' + + // 第一个chunk + const result1 = convertLinks(chunks[0], true) + expect(result1.text).toBe(chunks[0]) // 应该立即返回,不缓冲 + expect(result1.hasBufferedContent).toBe(false) + accumulatedText += result1.text + + // 第二个chunk + const result2 = convertLinks(chunks[1], false) + expect(result2.text).toBe(chunks[1]) // 应该立即返回,不缓冲 + expect(result2.hasBufferedContent).toBe(false) + accumulatedText += result2.text + + // 第三个chunk + const result3 = convertLinks(chunks[2], false) + expect(result3.text).toBe(chunks[2]) // 应该立即返回,不缓冲 + expect(result3.hasBufferedContent).toBe(false) + accumulatedText += result3.text + + // 验证最终结果 + expect(accumulatedText).toBe(chunks.join('')) + expect(accumulatedText).toBe( + 'The most common reason for a `404` error is that the repository specified in the `owner` and `repo` parameters are incorrect.' + ) + }) + + it('should handle real links split across small chunks with proper buffering', () => { + // 模拟真实链接被分割成小chunks的情况 - 更现实的分割方式 + const chunks = [ + 'Please visit [example.com](', // 不完整链接 + 'https://example.com) for details' // 完成链接 + ] + + let accumulatedText = '' + + // 第一个chunk:包含不完整链接 [text]( + const result1 = convertLinks(chunks[0], true) + expect(result1.text).toBe('Please visit ') // 只返回安全部分 + expect(result1.hasBufferedContent).toBe(true) // [example.com]( 被缓冲 + accumulatedText += result1.text + + // 第二个chunk:完成链接 + const result2 = convertLinks(chunks[1], false) + expect(result2.text).toBe('[1](https://example.com) for details') // 完整链接 + 剩余文本 + expect(result2.hasBufferedContent).toBe(false) + accumulatedText += result2.text + + // 验证最终结果 + expect(accumulatedText).toBe('Please visit [1](https://example.com) for details') + }) + + it('should handle mixed content with placeholders and real links in small chunks', () => { + // 混合内容:既有占位符又有真实链接 - 更现实的分割方式 + const chunks = [ + 'Configure [owner] and [repo] in [GitHub](', // 占位符 + 不完整链接 + 'https://github.com) settings page.' // 完成链接 + ] + + let accumulatedText = '' + + // 第一个chunk:包含占位符和不完整链接 + const result1 = convertLinks(chunks[0], true) + expect(result1.text).toBe('Configure [owner] and [repo] in ') // 占位符保留,链接部分被缓冲 + expect(result1.hasBufferedContent).toBe(true) // [GitHub]( 被缓冲 + accumulatedText += result1.text + + // 第二个chunk:完成链接 + const result2 = convertLinks(chunks[1], false) + expect(result2.text).toBe('GitHub [1](https://github.com) settings page.') // 完整链接 + 剩余文本 + expect(result2.hasBufferedContent).toBe(false) + accumulatedText += result2.text + + // 验证最终结果 + expect(accumulatedText).toBe( + 'Configure [owner] and [repo] in GitHub [1](https://github.com) settings page.' 
+ ) + expect(accumulatedText).toContain('[owner] and [repo]') // 占位符保持原样 + expect(accumulatedText).toContain('[1](https://github.com)') // 链接被转换 + }) + + it('should properly handle buffer flush at stream end', () => { + // 测试流结束时的buffer清理 + const incompleteChunk = 'Check the documentation at [GitHub](' + const result = convertLinks(incompleteChunk, true) + + // 应该有内容被缓冲 + expect(result.hasBufferedContent).toBe(true) + expect(result.text).toBe('Check the documentation at ') // 只返回安全部分 + + // 模拟流结束,强制清空buffer + const remainingText = flushLinkConverterBuffer() + expect(remainingText).toBe('[GitHub](') // buffer中的剩余内容 + }) }) }) diff --git a/src/renderer/src/utils/linkConverter.ts b/src/renderer/src/utils/linkConverter.ts index 238c88b10e..652c2f4283 100644 --- a/src/renderer/src/utils/linkConverter.ts +++ b/src/renderer/src/utils/linkConverter.ts @@ -126,9 +126,12 @@ export function convertLinksToHunyuan(text: string, webSearch: any[], resetCount * * @param {string} text The current chunk of text to process * @param {boolean} resetCounter Whether to reset the counter and buffer - * @returns {string} Processed text with complete links converted + * @returns {{text: string, hasBufferedContent: boolean}} Processed text and whether content was buffered */ -export function convertLinks(text: string, resetCounter: boolean = false): string { +export function convertLinks( + text: string, + resetCounter: boolean = false +): { text: string; hasBufferedContent: boolean } { if (resetCounter) { linkCounter = 1 buffer = '' @@ -158,12 +161,22 @@ export function convertLinks(text: string, resetCounter: boolean = false): strin } else if (buffer[i] === '[') { // Check if this could be the start of a regular link const substring = buffer.substring(i) - const match = /^\[([^\]]+)\]\(([^)]+)\)/.exec(substring) - if (!match) { + // 检查是否是真正的不完整链接:[text]( 但没有完整的 url) + const incompleteLink = /^\[([^\]]+)\]\s*\([^)]*$/.test(substring) + if (incompleteLink) { safePoint = i break } + + // 检查是否是完整的链接但需要验证 + const completeLink = /^\[([^\]]+)\]\(([^)]+)\)/.test(substring) + if (completeLink) { + // 如果是完整链接,继续处理,不设置safePoint + continue + } + + // 如果不是潜在的链接格式,继续检查 } } @@ -171,6 +184,9 @@ export function convertLinks(text: string, resetCounter: boolean = false): strin const safeBuffer = buffer.substring(0, safePoint) buffer = buffer.substring(safePoint) + // 检查是否有内容被保留在buffer中 + const hasBufferedContent = buffer.length > 0 + // Process the safe buffer to handle complete links let result = '' let position = 0 @@ -237,7 +253,10 @@ export function convertLinks(text: string, resetCounter: boolean = false): strin position++ } - return result + return { + text: result, + hasBufferedContent + } } /** @@ -439,13 +458,13 @@ export function extractWebSearchReferences(text: string): Array<{ * @param {any[]} webSearchResults Web搜索结果数组 * @param {string} providerType Provider类型 ('openai', 'zhipu', 'hunyuan', 'openrouter', etc.) 
* @param {boolean} resetCounter 是否重置计数器 - * @returns {string} 转换后的文本 + * @returns {{text: string, hasBufferedContent: boolean}} 转换后的文本和是否有内容被缓冲 */ export function smartLinkConverter( text: string, providerType: string = 'openai', resetCounter: boolean = false -): string { +): { text: string; hasBufferedContent: boolean } { // 检测文本中的引用模式 const references = extractWebSearchReferences(text) @@ -458,10 +477,26 @@ export function smartLinkConverter( const hasZhipuPattern = references.some((ref) => ref.placeholder.includes('ref_')) if (hasZhipuPattern) { - return convertLinksToZhipu(text, resetCounter) + return { + text: convertLinksToZhipu(text, resetCounter), + hasBufferedContent: false + } } else if (providerType === 'openrouter') { - return convertLinksToOpenRouter(text, resetCounter) + return { + text: convertLinksToOpenRouter(text, resetCounter), + hasBufferedContent: false + } } else { return convertLinks(text, resetCounter) } } + +/** + * 强制返回buffer中的所有内容,用于流结束时清空缓冲区 + * @returns {string} buffer中剩余的所有内容 + */ +export function flushLinkConverterBuffer(): string { + const remainingBuffer = buffer + buffer = '' + return remainingBuffer +} From 6a2e04aaebfe63b383f660fae77568cbb22c5d0c Mon Sep 17 00:00:00 2001 From: kangfenmao Date: Wed, 2 Jul 2025 10:04:14 +0800 Subject: [PATCH 03/21] Revert "fix(WindowService): remove backgroundThrottling option for cleaner window configuration (#7704)" This reverts commit 3eb6d08b349a681636fd7bb4bc7982f98a83bd2f. --- src/main/services/WindowService.ts | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/src/main/services/WindowService.ts b/src/main/services/WindowService.ts index c81926f5d2..65132eb54f 100644 --- a/src/main/services/WindowService.ts +++ b/src/main/services/WindowService.ts @@ -71,7 +71,8 @@ export class WindowService { webSecurity: false, webviewTag: true, allowRunningInsecureContent: true, - zoomFactor: configManager.getZoomFactor() + zoomFactor: configManager.getZoomFactor(), + backgroundThrottling: false } }) From 8d9ac7299a6ae2a7c8b4d9fcd6f1c60ae8394f44 Mon Sep 17 00:00:00 2001 From: one Date: Wed, 2 Jul 2025 10:22:17 +0800 Subject: [PATCH 04/21] chore(ci): update dependabot (#7725) --- .github/dependabot.yml | 88 +++++++++++++++++++++--------------------- 1 file changed, 43 insertions(+), 45 deletions(-) diff --git a/.github/dependabot.yml b/.github/dependabot.yml index f530d6e3bf..e2b17486db 100644 --- a/.github/dependabot.yml +++ b/.github/dependabot.yml @@ -4,38 +4,26 @@ updates: directory: "/" schedule: interval: "monthly" - open-pull-requests-limit: 7 + open-pull-requests-limit: 5 target-branch: "main" commit-message: prefix: "chore" include: "scope" + ignore: + - dependency-name: "*" + update-types: + - "version-update:semver-major" + - dependency-name: "@google/genai" + - dependency-name: "antd" + - dependency-name: "epub" + - dependency-name: "openai" groups: - # 核心框架 - core-framework: + # CherryStudio 自定义包 + cherrystudio-packages: patterns: - - "react" - - "react-dom" - - "electron" - - "typescript" - - "@types/react*" - - "@types/node" - update-types: - - "minor" - - "patch" - - # Electron 生态和构建工具 - electron-build: - patterns: - - "electron-*" - - "@electron*" - - "vite" - - "@vitejs/*" - - "dotenv-cli" - - "rollup-plugin-*" - - "@swc/*" - update-types: - - "minor" - - "patch" + - "@cherrystudio/*" + - "@kangfenmao/*" + - "selection-hook" # 测试工具 testing-tools: @@ -44,30 +32,40 @@ updates: - "@vitest/*" - "playwright" - "@playwright/*" - - "eslint*" - - "@eslint*" + - "testing-library/*" + - 
"jest-styled-components" + + # Lint 工具 + lint-tools: + patterns: + - "eslint" + - "eslint-plugin-*" + - "@eslint/*" + - "@eslint-react/*" + - "@electron-toolkit/eslint-config-*" - "prettier" - "husky" - "lint-staged" - update-types: - - "minor" - - "patch" - # CherryStudio 自定义包 - cherrystudio-packages: + # Markdown + markdown: patterns: - - "@cherrystudio/*" - update-types: - - "minor" - - "patch" - - # 兜底其他 dependencies - other-dependencies: - dependency-type: "production" - - # 兜底其他 devDependencies - other-dev-dependencies: - dependency-type: "development" + - "react-markdown" + - "rehype-katex" + - "rehype-mathjax" + - "rehype-raw" + - "remark-cjk-friendly" + - "remark-gfm" + - "remark-math" + - "remove-markdown" + - "markdown-it" + - "@shikijs/markdown-it" + - "shiki" + - "@uiw/codemirror-extensions-langs" + - "@uiw/codemirror-themes-all" + - "@uiw/react-codemirror" + - "fast-diff" + - "mermaid" - package-ecosystem: "github-actions" directory: "/" From 4b92a5ef1e2399c2ddae5eaf26021c54673d6c8e Mon Sep 17 00:00:00 2001 From: beyondkmp Date: Wed, 2 Jul 2025 10:57:30 +0800 Subject: [PATCH 05/21] chore: update electron dependency to version 35.6.0 in package.json and yarn.lock (#7730) --- package.json | 2 +- yarn.lock | 10 +++++----- 2 files changed, 6 insertions(+), 6 deletions(-) diff --git a/package.json b/package.json index 93b7ce697f..c81c770ebc 100644 --- a/package.json +++ b/package.json @@ -148,7 +148,7 @@ "diff": "^7.0.0", "docx": "^9.0.2", "dotenv-cli": "^7.4.2", - "electron": "35.4.0", + "electron": "35.6.0", "electron-builder": "26.0.15", "electron-devtools-installer": "^3.2.0", "electron-log": "^5.1.5", diff --git a/yarn.lock b/yarn.lock index fd5375cb38..49b5b8fb84 100644 --- a/yarn.lock +++ b/yarn.lock @@ -5723,7 +5723,7 @@ __metadata: diff: "npm:^7.0.0" docx: "npm:^9.0.2" dotenv-cli: "npm:^7.4.2" - electron: "npm:35.4.0" + electron: "npm:35.6.0" electron-builder: "npm:26.0.15" electron-devtools-installer: "npm:^3.2.0" electron-log: "npm:^5.1.5" @@ -8672,16 +8672,16 @@ __metadata: languageName: node linkType: hard -"electron@npm:35.4.0": - version: 35.4.0 - resolution: "electron@npm:35.4.0" +"electron@npm:35.6.0": + version: 35.6.0 + resolution: "electron@npm:35.6.0" dependencies: "@electron/get": "npm:^2.0.0" "@types/node": "npm:^22.7.7" extract-zip: "npm:^2.0.1" bin: electron: cli.js - checksum: 10c0/657c374421b433d7bec2bae27f8241285f1bf7f89a55d143e68d5c40cc19020cd2885052b402d946228f998c7399401dc1b9c7641b2ca00350fceaea6e37fbfc + checksum: 10c0/a8feb656ce9173607f23517753ba47933e716ba362695e1a31bc52bcd9003bc29160e0c2aa43373a30d7c02620fcc837fdbb3c37382cb2b28466f5018e296be1 languageName: node linkType: hard From 990ec5cd5cb8c44d670c47f886f480483044c378 Mon Sep 17 00:00:00 2001 From: Phantom <59059173+EurFelux@users.noreply.github.com> Date: Wed, 2 Jul 2025 11:34:53 +0800 Subject: [PATCH 06/21] fix(MessageMenubar): Add check for empty relatedUserMessageBlocks to prevent errors (#7733) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * fix(MessageMenubar): 修复未找到相关用户消息块时的处理逻辑 添加对relatedUserMessageBlocks为空的检查,避免后续逻辑报错 * fix(MessageMenubar): 修复检查消息块类型时的空引用问题 --- src/renderer/src/pages/home/Messages/MessageMenubar.tsx | 8 ++++++-- 1 file changed, 6 insertions(+), 2 deletions(-) diff --git a/src/renderer/src/pages/home/Messages/MessageMenubar.tsx b/src/renderer/src/pages/home/Messages/MessageMenubar.tsx index 6426ec6213..ea5e043c33 100644 --- a/src/renderer/src/pages/home/Messages/MessageMenubar.tsx +++ 
b/src/renderer/src/pages/home/Messages/MessageMenubar.tsx @@ -346,7 +346,7 @@ const MessageMenubar: FC = (props) => { return () => true } const state = store.getState() - const topicMessages = selectMessagesForTopic(state, topic.id) + const topicMessages: Message[] = selectMessagesForTopic(state, topic.id) // 理论上助手消息只会关联一条用户消息 const relatedUserMessage = topicMessages.find((msg) => { return msg.role === 'user' && message.askId === msg.id @@ -360,7 +360,11 @@ const MessageMenubar: FC = (props) => { messageBlocksSelectors.selectById(store.getState(), msgBlockId) ) - if (relatedUserMessageBlocks.some((block) => block.type === MessageBlockType.IMAGE)) { + if (!relatedUserMessageBlocks) { + return () => true + } + + if (relatedUserMessageBlocks.some((block) => block && block.type === MessageBlockType.IMAGE)) { return (m: Model) => isVisionModel(m) } else { return () => true From 19212e576f28c6aef7b2c07bf4929b3e7f43da04 Mon Sep 17 00:00:00 2001 From: kangfenmao Date: Wed, 2 Jul 2025 13:22:33 +0800 Subject: [PATCH 07/21] Revert "feat: Add S3 Backup (#6802)" This reverts commit 3f5901766d4d7d0a7780185c7f2d67c455dc274f. # Conflicts: # src/renderer/src/i18n/locales/zh-cn.json # src/renderer/src/i18n/locales/zh-tw.json --- packages/shared/IpcChannel.ts | 5 - src/main/ipc.ts | 5 - src/main/services/BackupManager.ts | 234 ++--------- src/main/services/RemoteStorage.ts | 126 +++--- src/preload/index.ts | 19 +- .../src/components/S3BackupManager.tsx | 298 -------------- src/renderer/src/components/S3Modals.tsx | 258 ------------ src/renderer/src/i18n/locales/en-us.json | 64 --- src/renderer/src/i18n/locales/ja-jp.json | 64 --- src/renderer/src/i18n/locales/ru-ru.json | 64 --- src/renderer/src/i18n/locales/zh-cn.json | 66 +--- src/renderer/src/i18n/locales/zh-tw.json | 66 +--- src/renderer/src/init.ts | 4 +- .../settings/DataSettings/DataSettings.tsx | 10 +- .../settings/DataSettings/S3Settings.tsx | 276 ------------- src/renderer/src/services/BackupService.ts | 366 ++---------------- src/renderer/src/store/backup.ts | 11 +- src/renderer/src/store/settings.ts | 34 +- src/renderer/src/types/index.ts | 12 - yarn.lock | 80 ---- 20 files changed, 122 insertions(+), 1940 deletions(-) delete mode 100644 src/renderer/src/components/S3BackupManager.tsx delete mode 100644 src/renderer/src/components/S3Modals.tsx delete mode 100644 src/renderer/src/pages/settings/DataSettings/S3Settings.tsx diff --git a/packages/shared/IpcChannel.ts b/packages/shared/IpcChannel.ts index ca49bd40c5..daea5dad6e 100644 --- a/packages/shared/IpcChannel.ts +++ b/packages/shared/IpcChannel.ts @@ -153,11 +153,6 @@ export enum IpcChannel { Backup_CheckConnection = 'backup:checkConnection', Backup_CreateDirectory = 'backup:createDirectory', Backup_DeleteWebdavFile = 'backup:deleteWebdavFile', - Backup_BackupToS3 = 'backup:backupToS3', - Backup_RestoreFromS3 = 'backup:restoreFromS3', - Backup_ListS3Files = 'backup:listS3Files', - Backup_DeleteS3File = 'backup:deleteS3File', - Backup_CheckS3Connection = 'backup:checkS3Connection', // zip Zip_Compress = 'zip:compress', diff --git a/src/main/ipc.ts b/src/main/ipc.ts index af043c7c8c..8c6810bcdc 100644 --- a/src/main/ipc.ts +++ b/src/main/ipc.ts @@ -344,11 +344,6 @@ export function registerIpc(mainWindow: BrowserWindow, app: Electron.App) { ipcMain.handle(IpcChannel.Backup_CheckConnection, backupManager.checkConnection) ipcMain.handle(IpcChannel.Backup_CreateDirectory, backupManager.createDirectory) ipcMain.handle(IpcChannel.Backup_DeleteWebdavFile, backupManager.deleteWebdavFile) - 
ipcMain.handle(IpcChannel.Backup_BackupToS3, backupManager.backupToS3) - ipcMain.handle(IpcChannel.Backup_RestoreFromS3, backupManager.restoreFromS3) - ipcMain.handle(IpcChannel.Backup_ListS3Files, backupManager.listS3Files) - ipcMain.handle(IpcChannel.Backup_DeleteS3File, backupManager.deleteS3File) - ipcMain.handle(IpcChannel.Backup_CheckS3Connection, backupManager.checkS3Connection) // file ipcMain.handle(IpcChannel.File_Open, fileManager.open) diff --git a/src/main/services/BackupManager.ts b/src/main/services/BackupManager.ts index 6e0c813e6d..e994e90bed 100644 --- a/src/main/services/BackupManager.ts +++ b/src/main/services/BackupManager.ts @@ -1,6 +1,5 @@ import { IpcChannel } from '@shared/IpcChannel' import { WebDavConfig } from '@types' -import { S3Config } from '@types' import archiver from 'archiver' import { exec } from 'child_process' import { app } from 'electron' @@ -11,7 +10,6 @@ import * as path from 'path' import { CreateDirectoryOptions, FileStat } from 'webdav' import { getDataPath } from '../utils' -import S3Storage from './RemoteStorage' import WebDav from './WebDav' import { windowService } from './WindowService' @@ -27,11 +25,6 @@ class BackupManager { this.restoreFromWebdav = this.restoreFromWebdav.bind(this) this.listWebdavFiles = this.listWebdavFiles.bind(this) this.deleteWebdavFile = this.deleteWebdavFile.bind(this) - this.backupToS3 = this.backupToS3.bind(this) - this.restoreFromS3 = this.restoreFromS3.bind(this) - this.listS3Files = this.listS3Files.bind(this) - this.deleteS3File = this.deleteS3File.bind(this) - this.checkS3Connection = this.checkS3Connection.bind(this) } private async setWritableRecursive(dirPath: string): Promise { @@ -92,11 +85,7 @@ class BackupManager { const onProgress = (processData: { stage: string; progress: number; total: number }) => { mainWindow?.webContents.send(IpcChannel.BackupProgress, processData) - // 只在关键阶段记录日志:开始、结束和主要阶段转换点 - const logStages = ['preparing', 'writing_data', 'preparing_compression', 'completed'] - if (logStages.includes(processData.stage) || processData.progress === 100) { - Logger.log('[BackupManager] backup progress', processData) - } + Logger.log('[BackupManager] backup progress', processData) } try { @@ -158,23 +147,18 @@ class BackupManager { let totalBytes = 0 let processedBytes = 0 - // 首先计算总文件数和总大小,但不记录详细日志 + // 首先计算总文件数和总大小 const calculateTotals = async (dirPath: string) => { - try { - const items = await fs.readdir(dirPath, { withFileTypes: true }) - for (const item of items) { - const fullPath = path.join(dirPath, item.name) - if (item.isDirectory()) { - await calculateTotals(fullPath) - } else { - totalEntries++ - const stats = await fs.stat(fullPath) - totalBytes += stats.size - } + const items = await fs.readdir(dirPath, { withFileTypes: true }) + for (const item of items) { + const fullPath = path.join(dirPath, item.name) + if (item.isDirectory()) { + await calculateTotals(fullPath) + } else { + totalEntries++ + const stats = await fs.stat(fullPath) + totalBytes += stats.size } - } catch (error) { - // 仅在出错时记录日志 - Logger.error('[BackupManager] Error calculating totals:', error) } } @@ -246,11 +230,7 @@ class BackupManager { const onProgress = (processData: { stage: string; progress: number; total: number }) => { mainWindow?.webContents.send(IpcChannel.RestoreProgress, processData) - // 只在关键阶段记录日志 - const logStages = ['preparing', 'extracting', 'extracted', 'reading_data', 'completed'] - if (logStages.includes(processData.stage) || processData.progress === 100) { - Logger.log('[BackupManager] 
restore progress', processData) - } + Logger.log('[BackupManager] restore progress', processData) } try { @@ -402,54 +382,21 @@ class BackupManager { destination: string, onProgress: (size: number) => void ): Promise { - // 先统计总文件数 - let totalFiles = 0 - let processedFiles = 0 - let lastProgressReported = 0 + const items = await fs.readdir(source, { withFileTypes: true }) - // 计算总文件数 - const countFiles = async (dir: string): Promise => { - let count = 0 - const items = await fs.readdir(dir, { withFileTypes: true }) - for (const item of items) { - if (item.isDirectory()) { - count += await countFiles(path.join(dir, item.name)) - } else { - count++ - } - } - return count - } + for (const item of items) { + const sourcePath = path.join(source, item.name) + const destPath = path.join(destination, item.name) - totalFiles = await countFiles(source) - - // 复制文件并更新进度 - const copyDir = async (src: string, dest: string): Promise => { - const items = await fs.readdir(src, { withFileTypes: true }) - - for (const item of items) { - const sourcePath = path.join(src, item.name) - const destPath = path.join(dest, item.name) - - if (item.isDirectory()) { - await fs.ensureDir(destPath) - await copyDir(sourcePath, destPath) - } else { - const stats = await fs.stat(sourcePath) - await fs.copy(sourcePath, destPath) - processedFiles++ - - // 只在进度变化超过5%时报告进度 - const currentProgress = Math.floor((processedFiles / totalFiles) * 100) - if (currentProgress - lastProgressReported >= 5 || processedFiles === totalFiles) { - lastProgressReported = currentProgress - onProgress(stats.size) - } - } + if (item.isDirectory()) { + await fs.ensureDir(destPath) + await this.copyDirWithProgress(sourcePath, destPath, onProgress) + } else { + const stats = await fs.stat(sourcePath) + await fs.copy(sourcePath, destPath) + onProgress(stats.size) } } - - await copyDir(source, destination) } async checkConnection(_: Electron.IpcMainInvokeEvent, webdavConfig: WebDavConfig) { @@ -476,141 +423,6 @@ class BackupManager { throw new Error(error.message || 'Failed to delete backup file') } } - - async backupToS3(_: Electron.IpcMainInvokeEvent, data: string, s3Config: S3Config) { - // 获取设备名 - const os = require('os') - const deviceName = os.hostname ? 
os.hostname() : 'device' - const timestamp = new Date() - .toISOString() - .replace(/[-:T.Z]/g, '') - .slice(0, 14) - const filename = s3Config.fileName || `cherry-studio.backup.${deviceName}.${timestamp}.zip` - - // 不记录详细日志,只记录开始和结束 - Logger.log(`[BackupManager] Starting S3 backup to ${filename}`) - - const backupedFilePath = await this.backup(_, filename, data, undefined, s3Config.skipBackupFile) - const s3Client = new S3Storage('s3', { - endpoint: s3Config.endpoint, - region: s3Config.region, - bucket: s3Config.bucket, - access_key_id: s3Config.access_key_id, - secret_access_key: s3Config.secret_access_key, - root: s3Config.root || '' - }) - try { - const fileBuffer = await fs.promises.readFile(backupedFilePath) - const result = await s3Client.putFileContents(filename, fileBuffer) - await fs.remove(backupedFilePath) - - Logger.log(`[BackupManager] S3 backup completed successfully: ${filename}`) - return result - } catch (error) { - Logger.error(`[BackupManager] S3 backup failed:`, error) - await fs.remove(backupedFilePath) - throw error - } - } - - async restoreFromS3(_: Electron.IpcMainInvokeEvent, s3Config: S3Config) { - const filename = s3Config.fileName || 'cherry-studio.backup.zip' - - // 只记录开始和结束或错误 - Logger.log(`[BackupManager] Starting restore from S3: ${filename}`) - - const s3Client = new S3Storage('s3', { - endpoint: s3Config.endpoint, - region: s3Config.region, - bucket: s3Config.bucket, - access_key_id: s3Config.access_key_id, - secret_access_key: s3Config.secret_access_key, - root: s3Config.root || '' - }) - try { - const retrievedFile = await s3Client.getFileContents(filename) - const backupedFilePath = path.join(this.backupDir, filename) - if (!fs.existsSync(this.backupDir)) { - fs.mkdirSync(this.backupDir, { recursive: true }) - } - await new Promise((resolve, reject) => { - const writeStream = fs.createWriteStream(backupedFilePath) - writeStream.write(retrievedFile as Buffer) - writeStream.end() - writeStream.on('finish', () => resolve()) - writeStream.on('error', (error) => reject(error)) - }) - - Logger.log(`[BackupManager] S3 restore file downloaded successfully: ${filename}`) - return await this.restore(_, backupedFilePath) - } catch (error: any) { - Logger.error('[BackupManager] Failed to restore from S3:', error) - throw new Error(error.message || 'Failed to restore backup file') - } - } - - listS3Files = async (_: Electron.IpcMainInvokeEvent, s3Config: S3Config) => { - try { - const s3Client = new S3Storage('s3', { - endpoint: s3Config.endpoint, - region: s3Config.region, - bucket: s3Config.bucket, - access_key_id: s3Config.access_key_id, - secret_access_key: s3Config.secret_access_key, - root: s3Config.root || '' - }) - const entries = await s3Client.instance?.list('/') - const files: Array<{ fileName: string; modifiedTime: string; size: number }> = [] - if (entries) { - for await (const entry of entries) { - const path = entry.path() - if (path.endsWith('.zip')) { - const meta = await s3Client.instance!.stat(path) - if (meta.isFile()) { - files.push({ - fileName: path.replace(/^\/+/, ''), - modifiedTime: meta.lastModified || '', - size: Number(meta.contentLength || 0n) - }) - } - } - } - } - return files.sort((a, b) => new Date(b.modifiedTime).getTime() - new Date(a.modifiedTime).getTime()) - } catch (error: any) { - Logger.error('Failed to list S3 files:', error) - throw new Error(error.message || 'Failed to list backup files') - } - } - - async deleteS3File(_: Electron.IpcMainInvokeEvent, fileName: string, s3Config: S3Config) { - try { - const s3Client = new 
S3Storage('s3', { - endpoint: s3Config.endpoint, - region: s3Config.region, - bucket: s3Config.bucket, - access_key_id: s3Config.access_key_id, - secret_access_key: s3Config.secret_access_key, - root: s3Config.root || '' - }) - return await s3Client.deleteFile(fileName) - } catch (error: any) { - Logger.error('Failed to delete S3 file:', error) - throw new Error(error.message || 'Failed to delete backup file') - } - } - - async checkS3Connection(_: Electron.IpcMainInvokeEvent, s3Config: S3Config) { - const s3Client = new S3Storage('s3', { - endpoint: s3Config.endpoint, - region: s3Config.region, - bucket: s3Config.bucket, - access_key_id: s3Config.access_key_id, - secret_access_key: s3Config.secret_access_key, - root: s3Config.root || '' - }) - return await s3Client.checkConnection() - } } export default BackupManager diff --git a/src/main/services/RemoteStorage.ts b/src/main/services/RemoteStorage.ts index 4efc57b6c6..b62489bbbe 100644 --- a/src/main/services/RemoteStorage.ts +++ b/src/main/services/RemoteStorage.ts @@ -1,83 +1,57 @@ -import Logger from 'electron-log' -import type { Operator as OperatorType } from 'opendal' -const { Operator } = require('opendal') +// import Logger from 'electron-log' +// import { Operator } from 'opendal' -export default class S3Storage { - public instance: OperatorType | undefined +// export default class RemoteStorage { +// public instance: Operator | undefined - /** - * - * @param scheme is the scheme for opendal services. Available value includes "azblob", "azdls", "cos", "gcs", "obs", "oss", "s3", "webdav", "webhdfs", "aliyun-drive", "alluxio", "azfile", "dropbox", "gdrive", "onedrive", "postgresql", "mysql", "redis", "swift", "mongodb", "alluxio", "b2", "seafile", "upyun", "koofr", "yandex-disk" - * @param options is the options for given opendal services. Valid options depend on the scheme. Checkout https://docs.rs/opendal/latest/opendal/services/index.html for all valid options. - * - * For example, use minio as remote storage: - * - * ```typescript - * const storage = new S3Storage('s3', { - * endpoint: 'http://localhost:9000', - * region: 'us-east-1', - * bucket: 'testbucket', - * access_key_id: 'user', - * secret_access_key: 'password', - * root: '/path/to/basepath', - * }) - * ``` - */ - constructor(scheme: string, options?: Record | undefined | null) { - this.instance = new Operator(scheme, options) +// /** +// * +// * @param scheme is the scheme for opendal services. Available value includes "azblob", "azdls", "cos", "gcs", "obs", "oss", "s3", "webdav", "webhdfs", "aliyun-drive", "alluxio", "azfile", "dropbox", "gdrive", "onedrive", "postgresql", "mysql", "redis", "swift", "mongodb", "alluxio", "b2", "seafile", "upyun", "koofr", "yandex-disk" +// * @param options is the options for given opendal services. Valid options depend on the scheme. Checkout https://docs.rs/opendal/latest/opendal/services/index.html for all valid options. 
+// * +// * For example, use minio as remote storage: +// * +// * ```typescript +// * const storage = new RemoteStorage('s3', { +// * endpoint: 'http://localhost:9000', +// * region: 'us-east-1', +// * bucket: 'testbucket', +// * access_key_id: 'user', +// * secret_access_key: 'password', +// * root: '/path/to/basepath', +// * }) +// * ``` +// */ +// constructor(scheme: string, options?: Record | undefined | null) { +// this.instance = new Operator(scheme, options) - this.putFileContents = this.putFileContents.bind(this) - this.getFileContents = this.getFileContents.bind(this) - } +// this.putFileContents = this.putFileContents.bind(this) +// this.getFileContents = this.getFileContents.bind(this) +// } - public putFileContents = async (filename: string, data: string | Buffer) => { - if (!this.instance) { - return new Error('RemoteStorage client not initialized') - } +// public putFileContents = async (filename: string, data: string | Buffer) => { +// if (!this.instance) { +// return new Error('RemoteStorage client not initialized') +// } - try { - return await this.instance.write(filename, data) - } catch (error) { - Logger.error('[RemoteStorage] Error putting file contents:', error) - throw error - } - } +// try { +// return await this.instance.write(filename, data) +// } catch (error) { +// Logger.error('[RemoteStorage] Error putting file contents:', error) +// throw error +// } +// } - public getFileContents = async (filename: string) => { - if (!this.instance) { - throw new Error('RemoteStorage client not initialized') - } +// public getFileContents = async (filename: string) => { +// if (!this.instance) { +// throw new Error('RemoteStorage client not initialized') +// } - try { - return await this.instance.read(filename) - } catch (error) { - Logger.error('[RemoteStorage] Error getting file contents:', error) - throw error - } - } - - public deleteFile = async (filename: string) => { - if (!this.instance) { - throw new Error('RemoteStorage client not initialized') - } - try { - return await this.instance.delete(filename) - } catch (error) { - Logger.error('[RemoteStorage] Error deleting file:', error) - throw error - } - } - - public checkConnection = async () => { - if (!this.instance) { - throw new Error('RemoteStorage client not initialized') - } - try { - // 检查根目录是否可访问 - return await this.instance.stat('/') - } catch (error) { - Logger.error('[RemoteStorage] Error checking connection:', error) - throw error - } - } -} +// try { +// return await this.instance.read(filename) +// } catch (error) { +// Logger.error('[RemoteStorage] Error getting file contents:', error) +// throw error +// } +// } +// } diff --git a/src/preload/index.ts b/src/preload/index.ts index f6e49ece10..8412e00bc3 100644 --- a/src/preload/index.ts +++ b/src/preload/index.ts @@ -2,16 +2,7 @@ import type { ExtractChunkData } from '@cherrystudio/embedjs-interfaces' import { electronAPI } from '@electron-toolkit/preload' import { UpgradeChannel } from '@shared/config/constant' import { IpcChannel } from '@shared/IpcChannel' -import { - FileType, - KnowledgeBaseParams, - KnowledgeItem, - MCPServer, - S3Config, - Shortcut, - ThemeMode, - WebDavConfig -} from '@types' +import { FileType, KnowledgeBaseParams, KnowledgeItem, MCPServer, Shortcut, ThemeMode, WebDavConfig } from '@types' import { contextBridge, ipcRenderer, OpenDialogOptions, shell, webUtils } from 'electron' import { Notification } from 'src/renderer/src/types/notification' import { CreateDirectoryOptions } from 'webdav' @@ -80,13 +71,7 @@ const api = { 
createDirectory: (webdavConfig: WebDavConfig, path: string, options?: CreateDirectoryOptions) => ipcRenderer.invoke(IpcChannel.Backup_CreateDirectory, webdavConfig, path, options), deleteWebdavFile: (fileName: string, webdavConfig: WebDavConfig) => - ipcRenderer.invoke(IpcChannel.Backup_DeleteWebdavFile, fileName, webdavConfig), - backupToS3: (data: string, s3Config: S3Config) => ipcRenderer.invoke(IpcChannel.Backup_BackupToS3, data, s3Config), - restoreFromS3: (s3Config: S3Config) => ipcRenderer.invoke(IpcChannel.Backup_RestoreFromS3, s3Config), - listS3Files: (s3Config: S3Config) => ipcRenderer.invoke(IpcChannel.Backup_ListS3Files, s3Config), - deleteS3File: (fileName: string, s3Config: S3Config) => - ipcRenderer.invoke(IpcChannel.Backup_DeleteS3File, fileName, s3Config), - checkS3Connection: (s3Config: S3Config) => ipcRenderer.invoke(IpcChannel.Backup_CheckS3Connection, s3Config) + ipcRenderer.invoke(IpcChannel.Backup_DeleteWebdavFile, fileName, webdavConfig) }, file: { select: (options?: OpenDialogOptions) => ipcRenderer.invoke(IpcChannel.File_Select, options), diff --git a/src/renderer/src/components/S3BackupManager.tsx b/src/renderer/src/components/S3BackupManager.tsx deleted file mode 100644 index ecc9ed88ef..0000000000 --- a/src/renderer/src/components/S3BackupManager.tsx +++ /dev/null @@ -1,298 +0,0 @@ -import { DeleteOutlined, ExclamationCircleOutlined, ReloadOutlined } from '@ant-design/icons' -import { restoreFromS3 } from '@renderer/services/BackupService' -import { formatFileSize } from '@renderer/utils' -import { Button, Modal, Table, Tooltip } from 'antd' -import dayjs from 'dayjs' -import { useCallback, useEffect, useState } from 'react' -import { useTranslation } from 'react-i18next' - -interface BackupFile { - fileName: string - modifiedTime: string - size: number -} - -interface S3Config { - endpoint: string - region: string - bucket: string - access_key_id: string - secret_access_key: string - root?: string -} - -interface S3BackupManagerProps { - visible: boolean - onClose: () => void - s3Config: { - endpoint?: string - region?: string - bucket?: string - access_key_id?: string - secret_access_key?: string - root?: string - } - restoreMethod?: (fileName: string) => Promise -} - -export function S3BackupManager({ visible, onClose, s3Config, restoreMethod }: S3BackupManagerProps) { - const [backupFiles, setBackupFiles] = useState([]) - const [loading, setLoading] = useState(false) - const [selectedRowKeys, setSelectedRowKeys] = useState([]) - const [deleting, setDeleting] = useState(false) - const [restoring, setRestoring] = useState(false) - const [pagination, setPagination] = useState({ - current: 1, - pageSize: 5, - total: 0 - }) - const { t } = useTranslation() - - const { endpoint, region, bucket, access_key_id, secret_access_key, root } = s3Config - - const fetchBackupFiles = useCallback(async () => { - if (!endpoint || !region || !bucket || !access_key_id || !secret_access_key) { - window.message.error(t('settings.data.s3.manager.config.incomplete')) - return - } - - setLoading(true) - try { - const files = await window.api.backup.listS3Files({ - endpoint, - region, - bucket, - access_key_id, - secret_access_key, - root - } as S3Config) - setBackupFiles(files) - setPagination((prev) => ({ - ...prev, - total: files.length - })) - } catch (error: any) { - window.message.error(t('settings.data.s3.manager.files.fetch.error', { message: error.message })) - } finally { - setLoading(false) - } - }, [endpoint, region, bucket, access_key_id, secret_access_key, root, t]) - 
- useEffect(() => { - if (visible) { - fetchBackupFiles() - setSelectedRowKeys([]) - setPagination((prev) => ({ - ...prev, - current: 1 - })) - } - }, [visible, fetchBackupFiles]) - - const handleTableChange = (pagination: any) => { - setPagination(pagination) - } - - const handleDeleteSelected = async () => { - if (selectedRowKeys.length === 0) { - window.message.warning(t('settings.data.s3.manager.select.warning')) - return - } - - if (!endpoint || !region || !bucket || !access_key_id || !secret_access_key) { - window.message.error(t('settings.data.s3.manager.config.incomplete')) - return - } - - window.modal.confirm({ - title: t('settings.data.s3.manager.delete.confirm.title'), - icon: , - content: t('settings.data.s3.manager.delete.confirm.multiple', { count: selectedRowKeys.length }), - okText: t('settings.data.s3.manager.delete.confirm.title'), - cancelText: t('common.cancel'), - centered: true, - onOk: async () => { - setDeleting(true) - try { - // 依次删除选中的文件 - for (const key of selectedRowKeys) { - await window.api.backup.deleteS3File(key.toString(), { - endpoint, - region, - bucket, - access_key_id, - secret_access_key, - root - } as S3Config) - } - window.message.success( - t('settings.data.s3.manager.delete.success.multiple', { count: selectedRowKeys.length }) - ) - setSelectedRowKeys([]) - await fetchBackupFiles() - } catch (error: any) { - window.message.error(t('settings.data.s3.manager.delete.error', { message: error.message })) - } finally { - setDeleting(false) - } - } - }) - } - - const handleDeleteSingle = async (fileName: string) => { - if (!endpoint || !region || !bucket || !access_key_id || !secret_access_key) { - window.message.error(t('settings.data.s3.manager.config.incomplete')) - return - } - - window.modal.confirm({ - title: t('settings.data.s3.manager.delete.confirm.title'), - icon: , - content: t('settings.data.s3.manager.delete.confirm.single', { fileName }), - okText: t('settings.data.s3.manager.delete.confirm.title'), - cancelText: t('common.cancel'), - centered: true, - onOk: async () => { - setDeleting(true) - try { - await window.api.backup.deleteS3File(fileName, { - endpoint, - region, - bucket, - access_key_id, - secret_access_key, - root - } as S3Config) - window.message.success(t('settings.data.s3.manager.delete.success.single')) - await fetchBackupFiles() - } catch (error: any) { - window.message.error(t('settings.data.s3.manager.delete.error', { message: error.message })) - } finally { - setDeleting(false) - } - } - }) - } - - const handleRestore = async (fileName: string) => { - if (!endpoint || !region || !bucket || !access_key_id || !secret_access_key) { - window.message.error(t('settings.data.s3.manager.config.incomplete')) - return - } - - window.modal.confirm({ - title: t('settings.data.s3.restore.confirm.title'), - icon: , - content: t('settings.data.s3.restore.confirm.content'), - okText: t('settings.data.s3.restore.confirm.ok'), - cancelText: t('settings.data.s3.restore.confirm.cancel'), - centered: true, - onOk: async () => { - setRestoring(true) - try { - await (restoreMethod || restoreFromS3)(fileName) - window.message.success(t('settings.data.s3.restore.success')) - onClose() // 关闭模态框 - } catch (error: any) { - window.message.error(t('settings.data.s3.restore.error', { message: error.message })) - } finally { - setRestoring(false) - } - } - }) - } - - const columns = [ - { - title: t('settings.data.s3.manager.columns.fileName'), - dataIndex: 'fileName', - key: 'fileName', - ellipsis: { - showTitle: false - }, - render: (fileName: 
string) => ( - - {fileName} - - ) - }, - { - title: t('settings.data.s3.manager.columns.modifiedTime'), - dataIndex: 'modifiedTime', - key: 'modifiedTime', - width: 180, - render: (time: string) => dayjs(time).format('YYYY-MM-DD HH:mm:ss') - }, - { - title: t('settings.data.s3.manager.columns.size'), - dataIndex: 'size', - key: 'size', - width: 120, - render: (size: number) => formatFileSize(size) - }, - { - title: t('settings.data.s3.manager.columns.actions'), - key: 'action', - width: 160, - render: (_: any, record: BackupFile) => ( - <> - - - - ) - } - ] - - const rowSelection = { - selectedRowKeys, - onChange: (selectedRowKeys: React.Key[]) => { - setSelectedRowKeys(selectedRowKeys) - } - } - - return ( - } onClick={fetchBackupFiles} disabled={loading}> - {t('settings.data.s3.manager.refresh')} - , - , - - ]}> - - - ) -} diff --git a/src/renderer/src/components/S3Modals.tsx b/src/renderer/src/components/S3Modals.tsx deleted file mode 100644 index a74ad2e9ca..0000000000 --- a/src/renderer/src/components/S3Modals.tsx +++ /dev/null @@ -1,258 +0,0 @@ -import { backupToS3, handleData } from '@renderer/services/BackupService' -import { formatFileSize } from '@renderer/utils' -import { Input, Modal, Select, Spin } from 'antd' -import dayjs from 'dayjs' -import { useCallback, useState } from 'react' -import { useTranslation } from 'react-i18next' - -interface BackupFile { - fileName: string - modifiedTime: string - size: number -} - -export function useS3BackupModal() { - const [customFileName, setCustomFileName] = useState('') - const [isModalVisible, setIsModalVisible] = useState(false) - const [backuping, setBackuping] = useState(false) - - const handleBackup = async () => { - setBackuping(true) - try { - await backupToS3({ customFileName, showMessage: true }) - } finally { - setBackuping(false) - setIsModalVisible(false) - } - } - - const handleCancel = () => { - setIsModalVisible(false) - } - - const showBackupModal = useCallback(async () => { - // 获取默认文件名 - const deviceType = await window.api.system.getDeviceType() - const hostname = await window.api.system.getHostname() - const timestamp = dayjs().format('YYYYMMDDHHmmss') - const defaultFileName = `cherry-studio.${timestamp}.${hostname}.${deviceType}.zip` - setCustomFileName(defaultFileName) - setIsModalVisible(true) - }, []) - - return { - isModalVisible, - handleBackup, - handleCancel, - backuping, - customFileName, - setCustomFileName, - showBackupModal - } -} - -type S3BackupModalProps = { - isModalVisible: boolean - handleBackup: () => Promise - handleCancel: () => void - backuping: boolean - customFileName: string - setCustomFileName: (value: string) => void -} - -export function S3BackupModal({ - isModalVisible, - handleBackup, - handleCancel, - backuping, - customFileName, - setCustomFileName -}: S3BackupModalProps) { - const { t } = useTranslation() - - return ( - - setCustomFileName(e.target.value)} - placeholder={t('settings.data.s3.backup.modal.filename.placeholder')} - /> - - ) -} - -interface UseS3RestoreModalProps { - endpoint: string | undefined - region: string | undefined - bucket: string | undefined - access_key_id: string | undefined - secret_access_key: string | undefined - root?: string | undefined -} - -export function useS3RestoreModal({ - endpoint, - region, - bucket, - access_key_id, - secret_access_key, - root -}: UseS3RestoreModalProps) { - const [isRestoreModalVisible, setIsRestoreModalVisible] = useState(false) - const [restoring, setRestoring] = useState(false) - const [selectedFile, setSelectedFile] = 
useState(null) - const [loadingFiles, setLoadingFiles] = useState(false) - const [backupFiles, setBackupFiles] = useState([]) - const { t } = useTranslation() - - const showRestoreModal = useCallback(async () => { - if (!endpoint || !region || !bucket || !access_key_id || !secret_access_key) { - window.message.error({ content: t('settings.data.s3.manager.config.incomplete'), key: 's3-error' }) - return - } - - setIsRestoreModalVisible(true) - setLoadingFiles(true) - try { - const files = await window.api.backup.listS3Files({ - endpoint, - region, - bucket, - access_key_id, - secret_access_key, - root - }) - setBackupFiles(files) - } catch (error: any) { - window.message.error({ - content: t('settings.data.s3.manager.files.fetch.error', { message: error.message }), - key: 'list-files-error' - }) - } finally { - setLoadingFiles(false) - } - }, [endpoint, region, bucket, access_key_id, secret_access_key, root, t]) - - const handleRestore = useCallback(async () => { - if (!selectedFile || !endpoint || !region || !bucket || !access_key_id || !secret_access_key) { - window.message.error({ - content: !selectedFile - ? t('settings.data.s3.restore.file.required') - : t('settings.data.s3.restore.config.incomplete'), - key: 'restore-error' - }) - return - } - - window.modal.confirm({ - title: t('settings.data.s3.restore.confirm.title'), - content: t('settings.data.s3.restore.confirm.content'), - okText: t('settings.data.s3.restore.confirm.ok'), - cancelText: t('settings.data.s3.restore.confirm.cancel'), - centered: true, - onOk: async () => { - setRestoring(true) - try { - const data = await window.api.backup.restoreFromS3({ - endpoint, - region, - bucket, - access_key_id, - secret_access_key, - root, - fileName: selectedFile - }) - await handleData(JSON.parse(data)) - window.message.success(t('settings.data.s3.restore.success')) - setIsRestoreModalVisible(false) - } catch (error: any) { - window.message.error({ - content: t('settings.data.s3.restore.error', { message: error.message }), - key: 'restore-error' - }) - } finally { - setRestoring(false) - } - } - }) - }, [selectedFile, endpoint, region, bucket, access_key_id, secret_access_key, root, t]) - - const handleCancel = () => { - setIsRestoreModalVisible(false) - } - - return { - isRestoreModalVisible, - handleRestore, - handleCancel, - restoring, - selectedFile, - setSelectedFile, - loadingFiles, - backupFiles, - showRestoreModal - } -} - -type S3RestoreModalProps = ReturnType - -export function S3RestoreModal({ - isRestoreModalVisible, - handleRestore, - handleCancel, - restoring, - selectedFile, - setSelectedFile, - loadingFiles, - backupFiles -}: S3RestoreModalProps) { - const { t } = useTranslation() - - return ( - -
- setEndpoint(e.target.value)} - style={{ width: 250 }} - type="url" - onBlur={() => dispatch(setS3({ ...s3, endpoint: endpoint || '' }))} - /> - - - - {t('settings.data.s3.region')} - setRegion(e.target.value)} - style={{ width: 250 }} - onBlur={() => dispatch(setS3({ ...s3, region: region || '' }))} - /> - - - - {t('settings.data.s3.bucket')} - setBucket(e.target.value)} - style={{ width: 250 }} - onBlur={() => dispatch(setS3({ ...s3, bucket: bucket || '' }))} - /> - - - - {t('settings.data.s3.accessKeyId')} - setAccessKeyId(e.target.value)} - style={{ width: 250 }} - onBlur={() => dispatch(setS3({ ...s3, accessKeyId: accessKeyId || '' }))} - /> - - - - {t('settings.data.s3.secretAccessKey')} - setSecretAccessKey(e.target.value)} - style={{ width: 250 }} - onBlur={() => dispatch(setS3({ ...s3, secretAccessKey: secretAccessKey || '' }))} - /> - - - - {t('settings.data.s3.root')} - setRoot(e.target.value)} - style={{ width: 250 }} - onBlur={() => dispatch(setS3({ ...s3, root: root || '' }))} - /> - - - - {t('settings.data.s3.backup.operation')} - - - - - - - - {t('settings.data.s3.autoSync')} - - - - - {t('settings.data.s3.maxBackups')} - - - - - {t('settings.data.s3.skipBackupFile')} - - - - {t('settings.data.s3.skipBackupFile.help')} - - {syncInterval > 0 && ( - <> - - - {t('settings.data.s3.syncStatus')} - {renderSyncStatus()} - - - )} - <> - - - - - - ) -} - -export default S3Settings diff --git a/src/renderer/src/services/BackupService.ts b/src/renderer/src/services/BackupService.ts index b99ea6c77e..3d78b2752a 100644 --- a/src/renderer/src/services/BackupService.ts +++ b/src/renderer/src/services/BackupService.ts @@ -4,62 +4,11 @@ import { upgradeToV7 } from '@renderer/databases/upgrades' import i18n from '@renderer/i18n' import store from '@renderer/store' import { setWebDAVSyncState } from '@renderer/store/backup' -import { setS3SyncState } from '@renderer/store/backup' import { uuid } from '@renderer/utils' import dayjs from 'dayjs' import { NotificationService } from './NotificationService' -// 重试删除S3文件的辅助函数 -async function deleteS3FileWithRetry(fileName: string, s3Config: any, maxRetries = 3) { - let lastError: Error | null = null - - for (let attempt = 1; attempt <= maxRetries; attempt++) { - try { - await window.api.backup.deleteS3File(fileName, s3Config) - Logger.log(`[Backup] Successfully deleted old backup file: ${fileName} (attempt ${attempt})`) - return true - } catch (error: any) { - lastError = error - Logger.warn(`[Backup] Delete attempt ${attempt}/${maxRetries} failed for ${fileName}:`, error.message) - - // 如果不是最后一次尝试,等待一段时间再重试 - if (attempt < maxRetries) { - const delay = attempt * 1000 + Math.random() * 1000 // 1-2秒的随机延迟 - await new Promise((resolve) => setTimeout(resolve, delay)) - } - } - } - - Logger.error(`[Backup] Failed to delete old backup file after ${maxRetries} attempts: ${fileName}`, lastError) - return false -} - -// 重试删除WebDAV文件的辅助函数 -async function deleteWebdavFileWithRetry(fileName: string, webdavConfig: any, maxRetries = 3) { - let lastError: Error | null = null - - for (let attempt = 1; attempt <= maxRetries; attempt++) { - try { - await window.api.backup.deleteWebdavFile(fileName, webdavConfig) - Logger.log(`[Backup] Successfully deleted old backup file: ${fileName} (attempt ${attempt})`) - return true - } catch (error: any) { - lastError = error - Logger.warn(`[Backup] Delete attempt ${attempt}/${maxRetries} failed for ${fileName}:`, error.message) - - // 如果不是最后一次尝试,等待一段时间再重试 - if (attempt < maxRetries) { - const delay = attempt * 1000 + 
Math.random() * 1000 // 1-2秒的随机延迟 - await new Promise((resolve) => setTimeout(resolve, delay)) - } - } - } - - Logger.error(`[Backup] Failed to delete old backup file after ${maxRetries} attempts: ${fileName}`, lastError) - return false -} - export async function backup(skipBackupFile: boolean) { const filename = `cherry-studio.${dayjs().format('YYYYMMDDHHmm')}.zip` const fileContnet = await getBackupData() @@ -212,21 +161,17 @@ export async function backupToWebdav({ // 文件已按修改时间降序排序,所以最旧的文件在末尾 const filesToDelete = currentDeviceFiles.slice(webdavMaxBackups) - Logger.log(`[Backup] Cleaning up ${filesToDelete.length} old backup files`) - - // 串行删除文件,避免并发请求导致的问题 - for (let i = 0; i < filesToDelete.length; i++) { - const file = filesToDelete[i] - await deleteWebdavFileWithRetry(file.fileName, { - webdavHost, - webdavUser, - webdavPass, - webdavPath - }) - - // 在删除操作之间添加短暂延迟,避免请求过于频繁 - if (i < filesToDelete.length - 1) { - await new Promise((resolve) => setTimeout(resolve, 500)) + for (const file of filesToDelete) { + try { + await window.api.backup.deleteWebdavFile(file.fileName, { + webdavHost, + webdavUser, + webdavPass, + webdavPath + }) + Logger.log(`[Backup] Deleted old backup file: ${file.fileName}`) + } catch (error) { + Logger.error(`[Backup] Failed to delete old backup file: ${file.fileName}`, error) } } } @@ -297,201 +242,6 @@ export async function restoreFromWebdav(fileName?: string) { } } -// 备份到 S3 -export async function backupToS3({ - showMessage = false, - customFileName = '', - autoBackupProcess = false -}: { showMessage?: boolean; customFileName?: string; autoBackupProcess?: boolean } = {}) { - const notificationService = NotificationService.getInstance() - if (isManualBackupRunning) { - Logger.log('[Backup] Manual backup already in progress') - return - } - - // force set showMessage to false when auto backup process - if (autoBackupProcess) { - showMessage = false - } - - isManualBackupRunning = true - - store.dispatch(setS3SyncState({ syncing: true, lastSyncError: null })) - - const { - s3: { - endpoint: s3Endpoint, - region: s3Region, - bucket: s3Bucket, - accessKeyId: s3AccessKeyId, - secretAccessKey: s3SecretAccessKey, - root: s3Root, - maxBackups: s3MaxBackups, - skipBackupFile: s3SkipBackupFile - } - } = store.getState().settings - let deviceType = 'unknown' - let hostname = 'unknown' - try { - deviceType = (await window.api.system.getDeviceType()) || 'unknown' - hostname = (await window.api.system.getHostname()) || 'unknown' - } catch (error) { - Logger.error('[Backup] Failed to get device type or hostname:', error) - } - const timestamp = dayjs().format('YYYYMMDDHHmmss') - const backupFileName = customFileName || `cherry-studio.${timestamp}.${hostname}.${deviceType}.zip` - const finalFileName = backupFileName.endsWith('.zip') ? 
backupFileName : `${backupFileName}.zip` - const backupData = await getBackupData() - - // 上传文件 - try { - await window.api.backup.backupToS3(backupData, { - endpoint: s3Endpoint, - region: s3Region, - bucket: s3Bucket, - access_key_id: s3AccessKeyId, - secret_access_key: s3SecretAccessKey, - root: s3Root, - fileName: finalFileName, - skipBackupFile: s3SkipBackupFile - }) - - // S3上传成功 - store.dispatch( - setS3SyncState({ - lastSyncError: null - }) - ) - notificationService.send({ - id: uuid(), - type: 'success', - title: i18n.t('common.success'), - message: i18n.t('message.backup.success'), - silent: false, - timestamp: Date.now(), - source: 'backup' - }) - showMessage && window.message.success({ content: i18n.t('message.backup.success'), key: 'backup' }) - - // 清理旧备份文件 - if (s3MaxBackups > 0) { - try { - // 获取所有备份文件 - const files = await window.api.backup.listS3Files({ - endpoint: s3Endpoint, - region: s3Region, - bucket: s3Bucket, - access_key_id: s3AccessKeyId, - secret_access_key: s3SecretAccessKey, - root: s3Root - }) - - // 筛选当前设备的备份文件 - const currentDeviceFiles = files.filter((file) => { - // 检查文件名是否包含当前设备的标识信息 - return file.fileName.includes(deviceType) && file.fileName.includes(hostname) - }) - - // 如果当前设备的备份文件数量超过最大保留数量,删除最旧的文件 - if (currentDeviceFiles.length > s3MaxBackups) { - // 文件已按修改时间降序排序,所以最旧的文件在末尾 - const filesToDelete = currentDeviceFiles.slice(s3MaxBackups) - - Logger.log(`[Backup] Cleaning up ${filesToDelete.length} old backup files`) - - // 串行删除文件,避免并发请求导致的问题 - for (let i = 0; i < filesToDelete.length; i++) { - const file = filesToDelete[i] - await deleteS3FileWithRetry(file.fileName, { - endpoint: s3Endpoint, - region: s3Region, - bucket: s3Bucket, - access_key_id: s3AccessKeyId, - secret_access_key: s3SecretAccessKey, - root: s3Root - }) - - // 在删除操作之间添加短暂延迟,避免请求过于频繁 - if (i < filesToDelete.length - 1) { - await new Promise((resolve) => setTimeout(resolve, 500)) - } - } - } - } catch (error) { - Logger.error('[Backup] Failed to clean up old backup files:', error) - } - } - } catch (error: any) { - // if auto backup process, throw error - if (autoBackupProcess) { - throw error - } - notificationService.send({ - id: uuid(), - type: 'error', - title: i18n.t('message.backup.failed'), - message: error.message, - silent: false, - timestamp: Date.now(), - source: 'backup' - }) - store.dispatch(setS3SyncState({ lastSyncError: error.message })) - console.error('[Backup] backupToS3: Error uploading file to S3:', error) - showMessage && window.message.error({ content: i18n.t('message.backup.failed'), key: 'backup' }) - throw error - } finally { - if (!autoBackupProcess) { - store.dispatch( - setS3SyncState({ - lastSyncTime: Date.now(), - syncing: false - }) - ) - } - isManualBackupRunning = false - } -} - -// 从 S3 恢复 -export async function restoreFromS3(fileName?: string) { - const { - s3: { - endpoint: s3Endpoint, - region: s3Region, - bucket: s3Bucket, - accessKeyId: s3AccessKeyId, - secretAccessKey: s3SecretAccessKey, - root: s3Root - } - } = store.getState().settings - let data = '' - - try { - data = await window.api.backup.restoreFromS3({ - endpoint: s3Endpoint, - region: s3Region, - bucket: s3Bucket, - access_key_id: s3AccessKeyId, - secret_access_key: s3SecretAccessKey, - root: s3Root, - fileName - }) - } catch (error: any) { - console.error('[Backup] restoreFromS3: Error downloading file from S3:', error) - window.modal.error({ - title: i18n.t('message.restore.failed'), - content: error.message - }) - } - - try { - await handleData(JSON.parse(data)) - } catch (error) 
{ - console.error('[Backup] Error downloading file from S3:', error) - window.message.error({ content: i18n.t('error.backup.file_format'), key: 'restore' }) - } -} - let autoSyncStarted = false let syncTimeout: NodeJS.Timeout | null = null let isAutoBackupRunning = false @@ -502,17 +252,9 @@ export function startAutoSync(immediate = false) { return } - const { - webdavAutoSync, - webdavHost, - s3: { autoSync: s3AutoSync, endpoint: s3Endpoint } - } = store.getState().settings + const { webdavAutoSync, webdavHost } = store.getState().settings - // 检查WebDAV或S3自动同步配置 - const hasWebdavConfig = webdavAutoSync && webdavHost - const hasS3Config = s3AutoSync && s3Endpoint - - if (!hasWebdavConfig && !hasS3Config) { + if (!webdavAutoSync || !webdavHost) { Logger.log('[AutoSync] Invalid sync settings, auto sync disabled') return } @@ -535,29 +277,22 @@ export function startAutoSync(immediate = false) { syncTimeout = null } - const { - webdavSyncInterval: _webdavSyncInterval, - s3: { syncInterval: _s3SyncInterval } - } = store.getState().settings - const { webdavSync, s3Sync } = store.getState().backup + const { webdavSyncInterval } = store.getState().settings + const { webdavSync } = store.getState().backup - // 使用当前激活的同步配置 - const syncInterval = hasWebdavConfig ? _webdavSyncInterval : _s3SyncInterval - const lastSyncTime = hasWebdavConfig ? webdavSync?.lastSyncTime : s3Sync?.lastSyncTime - - if (syncInterval <= 0) { + if (webdavSyncInterval <= 0) { Logger.log('[AutoSync] Invalid sync interval, auto sync disabled') stopAutoSync() return } // 用户指定的自动备份时间间隔(毫秒) - const requiredInterval = syncInterval * 60 * 1000 + const requiredInterval = webdavSyncInterval * 60 * 1000 let timeUntilNextSync = 1000 //also immediate switch (type) { - case 'fromLastSyncTime': // 如果存在最后一次同步的时间,以它为参考计算下一次同步的时间 - timeUntilNextSync = Math.max(1000, (lastSyncTime || 0) + requiredInterval - Date.now()) + case 'fromLastSyncTime': // 如果存在最后一次同步WebDAV的时间,以它为参考计算下一次同步的时间 + timeUntilNextSync = Math.max(1000, (webdavSync?.lastSyncTime || 0) + requiredInterval - Date.now()) break case 'fromNow': timeUntilNextSync = requiredInterval @@ -566,9 +301,8 @@ export function startAutoSync(immediate = false) { syncTimeout = setTimeout(performAutoBackup, timeUntilNextSync) - const backupType = hasWebdavConfig ? 'WebDAV' : 'S3' Logger.log( - `[AutoSync] Next ${backupType} sync scheduled in ${Math.floor(timeUntilNextSync / 1000 / 60)} minutes ${Math.floor( + `[AutoSync] Next sync scheduled in ${Math.floor(timeUntilNextSync / 1000 / 60)} minutes ${Math.floor( (timeUntilNextSync / 1000) % 60 )} seconds` ) @@ -587,28 +321,17 @@ export function startAutoSync(immediate = false) { while (retryCount < maxRetries) { try { - const backupType = hasWebdavConfig ? 'WebDAV' : 'S3' - Logger.log(`[AutoSync] Starting auto ${backupType} backup... (attempt ${retryCount + 1}/${maxRetries})`) + Logger.log(`[AutoSync] Starting auto backup... 
(attempt ${retryCount + 1}/${maxRetries})`) - if (hasWebdavConfig) { - await backupToWebdav({ autoBackupProcess: true }) - store.dispatch( - setWebDAVSyncState({ - lastSyncError: null, - lastSyncTime: Date.now(), - syncing: false - }) - ) - } else if (hasS3Config) { - await backupToS3({ autoBackupProcess: true }) - store.dispatch( - setS3SyncState({ - lastSyncError: null, - lastSyncTime: Date.now(), - syncing: false - }) - ) - } + await backupToWebdav({ autoBackupProcess: true }) + + store.dispatch( + setWebDAVSyncState({ + lastSyncError: null, + lastSyncTime: Date.now(), + syncing: false + }) + ) isAutoBackupRunning = false scheduleNextBackup() @@ -617,31 +340,20 @@ export function startAutoSync(immediate = false) { } catch (error: any) { retryCount++ if (retryCount === maxRetries) { - const backupType = hasWebdavConfig ? 'WebDAV' : 'S3' - Logger.error(`[AutoSync] Auto ${backupType} backup failed after all retries:`, error) + Logger.error('[AutoSync] Auto backup failed after all retries:', error) - if (hasWebdavConfig) { - store.dispatch( - setWebDAVSyncState({ - lastSyncError: 'Auto backup failed', - lastSyncTime: Date.now(), - syncing: false - }) - ) - } else if (hasS3Config) { - store.dispatch( - setS3SyncState({ - lastSyncError: 'Auto backup failed', - lastSyncTime: Date.now(), - syncing: false - }) - ) - } + store.dispatch( + setWebDAVSyncState({ + lastSyncError: 'Auto backup failed', + lastSyncTime: Date.now(), + syncing: false + }) + ) //only show 1 time error modal, and autoback stopped until user click ok await window.modal.error({ title: i18n.t('message.backup.failed'), - content: `[${backupType} Auto Backup] ${new Date().toLocaleString()} ` + error.message + content: `[WebDAV Auto Backup] ${new Date().toLocaleString()} ` + error.message }) scheduleNextBackup('fromNow') diff --git a/src/renderer/src/store/backup.ts b/src/renderer/src/store/backup.ts index 0740032efb..a8b7d342c5 100644 --- a/src/renderer/src/store/backup.ts +++ b/src/renderer/src/store/backup.ts @@ -8,7 +8,6 @@ export interface WebDAVSyncState { export interface BackupState { webdavSync: WebDAVSyncState - s3Sync: WebDAVSyncState } const initialState: BackupState = { @@ -16,11 +15,6 @@ const initialState: BackupState = { lastSyncTime: null, syncing: false, lastSyncError: null - }, - s3Sync: { - lastSyncTime: null, - syncing: false, - lastSyncError: null } } @@ -30,12 +24,9 @@ const backupSlice = createSlice({ reducers: { setWebDAVSyncState: (state, action: PayloadAction>) => { state.webdavSync = { ...state.webdavSync, ...action.payload } - }, - setS3SyncState: (state, action: PayloadAction>) => { - state.s3Sync = { ...state.s3Sync, ...action.payload } } } }) -export const { setWebDAVSyncState, setS3SyncState } = backupSlice.actions +export const { setWebDAVSyncState } = backupSlice.actions export default backupSlice.reducer diff --git a/src/renderer/src/store/settings.ts b/src/renderer/src/store/settings.ts index 8afbafc2a7..7d8e14ed11 100644 --- a/src/renderer/src/store/settings.ts +++ b/src/renderer/src/store/settings.ts @@ -37,19 +37,6 @@ export type UserTheme = { colorPrimary: string } -export interface S3Config { - endpoint: string - region: string - bucket: string - accessKeyId: string - secretAccessKey: string - root: string - autoSync: boolean - syncInterval: number - maxBackups: number - skipBackupFile: boolean -} - export interface SettingsState { showAssistants: boolean showTopics: boolean @@ -198,7 +185,6 @@ export interface SettingsState { knowledgeEmbed: boolean } defaultPaintingProvider: 
PaintingProvider - s3: S3Config } export type MultiModelMessageStyle = 'horizontal' | 'vertical' | 'fold' | 'grid' @@ -343,19 +329,7 @@ export const initialState: SettingsState = { backup: false, knowledgeEmbed: false }, - defaultPaintingProvider: 'aihubmix', - s3: { - endpoint: '', - region: '', - bucket: '', - accessKeyId: '', - secretAccessKey: '', - root: '', - autoSync: false, - syncInterval: 0, - maxBackups: 0, - skipBackupFile: false - } + defaultPaintingProvider: 'aihubmix' } const settingsSlice = createSlice({ @@ -719,9 +693,6 @@ const settingsSlice = createSlice({ }, setDefaultPaintingProvider: (state, action: PayloadAction) => { state.defaultPaintingProvider = action.payload - }, - setS3: (state, action: PayloadAction) => { - state.s3 = action.payload } } }) @@ -830,8 +801,7 @@ export const { setOpenAISummaryText, setOpenAIServiceTier, setNotificationSettings, - setDefaultPaintingProvider, - setS3 + setDefaultPaintingProvider } = settingsSlice.actions export default settingsSlice.reducer diff --git a/src/renderer/src/types/index.ts b/src/renderer/src/types/index.ts index 448f04c647..3b4cc5cdc3 100644 --- a/src/renderer/src/types/index.ts +++ b/src/renderer/src/types/index.ts @@ -730,16 +730,4 @@ export interface StoreSyncAction { export type OpenAISummaryText = 'auto' | 'concise' | 'detailed' | 'off' export type OpenAIServiceTier = 'auto' | 'default' | 'flex' - -export type S3Config = { - endpoint: string - region: string - bucket: string - access_key_id: string - secret_access_key: string - root?: string - fileName?: string - skipBackupFile?: boolean -} - export type { Message } from './newMessage' diff --git a/yarn.lock b/yarn.lock index 49b5b8fb84..aaf7a9f457 100644 --- a/yarn.lock +++ b/yarn.lock @@ -3198,55 +3198,6 @@ __metadata: languageName: node linkType: hard -"@opendal/lib-darwin-arm64@npm:0.47.11": - version: 0.47.11 - resolution: "@opendal/lib-darwin-arm64@npm:0.47.11" - conditions: os=darwin & cpu=arm64 - languageName: node - linkType: hard - -"@opendal/lib-darwin-x64@npm:0.47.11": - version: 0.47.11 - resolution: "@opendal/lib-darwin-x64@npm:0.47.11" - conditions: os=darwin & cpu=x64 - languageName: node - linkType: hard - -"@opendal/lib-linux-arm64-gnu@npm:0.47.11": - version: 0.47.11 - resolution: "@opendal/lib-linux-arm64-gnu@npm:0.47.11" - conditions: os=linux & cpu=arm64 & libc=glibc - languageName: node - linkType: hard - -"@opendal/lib-linux-arm64-musl@npm:0.47.11": - version: 0.47.11 - resolution: "@opendal/lib-linux-arm64-musl@npm:0.47.11" - conditions: os=linux & cpu=arm64 & libc=glibc - languageName: node - linkType: hard - -"@opendal/lib-linux-x64-gnu@npm:0.47.11": - version: 0.47.11 - resolution: "@opendal/lib-linux-x64-gnu@npm:0.47.11" - conditions: os=linux & cpu=x64 & libc=glibc - languageName: node - linkType: hard - -"@opendal/lib-win32-arm64-msvc@npm:0.47.11": - version: 0.47.11 - resolution: "@opendal/lib-win32-arm64-msvc@npm:0.47.11" - conditions: os=win32 & cpu=arm64 - languageName: node - linkType: hard - -"@opendal/lib-win32-x64-msvc@npm:0.47.11": - version: 0.47.11 - resolution: "@opendal/lib-win32-x64-msvc@npm:0.47.11" - conditions: os=win32 & cpu=x64 - languageName: node - linkType: hard - "@parcel/watcher-android-arm64@npm:2.5.1": version: 2.5.1 resolution: "@parcel/watcher-android-arm64@npm:2.5.1" @@ -5761,7 +5712,6 @@ __metadata: npx-scope-finder: "npm:^1.2.0" officeparser: "npm:^4.1.1" openai: "patch:openai@npm%3A5.1.0#~/.yarn/patches/openai-npm-5.1.0-0e7b3ccb07.patch" - opendal: "npm:0.47.11" os-proxy-config: "npm:^1.1.2" p-queue: 
"npm:^8.1.0" playwright: "npm:^1.52.0" @@ -14246,36 +14196,6 @@ __metadata: languageName: node linkType: hard -"opendal@npm:0.47.11": - version: 0.47.11 - resolution: "opendal@npm:0.47.11" - dependencies: - "@opendal/lib-darwin-arm64": "npm:0.47.11" - "@opendal/lib-darwin-x64": "npm:0.47.11" - "@opendal/lib-linux-arm64-gnu": "npm:0.47.11" - "@opendal/lib-linux-arm64-musl": "npm:0.47.11" - "@opendal/lib-linux-x64-gnu": "npm:0.47.11" - "@opendal/lib-win32-arm64-msvc": "npm:0.47.11" - "@opendal/lib-win32-x64-msvc": "npm:0.47.11" - dependenciesMeta: - "@opendal/lib-darwin-arm64": - optional: true - "@opendal/lib-darwin-x64": - optional: true - "@opendal/lib-linux-arm64-gnu": - optional: true - "@opendal/lib-linux-arm64-musl": - optional: true - "@opendal/lib-linux-x64-gnu": - optional: true - "@opendal/lib-win32-arm64-msvc": - optional: true - "@opendal/lib-win32-x64-msvc": - optional: true - checksum: 10c0/0783da2651bb27ac693ce38938d12b00124530fb965364517eef3de17b3ff898cdecf06260a79a7d70745d57c2ba952a753a4bab52e0831aa7232c3a69120225 - languageName: node - linkType: hard - "option@npm:~0.2.1": version: 0.2.4 resolution: "option@npm:0.2.4" From a7abebc8f4155b5993380e99bab9525203fe2e0a Mon Sep 17 00:00:00 2001 From: one Date: Wed, 2 Jul 2025 15:03:31 +0800 Subject: [PATCH 08/21] fix: remove opendal (#7753) --- package.json | 1 - 1 file changed, 1 deletion(-) diff --git a/package.json b/package.json index c81c770ebc..a2ccbcb70a 100644 --- a/package.json +++ b/package.json @@ -64,7 +64,6 @@ "jsdom": "26.1.0", "node-stream-zip": "^1.15.0", "notion-helper": "^1.3.22", - "opendal": "0.47.11", "os-proxy-config": "^1.1.2", "selection-hook": "^0.9.23", "turndown": "7.2.0" From 9f291941804251fea95aa65a9d0d5df1a84cd5e6 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=E4=BA=A2=E5=A5=8B=E7=8C=AB?= Date: Wed, 2 Jul 2025 15:23:02 +0800 Subject: [PATCH 09/21] refactor: Restructure the knowledge base directory (#7754) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit 重构知识库目录结构,代码逻辑完全不变 ├── embeddings │ ├── Embeddings.ts │ ├── EmbeddingsFactory.ts │ └── VoyageEmbeddings.ts ├── loader │ ├── draftsExportLoader.ts │ ├── epubLoader.ts │ ├── index.ts │ ├── noteLoader.ts │ └── odLoader.ts └── reranker ├── BaseReranker.ts ├── GeneralReranker.ts └── Reranker.ts 4 directories, 11 files --- src/main/{ => knowledage}/embeddings/Embeddings.ts | 0 src/main/{ => knowledage}/embeddings/EmbeddingsFactory.ts | 0 src/main/{ => knowledage}/embeddings/VoyageEmbeddings.ts | 0 src/main/{ => knowledage}/loader/draftsExportLoader.ts | 0 src/main/{ => knowledage}/loader/epubLoader.ts | 0 src/main/{ => knowledage}/loader/index.ts | 0 src/main/{ => knowledage}/loader/noteLoader.ts | 0 src/main/{ => knowledage}/loader/odLoader.ts | 0 src/main/{ => knowledage}/reranker/BaseReranker.ts | 0 src/main/{ => knowledage}/reranker/GeneralReranker.ts | 0 src/main/{ => knowledage}/reranker/Reranker.ts | 0 src/main/services/KnowledgeService.ts | 8 ++++---- 12 files changed, 4 insertions(+), 4 deletions(-) rename src/main/{ => knowledage}/embeddings/Embeddings.ts (100%) rename src/main/{ => knowledage}/embeddings/EmbeddingsFactory.ts (100%) rename src/main/{ => knowledage}/embeddings/VoyageEmbeddings.ts (100%) rename src/main/{ => knowledage}/loader/draftsExportLoader.ts (100%) rename src/main/{ => knowledage}/loader/epubLoader.ts (100%) rename src/main/{ => knowledage}/loader/index.ts (100%) rename src/main/{ => knowledage}/loader/noteLoader.ts (100%) rename src/main/{ => knowledage}/loader/odLoader.ts (100%) rename 
src/main/{ => knowledage}/reranker/BaseReranker.ts (100%) rename src/main/{ => knowledage}/reranker/GeneralReranker.ts (100%) rename src/main/{ => knowledage}/reranker/Reranker.ts (100%) diff --git a/src/main/embeddings/Embeddings.ts b/src/main/knowledage/embeddings/Embeddings.ts similarity index 100% rename from src/main/embeddings/Embeddings.ts rename to src/main/knowledage/embeddings/Embeddings.ts diff --git a/src/main/embeddings/EmbeddingsFactory.ts b/src/main/knowledage/embeddings/EmbeddingsFactory.ts similarity index 100% rename from src/main/embeddings/EmbeddingsFactory.ts rename to src/main/knowledage/embeddings/EmbeddingsFactory.ts diff --git a/src/main/embeddings/VoyageEmbeddings.ts b/src/main/knowledage/embeddings/VoyageEmbeddings.ts similarity index 100% rename from src/main/embeddings/VoyageEmbeddings.ts rename to src/main/knowledage/embeddings/VoyageEmbeddings.ts diff --git a/src/main/loader/draftsExportLoader.ts b/src/main/knowledage/loader/draftsExportLoader.ts similarity index 100% rename from src/main/loader/draftsExportLoader.ts rename to src/main/knowledage/loader/draftsExportLoader.ts diff --git a/src/main/loader/epubLoader.ts b/src/main/knowledage/loader/epubLoader.ts similarity index 100% rename from src/main/loader/epubLoader.ts rename to src/main/knowledage/loader/epubLoader.ts diff --git a/src/main/loader/index.ts b/src/main/knowledage/loader/index.ts similarity index 100% rename from src/main/loader/index.ts rename to src/main/knowledage/loader/index.ts diff --git a/src/main/loader/noteLoader.ts b/src/main/knowledage/loader/noteLoader.ts similarity index 100% rename from src/main/loader/noteLoader.ts rename to src/main/knowledage/loader/noteLoader.ts diff --git a/src/main/loader/odLoader.ts b/src/main/knowledage/loader/odLoader.ts similarity index 100% rename from src/main/loader/odLoader.ts rename to src/main/knowledage/loader/odLoader.ts diff --git a/src/main/reranker/BaseReranker.ts b/src/main/knowledage/reranker/BaseReranker.ts similarity index 100% rename from src/main/reranker/BaseReranker.ts rename to src/main/knowledage/reranker/BaseReranker.ts diff --git a/src/main/reranker/GeneralReranker.ts b/src/main/knowledage/reranker/GeneralReranker.ts similarity index 100% rename from src/main/reranker/GeneralReranker.ts rename to src/main/knowledage/reranker/GeneralReranker.ts diff --git a/src/main/reranker/Reranker.ts b/src/main/knowledage/reranker/Reranker.ts similarity index 100% rename from src/main/reranker/Reranker.ts rename to src/main/knowledage/reranker/Reranker.ts diff --git a/src/main/services/KnowledgeService.ts b/src/main/services/KnowledgeService.ts index d2d381c598..686e643711 100644 --- a/src/main/services/KnowledgeService.ts +++ b/src/main/services/KnowledgeService.ts @@ -21,10 +21,10 @@ import type { ExtractChunkData } from '@cherrystudio/embedjs-interfaces' import { LibSqlDb } from '@cherrystudio/embedjs-libsql' import { SitemapLoader } from '@cherrystudio/embedjs-loader-sitemap' import { WebLoader } from '@cherrystudio/embedjs-loader-web' -import Embeddings from '@main/embeddings/Embeddings' -import { addFileLoader } from '@main/loader' -import { NoteLoader } from '@main/loader/noteLoader' -import Reranker from '@main/reranker/Reranker' +import Embeddings from '@main/knowledage/embeddings/Embeddings' +import { addFileLoader } from '@main/knowledage/loader' +import { NoteLoader } from '@main/knowledage/loader/noteLoader' +import Reranker from '@main/knowledage/reranker/Reranker' import { windowService } from '@main/services/WindowService' 
import { getDataPath } from '@main/utils' import { getAllFiles } from '@main/utils/file' From d5e8ffc00f2dace0b5a888303ef6f4e5d7e343e3 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=E8=87=AA=E7=94=B1=E7=9A=84=E4=B8=96=E7=95=8C=E4=BA=BA?= <3196812536@qq.com> Date: Wed, 2 Jul 2025 15:23:58 +0800 Subject: [PATCH 10/21] fix: add custom prompt setting for translate model (#7623) * fix: add custom prompt setting for translate model Introduces a UI section in TranslateSettings to allow users to view and edit the custom prompt for the translation model. The prompt is now saved to the database and can be toggled for display in the settings modal. * fix: add reset button for translate prompt and improve prompt editing Introduced a reset button to restore the translate prompt to its default value. Updated the prompt editing area to use local state, improved UI with a rounded Textarea, and ensured prompt changes are dispatched to the store. * refactor: bidirectional settings layout in TranslatePage Removed unnecessary margin and conditional wrapper for the bidirectional settings. The Space component is now only rendered when bidirectional mode is enabled, improving layout clarity and reducing extra DOM nesting. * Update TranslatePage.tsx --- .../src/pages/translate/TranslatePage.tsx | 80 ++++++++++++++++--- 1 file changed, 69 insertions(+), 11 deletions(-) diff --git a/src/renderer/src/pages/translate/TranslatePage.tsx b/src/renderer/src/pages/translate/TranslatePage.tsx index 353f75bf81..8d4e7116b6 100644 --- a/src/renderer/src/pages/translate/TranslatePage.tsx +++ b/src/renderer/src/pages/translate/TranslatePage.tsx @@ -1,16 +1,20 @@ -import { CheckOutlined, DeleteOutlined, HistoryOutlined, SendOutlined } from '@ant-design/icons' +import { CheckOutlined, DeleteOutlined, HistoryOutlined, RedoOutlined, SendOutlined } from '@ant-design/icons' import { Navbar, NavbarCenter } from '@renderer/components/app/Navbar' import CopyIcon from '@renderer/components/Icons/CopyIcon' import { HStack } from '@renderer/components/Layout' import { isEmbeddingModel } from '@renderer/config/models' +import { TRANSLATE_PROMPT } from '@renderer/config/prompts' import { translateLanguageOptions } from '@renderer/config/translate' import { useCodeStyle } from '@renderer/context/CodeStyleProvider' import db from '@renderer/databases' import { useDefaultModel } from '@renderer/hooks/useAssistant' import { useProviders } from '@renderer/hooks/useProvider' +import { useSettings } from '@renderer/hooks/useSettings' import { fetchTranslate } from '@renderer/services/ApiService' import { getDefaultTranslateAssistant } from '@renderer/services/AssistantService' import { getModelUniqId, hasModel } from '@renderer/services/ModelService' +import { useAppDispatch } from '@renderer/store' +import { setTranslateModelPrompt } from '@renderer/store/settings' import type { Model, TranslateHistory } from '@renderer/types' import { runAsyncFunction, uuid } from '@renderer/utils' import { @@ -24,7 +28,7 @@ import TextArea, { TextAreaRef } from 'antd/es/input/TextArea' import dayjs from 'dayjs' import { useLiveQuery } from 'dexie-react-hooks' import { find, isEmpty, sortBy } from 'lodash' -import { HelpCircle, Settings2, TriangleAlert } from 'lucide-react' +import { ChevronDown, HelpCircle, Settings2, TriangleAlert } from 'lucide-react' import { FC, useEffect, useMemo, useRef, useState } from 'react' import { useTranslation } from 'react-i18next' import styled from 'styled-components' @@ -65,7 +69,11 @@ const TranslateSettings: FC<{ selectOptions }) 
=> { const { t } = useTranslation() + const { translateModelPrompt } = useSettings() + const dispatch = useAppDispatch() const [localPair, setLocalPair] = useState<[string, string]>(bidirectionalPair) + const [showPrompt, setShowPrompt] = useState(false) + const [localPrompt, setLocalPrompt] = useState(translateModelPrompt) const defaultTranslateModel = useMemo( () => (hasModel(translateModel) ? getModelUniqId(translateModel) : undefined), @@ -74,7 +82,8 @@ const TranslateSettings: FC<{ useEffect(() => { setLocalPair(bidirectionalPair) - }, [bidirectionalPair, visible]) + setLocalPrompt(translateModelPrompt) + }, [bidirectionalPair, translateModelPrompt, visible]) const handleSave = () => { if (localPair[0] === localPair[1]) { @@ -88,6 +97,8 @@ const TranslateSettings: FC<{ db.settings.put({ id: 'translate:bidirectional:pair', value: localPair }) db.settings.put({ id: 'translate:scroll:sync', value: isScrollSyncEnabled }) db.settings.put({ id: 'translate:markdown:enabled', value: enableMarkdown }) + db.settings.put({ id: 'translate:model:prompt', value: localPrompt }) + dispatch(setTranslateModelPrompt(localPrompt)) window.message.success({ content: t('message.save.success.title'), key: 'translate-settings-save' @@ -112,7 +123,14 @@ const TranslateSettings: FC<{ width={420}>
-
{t('translate.settings.model')}
+
+ {t('translate.settings.model')} + + + + + +
+ + )} +
+ +
+ +
setShowPrompt(!showPrompt)}> + {t('settings.models.translate_model_prompt_title')} + +
+ {localPrompt !== TRANSLATE_PROMPT && ( + +
+ +
+