Mirror of https://github.com/CherryHQ/cherry-studio.git (synced 2025-12-19 06:30:10 +08:00)

Commit 74ca4443d6: Merge branch 'main' into v2
.github/workflows/auto-i18n.yml (vendored, 2 changes)

@@ -23,7 +23,7 @@ jobs:
     steps:
       - name: 🐈⬛ Checkout
-        uses: actions/checkout@v5
+        uses: actions/checkout@v6
         with:
           fetch-depth: 0
.github/workflows/claude-code-review.yml (vendored, 2 changes)

@@ -27,7 +27,7 @@ jobs:
     steps:
       - name: Checkout repository
-        uses: actions/checkout@v5
+        uses: actions/checkout@v6
         with:
           fetch-depth: 1
.github/workflows/claude-translator.yml (vendored, 2 changes)

@@ -32,7 +32,7 @@ jobs:
     steps:
       - name: Checkout repository
-        uses: actions/checkout@v5
+        uses: actions/checkout@v6
         with:
           fetch-depth: 1
.github/workflows/claude.yml (vendored, 2 changes)

@@ -37,7 +37,7 @@ jobs:
       actions: read # Required for Claude to read CI results on PRs
     steps:
       - name: Checkout repository
-        uses: actions/checkout@v5
+        uses: actions/checkout@v6
         with:
           fetch-depth: 1
.github/workflows/github-issue-tracker.yml (vendored, 6 changes)

@@ -19,7 +19,7 @@ jobs:
     steps:
       - name: Checkout repository
-        uses: actions/checkout@v4
+        uses: actions/checkout@v6

       - name: Check Beijing Time
         id: check_time
@@ -42,7 +42,7 @@ jobs:
       - name: Add pending label if in quiet hours
         if: steps.check_time.outputs.should_delay == 'true'
-        uses: actions/github-script@v7
+        uses: actions/github-script@v8
         with:
           script: |
             github.rest.issues.addLabels({
@@ -118,7 +118,7 @@ jobs:
     steps:
       - name: Checkout repository
-        uses: actions/checkout@v4
+        uses: actions/checkout@v6

       - name: Setup Node.js
         uses: actions/setup-node@v6
.github/workflows/nightly-build.yml (vendored, 2 changes)

@@ -51,7 +51,7 @@ jobs:
     steps:
       - name: Check out Git repository
-        uses: actions/checkout@v5
+        uses: actions/checkout@v6
         with:
           ref: main
.github/workflows/pr-ci.yml (vendored, 2 changes)

@@ -21,7 +21,7 @@ jobs:
     steps:
       - name: Check out Git repository
-        uses: actions/checkout@v5
+        uses: actions/checkout@v6

       - name: Install Node.js
         uses: actions/setup-node@v6
.github/workflows/release.yml (vendored, 2 changes)

@@ -25,7 +25,7 @@ jobs:
     steps:
       - name: Check out Git repository
-        uses: actions/checkout@v5
+        uses: actions/checkout@v6
         with:
           fetch-depth: 0
.github/workflows/update-app-upgrade-config.yml (vendored, 36 changes)

@@ -19,10 +19,9 @@ on:
 permissions:
   contents: write
   pull-requests: write

 jobs:
-  propose-update:
+  update-config:
     runs-on: ubuntu-latest
     if: github.event_name == 'workflow_dispatch' || (github.event_name == 'release' && github.event.release.draft == false)
@@ -135,7 +134,7 @@ jobs:
       - name: Checkout default branch
         if: steps.check.outputs.should_run == 'true'
-        uses: actions/checkout@v5
+        uses: actions/checkout@v6
         with:
           ref: ${{ github.event.repository.default_branch }}
           path: main
@@ -143,7 +142,7 @@ jobs:
       - name: Checkout x-files/app-upgrade-config branch
         if: steps.check.outputs.should_run == 'true'
-        uses: actions/checkout@v5
+        uses: actions/checkout@v6
         with:
           ref: x-files/app-upgrade-config
           path: cs
@@ -187,25 +186,20 @@ jobs:
             echo "changed=true" >> "$GITHUB_OUTPUT"
           fi

-      - name: Create pull request
+      - name: Commit and push changes
         if: steps.check.outputs.should_run == 'true' && steps.diff.outputs.changed == 'true'
-        uses: peter-evans/create-pull-request@v7
-        with:
-          path: cs
-          base: x-files/app-upgrade-config
-          branch: chore/update-app-upgrade-config/${{ steps.meta.outputs.safe_tag }}
-          commit-message: "🤖 chore: sync app-upgrade-config for ${{ steps.meta.outputs.tag }}"
-          title: "chore: update app-upgrade-config for ${{ steps.meta.outputs.tag }}"
-          body: |
-            Automated update triggered by `${{ steps.meta.outputs.trigger }}`.
-
-            - Source tag: `${{ steps.meta.outputs.tag }}`
-            - Pre-release: `${{ steps.meta.outputs.prerelease }}`
-            - Latest: `${{ steps.meta.outputs.latest }}`
-            - Workflow run: https://github.com/${{ github.repository }}/actions/runs/${{ github.run_id }}
-          labels: |
-            automation
-            app-upgrade
+        working-directory: cs
+        run: |
+          git config user.name "github-actions[bot]"
+          git config user.email "github-actions[bot]@users.noreply.github.com"
+          git add app-upgrade-config.json
+          git commit -m "chore: sync app-upgrade-config for ${{ steps.meta.outputs.tag }}" -m "Automated update triggered by \`${{ steps.meta.outputs.trigger }}\`.
+
+          - Source tag: \`${{ steps.meta.outputs.tag }}\`
+          - Pre-release: \`${{ steps.meta.outputs.prerelease }}\`
+          - Latest: \`${{ steps.meta.outputs.latest }}\`
+          - Workflow run: https://github.com/${{ github.repository }}/actions/runs/${{ github.run_id }}"
+          git push origin x-files/app-upgrade-config

       - name: No changes detected
         if: steps.check.outputs.should_run == 'true' && steps.diff.outputs.changed != 'true'
@@ -135,108 +135,66 @@ artifactBuildCompleted: scripts/artifact-build-completed.js
 releaseInfo:
   releaseNotes: |
     <!--LANG:en-->
-    A New Era of Intelligence with Cherry Studio 1.7.1
-
-    Today we're releasing Cherry Studio 1.7.1 — our most ambitious update yet, introducing Agent: autonomous AI that thinks, plans, and acts.
-
-    For years, AI assistants have been reactive — waiting for your commands, responding to your questions. With Agent, we're changing that. Now, AI can truly work alongside you: understanding complex goals, breaking them into steps, and executing them independently.
-
-    This is what we've been building toward. And it's just the beginning.
-
-    🤖 Meet Agent
-    Imagine having a brilliant colleague who never sleeps. Give Agent a goal — write a report, analyze data, refactor code — and watch it work. It reasons through problems, breaks them into steps, calls the right tools, and adapts when things change.
-
-    - **Think → Plan → Act**: From goal to execution, fully autonomous
-    - **Deep Reasoning**: Multi-turn thinking that solves real problems
-    - **Tool Mastery**: File operations, web search, code execution, and more
-    - **Skill Plugins**: Extend with custom commands and capabilities
-    - **You Stay in Control**: Real-time approval for sensitive actions
-    - **Full Visibility**: Every thought, every decision, fully transparent
-
-    🌐 Expanding Ecosystem
-    - **New Providers**: HuggingFace, Mistral, CherryIN, AI Gateway, Intel OVMS, Didi MCP
-    - **New Models**: Claude 4.5 Haiku, DeepSeek v3.2, GLM-4.6, Doubao, Ling series
-    - **MCP Integration**: Alibaba Cloud, ModelScope, Higress, MCP.so, TokenFlux and more
-
-    📚 Smarter Knowledge Base
-    - **OpenMinerU**: Self-hosted document processing
-    - **Full-Text Search**: Find anything instantly across your notes
-    - **Enhanced Tool Selection**: Smarter configuration for better AI assistance
-
-    📝 Notes, Reimagined
-    - Full-text search with highlighted results
-    - AI-powered smart rename
-    - Export as image
-    - Auto-wrap for tables
-
-    🖼️ Image & OCR
-    - Intel OVMS painting capabilities
-    - Intel OpenVINO NPU-accelerated OCR
-
-    🌍 Now in 10+ Languages
-    - Added German support
-    - Enhanced internationalization
-
-    ⚡ Faster & More Polished
-    - Electron 38 upgrade
-    - New MCP management interface
-    - Dozens of UI refinements
-
-    ❤️ Fully Open Source
-    Commercial restrictions removed. Cherry Studio now follows standard AGPL v3 — free for teams of any size.
-
-    The Agent Era is here. We can't wait to see what you'll create.
+    Cherry Studio 1.7.2 - Stability & Enhancement Update
+
+    This release focuses on stability improvements, bug fixes, and quality-of-life enhancements.
+
+    🔧 Improvements
+    - Enhanced update dialog functionality and state management
+    - Improved ImageViewer context menu UX
+    - Better temperature and top_p parameter handling
+    - User-configurable stream options for OpenAI API
+    - Translation feature now supports document files
+
+    🤖 AI & Models
+    - Added explicit thinking token support for Gemini 3 Pro Image
+    - Updated DeepSeek logic to match DeepSeek v3.2
+    - Updated AiOnly default models
+    - Updated AI model configurations to latest versions
+
+    ♿ Accessibility
+    - Improved screen reader (NVDA) support with aria-label attributes
+    - Added Slovak language support for spell check
+
+    🐛 Bug Fixes
+    - Fixed Quick Assistant shortcut registration issue
+    - Fixed UI freeze on multi-file selection via batch processing
+    - Fixed assistant default model update when editing model capabilities
+    - Fixed provider handling and API key rotation logic
+    - Fixed OVMS API URL path formation
+    - Fixed custom parameters placement for Vercel AI Gateway
+    - Fixed topic message blocks clearing
+    - Fixed input bar blocking enter send while generating
     <!--LANG:zh-CN-->
-    Cherry Studio 1.7.1:开启智能新纪元
-
-    今天,我们正式发布 Cherry Studio 1.7.1 —— 迄今最具雄心的版本,带来全新的 Agent:能够自主思考、规划和行动的 AI。
-
-    多年来,AI 助手一直是被动的——等待你的指令,回应你的问题。Agent 改变了这一切。现在,AI 能够真正与你并肩工作:理解复杂目标,将其拆解为步骤,并独立执行。
-
-    这是我们一直在构建的未来。而这,仅仅是开始。
-
-    🤖 认识 Agent
-    想象一位永不疲倦的得力伙伴。给 Agent 一个目标——撰写报告、分析数据、重构代码——然后看它工作。它会推理问题、拆解步骤、调用工具,并在情况变化时灵活应对。
-
-    - **思考 → 规划 → 行动**:从目标到执行,全程自主
-    - **深度推理**:多轮思考,解决真实问题
-    - **工具大师**:文件操作、网络搜索、代码执行,样样精通
-    - **技能插件**:自定义命令,无限扩展
-    - **你掌控全局**:敏感操作,实时审批
-    - **完全透明**:每一步思考,每一个决策,清晰可见
-
-    🌐 生态持续壮大
-    - **新增服务商**:Hugging Face、Mistral、Perplexity、SophNet、AI Gateway、Cerebras AI
-    - **新增模型**:Gemini 3、Gemini 3 Pro(支持图像预览)、GPT-5.1、Claude Opus 4.5
-    - **MCP 集成**:百炼、魔搭、Higress、MCP.so、TokenFlux 等平台
-
-    📚 更智能的知识库
-    - **OpenMinerU**:本地自部署文档处理
-    - **全文搜索**:笔记内容一搜即达
-    - **增强工具选择**:更智能的配置,更好的 AI 协助
-
-    📝 笔记,焕然一新
-    - 全文搜索,结果高亮
-    - AI 智能重命名
-    - 导出为图片
-    - 表格自动换行
-
-    🖼️ 图像与 OCR
-    - Intel OVMS 绘图能力
-    - Intel OpenVINO NPU 加速 OCR
-
-    🌍 支持 10+ 种语言
-    - 新增德语支持
-    - 全面增强国际化
-
-    ⚡ 更快、更精致
-    - 升级 Electron 38
-    - 新的 MCP 管理界面
-    - 数十处 UI 细节打磨
-
-    ❤️ 完全开源
-    商用限制已移除。Cherry Studio 现遵循标准 AGPL v3 协议——任意规模团队均可自由使用。
-
-    Agent 纪元已至。期待你的创造。
+    Cherry Studio 1.7.2 - 稳定性与功能增强更新
+
+    本次更新专注于稳定性改进、问题修复和用户体验提升。
+
+    🔧 功能改进
+    - 增强更新对话框功能和状态管理
+    - 优化图片查看器右键菜单体验
+    - 改进温度和 top_p 参数处理逻辑
+    - 支持用户自定义 OpenAI API 流式选项
+    - 翻译功能现已支持文档文件
+
+    🤖 AI 与模型
+    - 为 Gemini 3 Pro Image 添加显式思考 token 支持
+    - 更新 DeepSeek 逻辑以适配 DeepSeek v3.2
+    - 更新 AiOnly 默认模型
+    - 更新 AI 模型配置至最新版本
+
+    ♿ 无障碍支持
+    - 改进屏幕阅读器 (NVDA) 支持,添加 aria-label 属性
+    - 新增斯洛伐克语拼写检查支持
+
+    🐛 问题修复
+    - 修复快捷助手无法注册快捷键的问题
+    - 修复多文件选择时 UI 冻结问题(通过批处理优化)
+    - 修复编辑模型能力时助手默认模型更新问题
+    - 修复服务商处理和 API 密钥轮换逻辑
+    - 修复 OVMS API URL 路径格式问题
+    - 修复 Vercel AI Gateway 自定义参数位置问题
+    - 修复话题消息块清理问题
+    - 修复生成时输入框阻止回车发送的问题
     <!--LANG:END-->
@@ -196,6 +196,9 @@ export enum IpcChannel {
   File_ValidateNotesDirectory = 'file:validateNotesDirectory',
   File_StartWatcher = 'file:startWatcher',
   File_StopWatcher = 'file:stopWatcher',
+  File_PauseWatcher = 'file:pauseWatcher',
+  File_ResumeWatcher = 'file:resumeWatcher',
+  File_BatchUploadMarkdown = 'file:batchUploadMarkdown',
   File_ShowInFolder = 'file:showInFolder',

   // file service
@@ -301,6 +304,8 @@ export enum IpcChannel {
   Selection_ActionWindowClose = 'selection:action-window-close',
   Selection_ActionWindowMinimize = 'selection:action-window-minimize',
   Selection_ActionWindowPin = 'selection:action-window-pin',
+  // [Windows only] Electron bug workaround - can be removed once https://github.com/electron/electron/issues/48554 is fixed
+  Selection_ActionWindowResize = 'selection:action-window-resize',
   Selection_ProcessAction = 'selection:process-action',
   Selection_UpdateActionData = 'selection:update-action-data',
@@ -10,7 +10,7 @@ export type LoaderReturn = {
   messageSource?: 'preprocess' | 'embedding' | 'validation'
 }

-export type FileChangeEventType = 'add' | 'change' | 'unlink' | 'addDir' | 'unlinkDir'
+export type FileChangeEventType = 'add' | 'change' | 'unlink' | 'addDir' | 'unlinkDir' | 'refresh'

 export type FileChangeEvent = {
   eventType: FileChangeEventType
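
For consumers of FileChangeEvent, the new 'refresh' value signals that the watcher cannot describe individual changes (it is emitted when the watcher resumes after a pause) and the whole tree should be reloaded. A minimal sketch of such a handler; reloadNotesTree and applyIncrementalChange are hypothetical helpers, not part of this diff:

function handleFileChange(event: FileChangeEvent) {
  if (event.eventType === 'refresh') {
    // Changes made while the watcher was paused are unknown: rebuild the whole tree
    reloadNotesTree() // hypothetical helper in the consuming component
    return
  }
  // add / change / unlink / addDir / unlinkDir keep their incremental handling
  applyIncrementalChange(event) // hypothetical helper
}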
packages/shared/data/cache/cacheSchemas.ts (vendored, 3 changes)

@@ -58,7 +58,8 @@ export const DefaultUseCache: UseCacheSchema = {
     downloading: false,
     downloaded: false,
     downloadProgress: 0,
-    available: false
+    available: false,
+    ignore: false
   },
   'app.user.avatar': '',
@@ -8,6 +8,7 @@ export type CacheAppUpdateState = {
   downloaded: boolean
   downloadProgress: number
   available: boolean
+  ignore: boolean
 }

 export type CacheActiveSearches = Record<string, WebSearchStatus>
@@ -8,7 +8,7 @@ import { loggerService } from '@logger'
 import { isLinux, isMac, isPortable, isWin } from '@main/constant'
 import { generateSignature } from '@main/integration/cherryai'
 import anthropicService from '@main/services/AnthropicService'
-import { getBinaryPath, isBinaryExists, runInstallScript } from '@main/utils/process'
+import { findGitBash, getBinaryPath, isBinaryExists, runInstallScript } from '@main/utils/process'
 import { handleZoomFactor } from '@main/utils/zoom'
 import type { SpanEntity, TokenUsage } from '@mcp-trace/trace-core'
 import { MIN_WINDOW_HEIGHT, MIN_WINDOW_WIDTH } from '@shared/config/constant'
@@ -500,35 +500,17 @@ export function registerIpc(mainWindow: BrowserWindow, app: Electron.App) {
    }

    try {
-      // Check common Git Bash installation paths
-      const commonPaths = [
-        path.join(process.env.ProgramFiles || 'C:\\Program Files', 'Git', 'bin', 'bash.exe'),
-        path.join(process.env['ProgramFiles(x86)'] || 'C:\\Program Files (x86)', 'Git', 'bin', 'bash.exe'),
-        path.join(process.env.LOCALAPPDATA || '', 'Programs', 'Git', 'bin', 'bash.exe')
-      ]
-
-      // Check if any of the common paths exist
-      for (const bashPath of commonPaths) {
-        if (fs.existsSync(bashPath)) {
-          logger.debug('Git Bash found', { path: bashPath })
-          return true
-        }
-      }
-
-      // Check if git is in PATH
-      const { execSync } = require('child_process')
-      try {
-        execSync('git --version', { stdio: 'ignore' })
-        logger.debug('Git found in PATH')
+      const bashPath = findGitBash()
+
+      if (bashPath) {
+        logger.info('Git Bash is available', { path: bashPath })
        return true
-      } catch {
-        // Git not in PATH
      }

-      logger.debug('Git Bash not found on Windows system')
+      logger.warn('Git Bash not found. Please install Git for Windows from https://git-scm.com/downloads/win')
      return false
    } catch (error) {
-      logger.error('Error checking Git Bash', error as Error)
+      logger.error('Unexpected error checking Git Bash', error as Error)
      return false
    }
  })
@@ -596,6 +578,9 @@ export function registerIpc(mainWindow: BrowserWindow, app: Electron.App) {
  ipcMain.handle(IpcChannel.File_ValidateNotesDirectory, fileManager.validateNotesDirectory.bind(fileManager))
  ipcMain.handle(IpcChannel.File_StartWatcher, fileManager.startFileWatcher.bind(fileManager))
  ipcMain.handle(IpcChannel.File_StopWatcher, fileManager.stopFileWatcher.bind(fileManager))
+  ipcMain.handle(IpcChannel.File_PauseWatcher, fileManager.pauseFileWatcher.bind(fileManager))
+  ipcMain.handle(IpcChannel.File_ResumeWatcher, fileManager.resumeFileWatcher.bind(fileManager))
+  ipcMain.handle(IpcChannel.File_BatchUploadMarkdown, fileManager.batchUploadMarkdownFiles.bind(fileManager))
  ipcMain.handle(IpcChannel.File_ShowInFolder, fileManager.showInFolder.bind(fileManager))

  // file service
@@ -151,6 +151,7 @@ class FileStorage {
   private currentWatchPath?: string
   private debounceTimer?: NodeJS.Timeout
   private watcherConfig: Required<FileWatcherConfig> = DEFAULT_WATCHER_CONFIG
+  private isPaused = false

   constructor() {
     this.initStorageDir()
@@ -478,13 +479,16 @@ class FileStorage {
     }
   }

-  public readFile = async (
-    _: Electron.IpcMainInvokeEvent,
-    id: string,
-    detectEncoding: boolean = false
-  ): Promise<string> => {
-    const filePath = path.join(this.storageDir, id)
-
+  /**
+   * Core file reading logic that handles both documents and text files.
+   *
+   * @private
+   * @param filePath - Full path to the file
+   * @param detectEncoding - Whether to auto-detect text file encoding
+   * @returns Promise resolving to the extracted text content
+   * @throws Error if file reading fails
+   */
+  private async readFileCore(filePath: string, detectEncoding: boolean = false): Promise<string> {
     const fileExtension = path.extname(filePath)

     if (documentExts.includes(fileExtension)) {
@@ -504,7 +508,7 @@ class FileStorage {
         return data
       } catch (error) {
         chdir(originalCwd)
-        logger.error('Failed to read file:', error as Error)
+        logger.error('Failed to read document file:', error as Error)
         throw error
       }
     }
@@ -516,11 +520,72 @@ class FileStorage {
         return fs.readFileSync(filePath, 'utf-8')
       }
     } catch (error) {
-      logger.error('Failed to read file:', error as Error)
+      logger.error('Failed to read text file:', error as Error)
       throw new Error(`Failed to read file: ${filePath}.`)
     }
   }

+  /**
+   * Reads and extracts content from a stored file.
+   *
+   * Supports multiple file formats including:
+   * - Complex documents: .pdf, .doc, .docx, .pptx, .xlsx, .odt, .odp, .ods
+   * - Text files: .txt, .md, .json, .csv, etc.
+   * - Code files: .js, .ts, .py, .java, etc.
+   *
+   * For document formats, extracts text content using specialized parsers:
+   * - .doc files: Uses word-extractor library
+   * - Other Office formats: Uses officeparser library
+   *
+   * For text files, can optionally detect encoding automatically.
+   *
+   * @param _ - Electron IPC invoke event (unused)
+   * @param id - File identifier with extension (e.g., "uuid.docx")
+   * @param detectEncoding - Whether to auto-detect text file encoding (default: false)
+   * @returns Promise resolving to the extracted text content of the file
+   * @throws Error if file reading fails or file is not found
+   *
+   * @example
+   * // Read a DOCX file
+   * const content = await readFile(event, "document.docx");
+   *
+   * @example
+   * // Read a text file with encoding detection
+   * const content = await readFile(event, "text.txt", true);
+   *
+   * @example
+   * // Read a PDF file
+   * const content = await readFile(event, "manual.pdf");
+   */
+  public readFile = async (
+    _: Electron.IpcMainInvokeEvent,
+    id: string,
+    detectEncoding: boolean = false
+  ): Promise<string> => {
+    const filePath = path.join(this.storageDir, id)
+    return this.readFileCore(filePath, detectEncoding)
+  }
+
+  /**
+   * Reads and extracts content from an external file path.
+   *
+   * Similar to readFile, but operates on external file paths instead of stored files.
+   * Supports the same file formats including complex documents and text files.
+   *
+   * @param _ - Electron IPC invoke event (unused)
+   * @param filePath - Absolute path to the external file
+   * @param detectEncoding - Whether to auto-detect text file encoding (default: false)
+   * @returns Promise resolving to the extracted text content of the file
+   * @throws Error if file does not exist or reading fails
+   *
+   * @example
+   * // Read an external DOCX file
+   * const content = await readExternalFile(event, "/path/to/document.docx");
+   *
+   * @example
+   * // Read an external text file with encoding detection
+   * const content = await readExternalFile(event, "/path/to/text.txt", true);
+   */
   public readExternalFile = async (
     _: Electron.IpcMainInvokeEvent,
     filePath: string,
@@ -530,40 +595,7 @@ class FileStorage {
       throw new Error(`File does not exist: ${filePath}`)
     }
-
-    const fileExtension = path.extname(filePath)
-
-    if (documentExts.includes(fileExtension)) {
-      const originalCwd = process.cwd()
-      try {
-        chdir(this.tempDir)
-
-        if (fileExtension === '.doc') {
-          const extractor = new WordExtractor()
-          const extracted = await extractor.extract(filePath)
-          chdir(originalCwd)
-          return extracted.getBody()
-        }
-
-        const data = await officeParser.parseOfficeAsync(filePath)
-        chdir(originalCwd)
-        return data
-      } catch (error) {
-        chdir(originalCwd)
-        logger.error('Failed to read file:', error as Error)
-        throw error
-      }
-    }
-
-    try {
-      if (detectEncoding) {
-        return readTextFileWithAutoEncoding(filePath)
-      } else {
-        return fs.readFileSync(filePath, 'utf-8')
-      }
-    } catch (error) {
-      logger.error('Failed to read file:', error as Error)
-      throw new Error(`Failed to read file: ${filePath}.`)
-    }
+    return this.readFileCore(filePath, detectEncoding)
   }

   public createTempFile = async (_: Electron.IpcMainInvokeEvent, fileName: string): Promise<string> => {
@@ -1448,6 +1480,12 @@ class FileStorage {

   private createChangeHandler() {
     return (eventType: string, filePath: string) => {
+      // Skip processing if watcher is paused
+      if (this.isPaused) {
+        logger.debug('File change ignored (watcher paused)', { eventType, filePath })
+        return
+      }
+
       if (!this.shouldWatchFile(filePath, eventType)) {
         return
       }
@@ -1605,6 +1643,165 @@ class FileStorage {
       logger.error('Failed to show item in folder:', error as Error)
     }
   }

+  /**
+   * Batch upload markdown files from native File objects
+   * This handles all I/O operations in the Main process to avoid blocking Renderer
+   */
+  public batchUploadMarkdownFiles = async (
+    _: Electron.IpcMainInvokeEvent,
+    filePaths: string[],
+    targetPath: string
+  ): Promise<{
+    fileCount: number
+    folderCount: number
+    skippedFiles: number
+  }> => {
+    try {
+      logger.info('Starting batch upload', { fileCount: filePaths.length, targetPath })
+
+      const basePath = path.resolve(targetPath)
+      const MARKDOWN_EXTS = ['.md', '.markdown']
+
+      // Filter markdown files
+      const markdownFiles = filePaths.filter((filePath) => {
+        const ext = path.extname(filePath).toLowerCase()
+        return MARKDOWN_EXTS.includes(ext)
+      })
+
+      const skippedFiles = filePaths.length - markdownFiles.length
+
+      if (markdownFiles.length === 0) {
+        return { fileCount: 0, folderCount: 0, skippedFiles }
+      }
+
+      // Collect unique folders needed
+      const foldersSet = new Set<string>()
+      const fileOperations: Array<{ sourcePath: string; targetPath: string }> = []
+
+      for (const filePath of markdownFiles) {
+        try {
+          // Get relative path if file is from a directory upload
+          const fileName = path.basename(filePath)
+          const relativePath = path.dirname(filePath)
+
+          // Determine target directory structure
+          let targetDir = basePath
+          const folderParts: string[] = []
+
+          // Extract folder structure from file path for nested uploads
+          // This is a simplified version - in real scenario we'd need the original directory structure
+          if (relativePath && relativePath !== '.') {
+            const parts = relativePath.split(path.sep)
+            // Get the last few parts that represent the folder structure within upload
+            const relevantParts = parts.slice(Math.max(0, parts.length - 3))
+            folderParts.push(...relevantParts)
+          }
+
+          // Build target directory path
+          for (const part of folderParts) {
+            targetDir = path.join(targetDir, part)
+            foldersSet.add(targetDir)
+          }
+
+          // Determine final file name
+          const nameWithoutExt = fileName.endsWith('.md')
+            ? fileName.slice(0, -3)
+            : fileName.endsWith('.markdown')
+              ? fileName.slice(0, -9)
+              : fileName
+
+          const { safeName } = await this.fileNameGuard(_, targetDir, nameWithoutExt, true)
+          const finalPath = path.join(targetDir, safeName + '.md')
+
+          fileOperations.push({ sourcePath: filePath, targetPath: finalPath })
+        } catch (error) {
+          logger.error('Failed to prepare file operation:', error as Error, { filePath })
+        }
+      }
+
+      // Create folders in order (shallow to deep)
+      const sortedFolders = Array.from(foldersSet).sort((a, b) => a.length - b.length)
+      for (const folder of sortedFolders) {
+        try {
+          if (!fs.existsSync(folder)) {
+            await fs.promises.mkdir(folder, { recursive: true })
+          }
+        } catch (error) {
+          logger.debug('Folder already exists or creation failed', { folder, error: (error as Error).message })
+        }
+      }
+
+      // Process files in batches
+      const BATCH_SIZE = 10 // Higher batch size since we're in Main process
+      let successCount = 0
+
+      for (let i = 0; i < fileOperations.length; i += BATCH_SIZE) {
+        const batch = fileOperations.slice(i, i + BATCH_SIZE)
+
+        const results = await Promise.allSettled(
+          batch.map(async (op) => {
+            // Read from source and write to target in Main process
+            const content = await fs.promises.readFile(op.sourcePath, 'utf-8')
+            await fs.promises.writeFile(op.targetPath, content, 'utf-8')
+            return true
+          })
+        )

+        results.forEach((result, index) => {
+          if (result.status === 'fulfilled') {
+            successCount++
+          } else {
+            logger.error('Failed to upload file:', result.reason, {
+              file: batch[index].sourcePath
+            })
+          }
+        })
+      }
+
+      logger.info('Batch upload completed', {
+        successCount,
+        folderCount: foldersSet.size,
+        skippedFiles
+      })
+
+      return {
+        fileCount: successCount,
+        folderCount: foldersSet.size,
+        skippedFiles
+      }
+    } catch (error) {
+      logger.error('Batch upload failed:', error as Error)
+      throw error
+    }
+  }
+
+  /**
+   * Pause file watcher to prevent events during batch operations
+   */
+  public pauseFileWatcher = async (): Promise<void> => {
+    if (this.watcher) {
+      logger.debug('Pausing file watcher')
+      this.isPaused = true
+      // Clear any pending debounced notifications
+      if (this.debounceTimer) {
+        clearTimeout(this.debounceTimer)
+        this.debounceTimer = undefined
+      }
+    }
+  }
+
+  /**
+   * Resume file watcher and trigger a refresh
+   */
+  public resumeFileWatcher = async (): Promise<void> => {
+    if (this.watcher && this.currentWatchPath) {
+      logger.debug('Resuming file watcher')
+      this.isPaused = false
+      // Send a synthetic refresh event to trigger tree reload
+      this.notifyChange('refresh', this.currentWatchPath)
+    }
+  }
 }

 export const fileStorage = new FileStorage()
@@ -1398,6 +1398,50 @@ export class SelectionService {
     actionWindow.setAlwaysOnTop(isPinned)
   }

+  /**
+   * [Windows only] Manual window resize handler
+   *
+   * ELECTRON BUG WORKAROUND:
+   * In Electron, when using `frame: false` + `transparent: true`, the native window
+   * resize functionality is broken on Windows. This is a known Electron bug.
+   * See: https://github.com/electron/electron/issues/48554
+   *
+   * This method can be removed once the Electron bug is fixed.
+   */
+  public resizeActionWindow(actionWindow: BrowserWindow, deltaX: number, deltaY: number, direction: string): void {
+    const bounds = actionWindow.getBounds()
+    const minWidth = 300
+    const minHeight = 200
+
+    let { x, y, width, height } = bounds
+
+    // Handle horizontal resize
+    if (direction.includes('e')) {
+      width = Math.max(minWidth, width + deltaX)
+    }
+    if (direction.includes('w')) {
+      const newWidth = Math.max(minWidth, width - deltaX)
+      if (newWidth !== width) {
+        x = x + (width - newWidth)
+        width = newWidth
+      }
+    }
+
+    // Handle vertical resize
+    if (direction.includes('s')) {
+      height = Math.max(minHeight, height + deltaY)
+    }
+    if (direction.includes('n')) {
+      const newHeight = Math.max(minHeight, height - deltaY)
+      if (newHeight !== height) {
+        y = y + (height - newHeight)
+        height = newHeight
+      }
+    }
+
+    actionWindow.setBounds({ x, y, width, height })
+  }
+
   /**
    * Update trigger mode behavior
    * Switches between selection-based and alt-key based triggering
@@ -1494,6 +1538,18 @@ export class SelectionService {
       }
     })

+    // [Windows only] Electron bug workaround - can be removed once fixed
+    // See: https://github.com/electron/electron/issues/48554
+    ipcMain.handle(
+      IpcChannel.Selection_ActionWindowResize,
+      (event, deltaX: number, deltaY: number, direction: string) => {
+        const actionWindow = BrowserWindow.fromWebContents(event.sender)
+        if (actionWindow) {
+          selectionService?.resizeActionWindow(actionWindow, deltaX, deltaY, direction)
+        }
+      }
+    )
+
     this.isIpcHandlerRegistered = true
   }
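
The renderer end of this workaround is not shown in the diff. A minimal sketch of what the invoking side might look like, assuming the renderer has access to ipcRenderer.invoke through the preload layer (exact exposure not shown here) and tracks pointer deltas during a resize drag:

// Sketch of a drag handler inside the action window's renderer (assumptions noted above)
let lastPointer = { x: 0, y: 0 }

function onResizeDrag(e: PointerEvent, direction: string) {
  const deltaX = e.screenX - lastPointer.x
  const deltaY = e.screenY - lastPointer.y
  lastPointer = { x: e.screenX, y: e.screenY }
  // Routed to SelectionService.resizeActionWindow by the handler registered above
  ipcRenderer.invoke(IpcChannel.Selection_ActionWindowResize, deltaX, deltaY, direction)
}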
@@ -35,6 +35,15 @@ function getShortcutHandler(shortcut: Shortcut) {
     }
     case 'mini_window':
       return () => {
+        // Check the QuickAssistant state inside the handler instead of at registration time
+        const quickAssistantEnabled = preferenceService.get('feature.quick_assistant.enabled')
+        logger.info(`mini_window shortcut triggered, QuickAssistant enabled: ${quickAssistantEnabled}`)
+
+        if (!quickAssistantEnabled) {
+          logger.warn('QuickAssistant is disabled, ignoring mini_window shortcut trigger')
+          return
+        }
+
         windowService.toggleMiniWindow()
       }
     case 'selection_assistant_toggle':
@@ -190,11 +199,10 @@ export function registerShortcuts(window: BrowserWindow) {
         break

       case 'mini_window':
-        //available only when QuickAssistant enabled
-        if (!preferenceService.get('feature.quick_assistant.enabled')) {
-          return
-        }
+        // Condition check removed from registration time; it is performed inside the handler instead
+        logger.info(`Processing mini_window shortcut, enabled: ${shortcut.enabled}`)
         showMiniWindowAccelerator = formatShortcutKey(shortcut.shortcut)
+        logger.debug(`Mini window accelerator set to: ${showMiniWindowAccelerator}`)
         break

       case 'selection_assistant_toggle':
@@ -271,9 +271,9 @@ export class WindowService {
       'https://account.siliconflow.cn/oauth',
       'https://cloud.siliconflow.cn/bills',
       'https://cloud.siliconflow.cn/expensebill',
-      'https://aihubmix.com/token',
-      'https://aihubmix.com/topup',
-      'https://aihubmix.com/statistics',
+      'https://console.aihubmix.com/token',
+      'https://console.aihubmix.com/topup',
+      'https://console.aihubmix.com/statistics',
       'https://dash.302.ai/sso/login',
       'https://dash.302.ai/charge',
       'https://www.aiionly.com/login'
@@ -76,7 +76,7 @@ export abstract class BaseService {
   * Get database instance
   * Automatically waits for initialization to complete
   */
-  protected async getDatabase() {
+  public async getDatabase() {
    const dbManager = await DatabaseManager.getInstance()
    return dbManager.getDatabase()
  }
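
Widening getDatabase from protected to public lets code outside the service hierarchy obtain the shared database handle. A hedged sketch of a caller, with NotesService standing in for any concrete BaseService subclass (hypothetical, not named in this diff):

// Hypothetical external caller; resolves only after DatabaseManager has finished initializing
const db = await new NotesService().getDatabase()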
src/main/utils/__tests__/process.test.ts (new file, 572 lines)

@@ -0,0 +1,572 @@
import { execFileSync } from 'child_process'
import fs from 'fs'
import path from 'path'
import { beforeEach, describe, expect, it, vi } from 'vitest'

import { findExecutable, findGitBash } from '../process'

// Mock dependencies
vi.mock('child_process')
vi.mock('fs')
vi.mock('path')

// These tests only run on Windows since the functions have platform guards
describe.skipIf(process.platform !== 'win32')('process utilities', () => {
  beforeEach(() => {
    vi.clearAllMocks()

    // Mock path.join to concatenate paths with backslashes (Windows-style)
    vi.mocked(path.join).mockImplementation((...args) => args.join('\\'))

    // Mock path.resolve to handle path resolution with .. support
    vi.mocked(path.resolve).mockImplementation((...args) => {
      let result = args.join('\\')

      // Handle .. navigation
      while (result.includes('\\..')) {
        result = result.replace(/\\[^\\]+\\\.\./g, '')
      }

      // Ensure absolute path
      if (!result.match(/^[A-Z]:/)) {
        result = `C:\\cwd\\${result}`
      }

      return result
    })

    // Mock path.dirname
    vi.mocked(path.dirname).mockImplementation((p) => {
      const parts = p.split('\\')
      parts.pop()
      return parts.join('\\')
    })

    // Mock path.sep
    Object.defineProperty(path, 'sep', { value: '\\', writable: true })

    // Mock process.cwd()
    vi.spyOn(process, 'cwd').mockReturnValue('C:\\cwd')
  })

  describe('findExecutable', () => {
    describe('git common paths', () => {
      it('should find git at Program Files path', () => {
        const gitPath = 'C:\\Program Files\\Git\\cmd\\git.exe'
        process.env.ProgramFiles = 'C:\\Program Files'

        vi.mocked(fs.existsSync).mockImplementation((p) => p === gitPath)

        const result = findExecutable('git')

        expect(result).toBe(gitPath)
        expect(fs.existsSync).toHaveBeenCalledWith(gitPath)
      })

      it('should find git at Program Files (x86) path', () => {
        const gitPath = 'C:\\Program Files (x86)\\Git\\cmd\\git.exe'
        process.env['ProgramFiles(x86)'] = 'C:\\Program Files (x86)'

        vi.mocked(fs.existsSync).mockImplementation((p) => p === gitPath)

        const result = findExecutable('git')

        expect(result).toBe(gitPath)
        expect(fs.existsSync).toHaveBeenCalledWith(gitPath)
      })

      it('should use fallback paths when environment variables are not set', () => {
        delete process.env.ProgramFiles
        delete process.env['ProgramFiles(x86)']

        const gitPath = 'C:\\Program Files\\Git\\cmd\\git.exe'
        vi.mocked(fs.existsSync).mockImplementation((p) => p === gitPath)

        const result = findExecutable('git')

        expect(result).toBe(gitPath)
      })
    })

    describe('where.exe PATH lookup', () => {
      beforeEach(() => {
        Object.defineProperty(process, 'platform', { value: 'win32', writable: true })
        // Common paths don't exist
        vi.mocked(fs.existsSync).mockReturnValue(false)
      })

      it('should find executable via where.exe', () => {
        const gitPath = 'C:\\Git\\bin\\git.exe'

        vi.mocked(execFileSync).mockReturnValue(gitPath)

        const result = findExecutable('git')

        expect(result).toBe(gitPath)
        expect(execFileSync).toHaveBeenCalledWith('where.exe', ['git.exe'], {
          encoding: 'utf8',
          stdio: ['pipe', 'pipe', 'pipe']
        })
      })

      it('should add .exe extension when calling where.exe', () => {
        vi.mocked(execFileSync).mockImplementation(() => {
          throw new Error('Not found')
        })

        findExecutable('node')

        expect(execFileSync).toHaveBeenCalledWith('where.exe', ['node.exe'], expect.any(Object))
      })

      it('should handle Windows line endings (CRLF)', () => {
        const gitPath1 = 'C:\\Git\\bin\\git.exe'
        const gitPath2 = 'C:\\Program Files\\Git\\cmd\\git.exe'

        vi.mocked(execFileSync).mockReturnValue(`${gitPath1}\r\n${gitPath2}\r\n`)

        const result = findExecutable('git')

        // Should return the first valid path
        expect(result).toBe(gitPath1)
      })

      it('should handle Unix line endings (LF)', () => {
        const gitPath1 = 'C:\\Git\\bin\\git.exe'
        const gitPath2 = 'C:\\Program Files\\Git\\cmd\\git.exe'

        vi.mocked(execFileSync).mockReturnValue(`${gitPath1}\n${gitPath2}\n`)

        const result = findExecutable('git')

        expect(result).toBe(gitPath1)
      })

      it('should handle mixed line endings', () => {
        const gitPath1 = 'C:\\Git\\bin\\git.exe'
        const gitPath2 = 'C:\\Program Files\\Git\\cmd\\git.exe'

        vi.mocked(execFileSync).mockReturnValue(`${gitPath1}\r\n${gitPath2}\n`)

        const result = findExecutable('git')

        expect(result).toBe(gitPath1)
      })

      it('should trim whitespace from paths', () => {
        const gitPath = 'C:\\Git\\bin\\git.exe'

        vi.mocked(execFileSync).mockReturnValue(` ${gitPath} \n`)

        const result = findExecutable('git')

        expect(result).toBe(gitPath)
      })

      it('should filter empty lines', () => {
        const gitPath = 'C:\\Git\\bin\\git.exe'

        vi.mocked(execFileSync).mockReturnValue(`\n\n${gitPath}\n\n`)

        const result = findExecutable('git')

        expect(result).toBe(gitPath)
      })
    })

    describe('security checks', () => {
      beforeEach(() => {
        Object.defineProperty(process, 'platform', { value: 'win32', writable: true })
        vi.mocked(fs.existsSync).mockReturnValue(false)
      })

      it('should skip executables in current directory', () => {
        const maliciousPath = 'C:\\cwd\\git.exe'
        const safePath = 'C:\\Git\\bin\\git.exe'

        vi.mocked(execFileSync).mockReturnValue(`${maliciousPath}\n${safePath}`)

        vi.mocked(path.resolve).mockImplementation((p) => {
          if (p.includes('cwd\\git.exe')) return 'c:\\cwd\\git.exe'
          return 'c:\\git\\bin\\git.exe'
        })

        vi.mocked(path.dirname).mockImplementation((p) => {
          if (p.includes('cwd\\git.exe')) return 'c:\\cwd'
          return 'c:\\git\\bin'
        })

        const result = findExecutable('git')

        // Should skip malicious path and return safe path
        expect(result).toBe(safePath)
      })

      it('should skip executables in current directory subdirectories', () => {
        const maliciousPath = 'C:\\cwd\\subdir\\git.exe'
        const safePath = 'C:\\Git\\bin\\git.exe'

        vi.mocked(execFileSync).mockReturnValue(`${maliciousPath}\n${safePath}`)

        vi.mocked(path.resolve).mockImplementation((p) => {
          if (p.includes('cwd\\subdir')) return 'c:\\cwd\\subdir\\git.exe'
          return 'c:\\git\\bin\\git.exe'
        })

        vi.mocked(path.dirname).mockImplementation((p) => {
          if (p.includes('cwd\\subdir')) return 'c:\\cwd\\subdir'
          return 'c:\\git\\bin'
        })

        const result = findExecutable('git')

        expect(result).toBe(safePath)
      })

      it('should return null when only malicious executables are found', () => {
        const maliciousPath = 'C:\\cwd\\git.exe'

        vi.mocked(execFileSync).mockReturnValue(maliciousPath)

        vi.mocked(path.resolve).mockReturnValue('c:\\cwd\\git.exe')
        vi.mocked(path.dirname).mockReturnValue('c:\\cwd')

        const result = findExecutable('git')

        expect(result).toBeNull()
      })
    })

    describe('error handling', () => {
      beforeEach(() => {
        Object.defineProperty(process, 'platform', { value: 'win32', writable: true })
        vi.mocked(fs.existsSync).mockReturnValue(false)
      })

      it('should return null when where.exe fails', () => {
        vi.mocked(execFileSync).mockImplementation(() => {
          throw new Error('Command failed')
        })

        const result = findExecutable('nonexistent')

        expect(result).toBeNull()
      })

      it('should return null when where.exe returns empty output', () => {
        vi.mocked(execFileSync).mockReturnValue('')

        const result = findExecutable('git')

        expect(result).toBeNull()
      })

      it('should return null when where.exe returns only whitespace', () => {
        vi.mocked(execFileSync).mockReturnValue(' \n\n ')

        const result = findExecutable('git')

        expect(result).toBeNull()
      })
    })

    describe('non-git executables', () => {
      beforeEach(() => {
        Object.defineProperty(process, 'platform', { value: 'win32', writable: true })
      })

      it('should skip common paths check for non-git executables', () => {
        const nodePath = 'C:\\Program Files\\nodejs\\node.exe'

        vi.mocked(execFileSync).mockReturnValue(nodePath)

        const result = findExecutable('node')

        expect(result).toBe(nodePath)
        // Should not check common Git paths
        expect(fs.existsSync).not.toHaveBeenCalledWith(expect.stringContaining('Git\\cmd\\node.exe'))
      })
    })
  })

  describe('findGitBash', () => {
    describe('git.exe path derivation', () => {
      it('should derive bash.exe from standard Git installation (Git/cmd/git.exe)', () => {
        const gitPath = 'C:\\Program Files\\Git\\cmd\\git.exe'
        const bashPath = 'C:\\Program Files\\Git\\bin\\bash.exe'

        // findExecutable will find git at common path
        process.env.ProgramFiles = 'C:\\Program Files'
        vi.mocked(fs.existsSync).mockImplementation((p) => {
          return p === gitPath || p === bashPath
        })

        const result = findGitBash()

        expect(result).toBe(bashPath)
      })

      it('should derive bash.exe from portable Git installation (Git/bin/git.exe)', () => {
        const gitPath = 'C:\\PortableGit\\bin\\git.exe'
        const bashPath = 'C:\\PortableGit\\bin\\bash.exe'

        // Mock: common git paths don't exist, but where.exe finds portable git
        vi.mocked(fs.existsSync).mockImplementation((p) => {
          const pathStr = p?.toString() || ''
          // Common git paths don't exist
          if (pathStr.includes('Program Files\\Git\\cmd\\git.exe')) return false
          if (pathStr.includes('Program Files (x86)\\Git\\cmd\\git.exe')) return false
          // Portable bash.exe exists at Git/bin/bash.exe (second path in possibleBashPaths)
          if (pathStr === bashPath) return true
          return false
        })

        // where.exe returns portable git path
        vi.mocked(execFileSync).mockReturnValue(gitPath)

        const result = findGitBash()

        expect(result).toBe(bashPath)
      })

      it('should derive bash.exe from MSYS2 Git installation (Git/usr/bin/bash.exe)', () => {
        const gitPath = 'C:\\msys64\\usr\\bin\\git.exe'
        const bashPath = 'C:\\msys64\\usr\\bin\\bash.exe'

        vi.mocked(fs.existsSync).mockImplementation((p) => {
          const pathStr = p?.toString() || ''
          // Common git paths don't exist
          if (pathStr.includes('Program Files\\Git\\cmd\\git.exe')) return false
          if (pathStr.includes('Program Files (x86)\\Git\\cmd\\git.exe')) return false
          // MSYS2 bash.exe exists at usr/bin/bash.exe (third path in possibleBashPaths)
          if (pathStr === bashPath) return true
          return false
        })

        vi.mocked(execFileSync).mockReturnValue(gitPath)

        const result = findGitBash()

        expect(result).toBe(bashPath)
      })

      it('should try multiple bash.exe locations in order', () => {
        const gitPath = 'C:\\Git\\cmd\\git.exe'
        const bashPath = 'C:\\Git\\bin\\bash.exe'

        vi.mocked(fs.existsSync).mockImplementation((p) => {
          const pathStr = p?.toString() || ''
          // Common git paths don't exist
          if (pathStr.includes('Program Files\\Git\\cmd\\git.exe')) return false
          if (pathStr.includes('Program Files (x86)\\Git\\cmd\\git.exe')) return false
          // Standard path exists (first in possibleBashPaths)
          if (pathStr === bashPath) return true
          return false
        })

        vi.mocked(execFileSync).mockReturnValue(gitPath)

        const result = findGitBash()

        expect(result).toBe(bashPath)
      })

      it('should handle when git.exe is found but bash.exe is not at any derived location', () => {
        const gitPath = 'C:\\Git\\cmd\\git.exe'

        // git.exe exists via where.exe, but bash.exe doesn't exist at any derived location
        vi.mocked(fs.existsSync).mockImplementation(() => {
          // Only return false for all bash.exe checks
          return false
        })

        vi.mocked(execFileSync).mockReturnValue(gitPath)

        const result = findGitBash()

        // Should fall back to common paths check
        expect(result).toBeNull()
      })
    })

    describe('common paths fallback', () => {
      beforeEach(() => {
        // git.exe not found
        vi.mocked(execFileSync).mockImplementation(() => {
          throw new Error('Not found')
        })
      })

      it('should check Program Files path', () => {
        const bashPath = 'C:\\Program Files\\Git\\bin\\bash.exe'
        process.env.ProgramFiles = 'C:\\Program Files'

        vi.mocked(fs.existsSync).mockImplementation((p) => p === bashPath)

        const result = findGitBash()

        expect(result).toBe(bashPath)
      })

      it('should check Program Files (x86) path', () => {
        const bashPath = 'C:\\Program Files (x86)\\Git\\bin\\bash.exe'
        process.env['ProgramFiles(x86)'] = 'C:\\Program Files (x86)'

        vi.mocked(fs.existsSync).mockImplementation((p) => p === bashPath)

        const result = findGitBash()

        expect(result).toBe(bashPath)
      })

      it('should check LOCALAPPDATA path', () => {
        const bashPath = 'C:\\Users\\User\\AppData\\Local\\Programs\\Git\\bin\\bash.exe'
        process.env.LOCALAPPDATA = 'C:\\Users\\User\\AppData\\Local'

        vi.mocked(fs.existsSync).mockImplementation((p) => p === bashPath)

        const result = findGitBash()

        expect(result).toBe(bashPath)
      })

      it('should skip LOCALAPPDATA check when environment variable is not set', () => {
        delete process.env.LOCALAPPDATA

        vi.mocked(fs.existsSync).mockReturnValue(false)

        const result = findGitBash()

        expect(result).toBeNull()
        // Should not check invalid path with empty LOCALAPPDATA
        expect(fs.existsSync).not.toHaveBeenCalledWith(expect.stringContaining('undefined'))
      })

      it('should use fallback values when environment variables are not set', () => {
        delete process.env.ProgramFiles
        delete process.env['ProgramFiles(x86)']

        const bashPath = 'C:\\Program Files\\Git\\bin\\bash.exe'
        vi.mocked(fs.existsSync).mockImplementation((p) => p === bashPath)

        const result = findGitBash()

        expect(result).toBe(bashPath)
      })
    })

    describe('priority order', () => {
      it('should prioritize git.exe derivation over common paths', () => {
        const gitPath = 'C:\\CustomPath\\Git\\cmd\\git.exe'
        const derivedBashPath = 'C:\\CustomPath\\Git\\bin\\bash.exe'
        const commonBashPath = 'C:\\Program Files\\Git\\bin\\bash.exe'

        // Both exist
        vi.mocked(fs.existsSync).mockImplementation((p) => {
          const pathStr = p?.toString() || ''
          // Common git paths don't exist (so findExecutable uses where.exe)
          if (pathStr.includes('Program Files\\Git\\cmd\\git.exe')) return false
          if (pathStr.includes('Program Files (x86)\\Git\\cmd\\git.exe')) return false
          // Both bash paths exist, but derived should be checked first
          if (pathStr === derivedBashPath) return true
          if (pathStr === commonBashPath) return true
          return false
        })

        vi.mocked(execFileSync).mockReturnValue(gitPath)

        const result = findGitBash()

        // Should return derived path, not common path
        expect(result).toBe(derivedBashPath)
      })
    })

    describe('error scenarios', () => {
      it('should return null when Git is not installed anywhere', () => {
        vi.mocked(fs.existsSync).mockReturnValue(false)
        vi.mocked(execFileSync).mockImplementation(() => {
          throw new Error('Not found')
        })

        const result = findGitBash()

        expect(result).toBeNull()
      })

      it('should return null when git.exe exists but bash.exe does not', () => {
        const gitPath = 'C:\\Git\\cmd\\git.exe'

        vi.mocked(fs.existsSync).mockImplementation((p) => {
          // git.exe exists, but no bash.exe anywhere
          return p === gitPath
        })

        vi.mocked(execFileSync).mockReturnValue(gitPath)

        const result = findGitBash()

        expect(result).toBeNull()
      })
    })

    describe('real-world scenarios', () => {
      it('should handle official Git for Windows installer', () => {
        const gitPath = 'C:\\Program Files\\Git\\cmd\\git.exe'
        const bashPath = 'C:\\Program Files\\Git\\bin\\bash.exe'

        process.env.ProgramFiles = 'C:\\Program Files'
        vi.mocked(fs.existsSync).mockImplementation((p) => {
          return p === gitPath || p === bashPath
        })

        const result = findGitBash()

        expect(result).toBe(bashPath)
      })

      it('should handle portable Git installation in custom directory', () => {
        const gitPath = 'D:\\DevTools\\PortableGit\\bin\\git.exe'
        const bashPath = 'D:\\DevTools\\PortableGit\\bin\\bash.exe'

        vi.mocked(fs.existsSync).mockImplementation((p) => {
          const pathStr = p?.toString() || ''
          // Common paths don't exist
          if (pathStr.includes('Program Files\\Git\\cmd\\git.exe')) return false
          if (pathStr.includes('Program Files (x86)\\Git\\cmd\\git.exe')) return false
          // Portable Git paths exist (portable uses second path: Git/bin/bash.exe)
          if (pathStr === bashPath) return true
          return false
        })

        vi.mocked(execFileSync).mockReturnValue(gitPath)

        const result = findGitBash()

        expect(result).toBe(bashPath)
      })

      it('should handle Git installed via Scoop', () => {
        // Scoop typically installs to %USERPROFILE%\scoop\apps\git\current
        const gitPath = 'C:\\Users\\User\\scoop\\apps\\git\\current\\cmd\\git.exe'
        const bashPath = 'C:\\Users\\User\\scoop\\apps\\git\\current\\bin\\bash.exe'

        vi.mocked(fs.existsSync).mockImplementation((p) => {
          const pathStr = p?.toString() || ''
          // Common paths don't exist
          if (pathStr.includes('Program Files\\Git\\cmd\\git.exe')) return false
          if (pathStr.includes('Program Files (x86)\\Git\\cmd\\git.exe')) return false
          // Scoop bash path exists (standard structure: cmd -> bin)
          if (pathStr === bashPath) return true
          return false
        })

        vi.mocked(execFileSync).mockReturnValue(gitPath)

        const result = findGitBash()

        expect(result).toBe(bashPath)
      })
    })
  })
})
@ -1,10 +1,11 @@
import { loggerService } from '@logger'
import { HOME_CHERRY_DIR } from '@shared/config/constant'
import { spawn } from 'child_process'
import { execFileSync, spawn } from 'child_process'
import fs from 'fs'
import os from 'os'
import path from 'path'

import { isWin } from '../constant'
import { getResourcePath } from '.'

const logger = loggerService.withContext('Utils:Process')
@ -39,7 +40,7 @@ export function runInstallScript(scriptPath: string): Promise<void> {
}

export async function getBinaryName(name: string): Promise<string> {
if (process.platform === 'win32') {
if (isWin) {
return `${name}.exe`
}
return name
@ -60,3 +61,123 @@ export async function isBinaryExists(name: string): Promise<boolean> {
const cmd = await getBinaryPath(name)
return await fs.existsSync(cmd)
}

/**
* Find executable in common paths or PATH environment variable
* Based on Claude Code's implementation with security checks
* @param name - Name of the executable to find (without .exe extension)
* @returns Full path to the executable or null if not found
*/
export function findExecutable(name: string): string | null {
// This implementation uses where.exe which is Windows-only
if (!isWin) {
return null
}

// Special handling for git - check common installation paths first
if (name === 'git') {
const commonGitPaths = [
path.join(process.env.ProgramFiles || 'C:\\Program Files', 'Git', 'cmd', 'git.exe'),
path.join(process.env['ProgramFiles(x86)'] || 'C:\\Program Files (x86)', 'Git', 'cmd', 'git.exe')
]

for (const gitPath of commonGitPaths) {
if (fs.existsSync(gitPath)) {
logger.debug(`Found ${name} at common path`, { path: gitPath })
return gitPath
}
}
}

// Use where.exe to find executable in PATH
// Use execFileSync to prevent command injection
try {
// Add .exe extension for more precise matching on Windows
const executableName = `${name}.exe`
const result = execFileSync('where.exe', [executableName], {
encoding: 'utf8',
stdio: ['pipe', 'pipe', 'pipe']
})

// Handle both Windows (\r\n) and Unix (\n) line endings
const paths = result.trim().split(/\r?\n/).filter(Boolean)
const currentDir = process.cwd().toLowerCase()

// Security check: skip executables in current directory
for (const exePath of paths) {
// Trim whitespace from where.exe output
const cleanPath = exePath.trim()
const resolvedPath = path.resolve(cleanPath).toLowerCase()
const execDir = path.dirname(resolvedPath).toLowerCase()

// Skip if in current directory or subdirectory (potential malware)
if (execDir === currentDir || execDir.startsWith(currentDir + path.sep)) {
logger.warn('Skipping potentially malicious executable in current directory', {
path: cleanPath
})
continue
}

logger.debug(`Found ${name} via where.exe`, { path: cleanPath })
return cleanPath
}

return null
} catch (error) {
logger.debug(`where.exe ${name} failed`, { error })
return null
}
}

/**
* Find Git Bash executable on Windows
* @returns Full path to bash.exe or null if not found
*/
export function findGitBash(): string | null {
// Git Bash is Windows-only
if (!isWin) {
return null
}

// 1. Find git.exe and derive bash.exe path
const gitPath = findExecutable('git')
if (gitPath) {
// Try multiple possible locations for bash.exe relative to git.exe
// Different Git installations have different directory structures
const possibleBashPaths = [
path.join(gitPath, '..', '..', 'bin', 'bash.exe'), // Standard Git: git.exe at Git/cmd/ -> navigate up 2 levels -> then bin/bash.exe
path.join(gitPath, '..', 'bash.exe'), // Portable Git: git.exe at Git/bin/ -> bash.exe in same directory
path.join(gitPath, '..', '..', 'usr', 'bin', 'bash.exe') // MSYS2 Git: git.exe at msys64/usr/bin/ -> navigate up 2 levels -> then usr/bin/bash.exe
]

for (const bashPath of possibleBashPaths) {
const resolvedBashPath = path.resolve(bashPath)
if (fs.existsSync(resolvedBashPath)) {
logger.debug('Found bash.exe via git.exe path derivation', { path: resolvedBashPath })
return resolvedBashPath
}
}

logger.debug('bash.exe not found at expected locations relative to git.exe', {
gitPath,
checkedPaths: possibleBashPaths.map((p) => path.resolve(p))
})
}

// 2. Fallback: check common Git Bash paths directly
const commonBashPaths = [
path.join(process.env.ProgramFiles || 'C:\\Program Files', 'Git', 'bin', 'bash.exe'),
path.join(process.env['ProgramFiles(x86)'] || 'C:\\Program Files (x86)', 'Git', 'bin', 'bash.exe'),
...(process.env.LOCALAPPDATA ? [path.join(process.env.LOCALAPPDATA, 'Programs', 'Git', 'bin', 'bash.exe')] : [])
]

for (const bashPath of commonBashPaths) {
if (fs.existsSync(bashPath)) {
logger.debug('Found bash.exe at common path', { path: bashPath })
return bashPath
}
}

logger.debug('Git Bash not found - checked git derivation and common paths')
return null
}

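Usage sketch (not part of this commit): a caller could hand the discovered bash.exe to spawn. The runInGitBash helper below is hypothetical and only assumes the spawn import and the findGitBash function shown above.

// Hypothetical helper: run a single command through Git Bash when it is available.
export function runInGitBash(command: string): Promise<number> {
  const bashPath = findGitBash()
  if (!bashPath) {
    return Promise.reject(new Error('Git Bash not found on this system'))
  }
  return new Promise((resolve, reject) => {
    const child = spawn(bashPath, ['-lc', command], { stdio: 'inherit' })
    child.once('error', reject)
    child.once('close', (code) => resolve(code ?? -1))
  })
}
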
@ -227,6 +227,10 @@ const api = {
startFileWatcher: (dirPath: string, config?: any) =>
ipcRenderer.invoke(IpcChannel.File_StartWatcher, dirPath, config),
stopFileWatcher: () => ipcRenderer.invoke(IpcChannel.File_StopWatcher),
pauseFileWatcher: () => ipcRenderer.invoke(IpcChannel.File_PauseWatcher),
resumeFileWatcher: () => ipcRenderer.invoke(IpcChannel.File_ResumeWatcher),
batchUploadMarkdown: (filePaths: string[], targetPath: string) =>
ipcRenderer.invoke(IpcChannel.File_BatchUploadMarkdown, filePaths, targetPath),
onFileChange: (callback: (data: FileChangeEvent) => void) => {
const listener = (_event: Electron.IpcRendererEvent, data: any) => {
if (data && typeof data === 'object') {
@ -450,7 +454,10 @@ const api = {
ipcRenderer.invoke(IpcChannel.Selection_ProcessAction, actionItem, isFullScreen),
closeActionWindow: () => ipcRenderer.invoke(IpcChannel.Selection_ActionWindowClose),
minimizeActionWindow: () => ipcRenderer.invoke(IpcChannel.Selection_ActionWindowMinimize),
pinActionWindow: (isPinned: boolean) => ipcRenderer.invoke(IpcChannel.Selection_ActionWindowPin, isPinned)
pinActionWindow: (isPinned: boolean) => ipcRenderer.invoke(IpcChannel.Selection_ActionWindowPin, isPinned),
// [Windows only] Electron bug workaround - can be removed once https://github.com/electron/electron/issues/48554 is fixed
resizeActionWindow: (deltaX: number, deltaY: number, direction: string) =>
ipcRenderer.invoke(IpcChannel.Selection_ActionWindowResize, deltaX, deltaY, direction)
},
agentTools: {
respondToPermission: (payload: {

@ -120,9 +120,12 @@ export default class ModernAiProvider {
throw new Error('Model is required for completions. Please use constructor with model parameter.')
}

// Regenerate the config on every request so that API key rotation takes effect
this.config = providerToAiSdkConfig(this.actualProvider, this.model)
logger.debug('Generated provider config for completions', this.config)
// Config is now set in constructor, ApiService handles key rotation before passing provider
if (!this.config) {
// If config wasn't set in constructor (when provider only), generate it now
this.config = providerToAiSdkConfig(this.actualProvider, this.model!)
}
logger.debug('Using provider config for completions', this.config)

// Check whether config exists
if (!this.config) {

@ -3,9 +3,10 @@ import { loggerService } from '@logger'
import {
getModelSupportedVerbosity,
isFunctionCallingModel,
isNotSupportTemperatureAndTopP,
isOpenAIModel,
isSupportFlexServiceTierModel
isSupportFlexServiceTierModel,
isSupportTemperatureModel,
isSupportTopPModel
} from '@renderer/config/models'
import { getLMStudioKeepAliveTime } from '@renderer/hooks/useLMStudio'
import { getAssistantSettings } from '@renderer/services/AssistantService'
@ -200,7 +201,7 @@ export abstract class BaseApiClient<
}

public getTemperature(assistant: Assistant, model: Model): number | undefined {
if (isNotSupportTemperatureAndTopP(model)) {
if (!isSupportTemperatureModel(model)) {
return undefined
}
const assistantSettings = getAssistantSettings(assistant)
@ -208,7 +209,7 @@ export abstract class BaseApiClient<
}

public getTopP(assistant: Assistant, model: Model): number | undefined {
if (isNotSupportTemperatureAndTopP(model)) {
if (!isSupportTopPModel(model)) {
return undefined
}
const assistantSettings = getAssistantSettings(assistant)

@ -2,6 +2,7 @@ import type OpenAI from '@cherrystudio/openai'
import { loggerService } from '@logger'
import { isSupportedModel } from '@renderer/config/models'
import { objectKeys, type Provider } from '@renderer/types'
import { formatApiHost, withoutTrailingApiVersion } from '@renderer/utils'

import { OpenAIAPIClient } from '../openai/OpenAIApiClient'

@ -15,11 +16,8 @@ export class OVMSClient extends OpenAIAPIClient {
override async listModels(): Promise<OpenAI.Models.Model[]> {
try {
const sdk = await this.getSdkInstance()

const chatModelsResponse = await sdk.request({
method: 'get',
path: '../v1/config'
})
const url = formatApiHost(withoutTrailingApiVersion(this.getBaseURL()), true, 'v1')
const chatModelsResponse = await sdk.withOptions({ baseURL: url }).get('/config')
logger.debug(`Chat models response: ${JSON.stringify(chatModelsResponse)}`)

// Parse the config response to extract model information

@ -4,60 +4,81 @@
*/

import {
isClaude45ReasoningModel,
isClaudeReasoningModel,
isMaxTemperatureOneModel,
isNotSupportTemperatureAndTopP,
isSupportedFlexServiceTier,
isSupportedThinkingTokenClaudeModel
isSupportedThinkingTokenClaudeModel,
isSupportTemperatureModel,
isSupportTopPModel,
isTemperatureTopPMutuallyExclusiveModel
} from '@renderer/config/models'
import { getAssistantSettings, getProviderByModel } from '@renderer/services/AssistantService'
import {
DEFAULT_ASSISTANT_SETTINGS,
getAssistantSettings,
getProviderByModel
} from '@renderer/services/AssistantService'
import type { Assistant, Model } from '@renderer/types'
import { defaultTimeout } from '@shared/config/constant'

import { getAnthropicThinkingBudget } from '../utils/reasoning'

/**
* Claude 4.5 reasoning models:
* - only temperature enabled → use temperature
* - only top_p enabled → use top_p
* - both enabled → temperature takes effect, top_p is ignored
* - neither enabled → use neither
* Get the temperature parameter
* Retrieves the temperature parameter, adapting it based on assistant.settings and model capabilities.
* - Disabled for Claude reasoning models when reasoning effort is set.
* - Disabled for models that do not support temperature.
* - Disabled for Claude 4.5 reasoning models when TopP is enabled and temperature is disabled.
* Otherwise, returns the temperature value if the assistant has temperature enabled.
*/
export function getTemperature(assistant: Assistant, model: Model): number | undefined {
if (assistant.settings?.reasoning_effort && isClaudeReasoningModel(model)) {
return undefined
}

if (!isSupportTemperatureModel(model)) {
return undefined
}

if (
isNotSupportTemperatureAndTopP(model) ||
(isClaude45ReasoningModel(model) && assistant.settings?.enableTopP && !assistant.settings?.enableTemperature)
isTemperatureTopPMutuallyExclusiveModel(model) &&
assistant.settings?.enableTopP &&
!assistant.settings?.enableTemperature
) {
return undefined
}

const assistantSettings = getAssistantSettings(assistant)
let temperature = assistantSettings?.temperature
if (temperature && isMaxTemperatureOneModel(model)) {
temperature = Math.min(1, temperature)
}
return assistantSettings?.enableTemperature ? temperature : undefined

// FIXME: assistant.settings.enableTemperature should be always a boolean value.
const enableTemperature = assistantSettings?.enableTemperature ?? DEFAULT_ASSISTANT_SETTINGS.enableTemperature
return enableTemperature ? temperature : undefined
}

/**
* Get the TopP parameter
* Retrieves the TopP parameter, adapting it based on assistant.settings and model capabilities.
* - Disabled for Claude reasoning models when reasoning effort is set.
* - Disabled for models that do not support TopP.
* - Disabled for Claude 4.5 reasoning models when temperature is explicitly enabled.
* Otherwise, returns the TopP value if the assistant has TopP enabled.
*/
export function getTopP(assistant: Assistant, model: Model): number | undefined {
if (assistant.settings?.reasoning_effort && isClaudeReasoningModel(model)) {
return undefined
}
if (
isNotSupportTemperatureAndTopP(model) ||
(isClaude45ReasoningModel(model) && assistant.settings?.enableTemperature)
) {
if (!isSupportTopPModel(model)) {
return undefined
}
if (isTemperatureTopPMutuallyExclusiveModel(model) && assistant.settings?.enableTemperature) {
return undefined
}

const assistantSettings = getAssistantSettings(assistant)
return assistantSettings?.enableTopP ? assistantSettings?.topP : undefined
// FIXME: assistant.settings.enableTopP should be always a boolean value.
const enableTopP = assistantSettings.enableTopP ?? DEFAULT_ASSISTANT_SETTINGS.enableTopP
return enableTopP ? assistantSettings?.topP : undefined
}
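
Worked example of the rules above, assuming a Claude 4.5 reasoning model with both toggles enabled; the object literals are trimmed for illustration and cast loosely, so treat the results as a sketch rather than a definitive fixture.

// Illustrative only: temperature and top_p both enabled on a Claude 4.5 reasoning model.
const exampleAssistant = {
  settings: { enableTemperature: true, enableTopP: true, temperature: 0.7, topP: 0.9 }
} as unknown as Assistant
const exampleModel = {
  id: 'claude-sonnet-4.5-20250514',
  provider: 'anthropic',
  name: 'Claude Sonnet 4.5',
  group: 'Claude 4.5'
} as unknown as Model

getTemperature(exampleAssistant, exampleModel) // 0.7 — temperature wins when both are enabled
getTopP(exampleAssistant, exampleModel) // undefined — top_p is dropped for mutually exclusive models
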

/**
|
||||
@ -22,11 +22,15 @@ vi.mock('@renderer/services/AssistantService', () => ({
|
||||
})
|
||||
}))
|
||||
|
||||
vi.mock('@renderer/store', () => ({
|
||||
default: {
|
||||
getState: () => ({ copilot: { defaultHeaders: {} } })
|
||||
vi.mock('@renderer/store', () => {
|
||||
const mockGetState = vi.fn()
|
||||
return {
|
||||
default: {
|
||||
getState: mockGetState
|
||||
},
|
||||
__mockGetState: mockGetState
|
||||
}
|
||||
}))
|
||||
})
|
||||
|
||||
vi.mock('@renderer/utils/api', () => ({
|
||||
formatApiHost: vi.fn((host, isSupportedAPIVersion = true) => {
|
||||
@ -79,6 +83,8 @@ import { isCherryAIProvider, isPerplexityProvider } from '@renderer/utils/provid
|
||||
import { COPILOT_DEFAULT_HEADERS, COPILOT_EDITOR_VERSION, isCopilotResponsesModel } from '../constants'
|
||||
import { getActualProvider, providerToAiSdkConfig } from '../providerConfig'
|
||||
|
||||
const { __mockGetState: mockGetState } = vi.mocked(await import('@renderer/store')) as any
|
||||
|
||||
const createWindowKeyv = () => {
|
||||
const store = new Map<string, string>()
|
||||
return {
|
||||
@ -132,6 +138,16 @@ describe('Copilot responses routing', () => {
|
||||
...(globalThis as any).window,
|
||||
keyv: createWindowKeyv()
|
||||
}
|
||||
mockGetState.mockReturnValue({
|
||||
copilot: { defaultHeaders: {} },
|
||||
settings: {
|
||||
openAI: {
|
||||
streamOptions: {
|
||||
includeUsage: undefined
|
||||
}
|
||||
}
|
||||
}
|
||||
})
|
||||
})
|
||||
|
||||
it('detects official GPT-5 Codex identifiers case-insensitively', () => {
|
||||
@ -167,6 +183,16 @@ describe('CherryAI provider configuration', () => {
|
||||
...(globalThis as any).window,
|
||||
keyv: createWindowKeyv()
|
||||
}
|
||||
mockGetState.mockReturnValue({
|
||||
copilot: { defaultHeaders: {} },
|
||||
settings: {
|
||||
openAI: {
|
||||
streamOptions: {
|
||||
includeUsage: undefined
|
||||
}
|
||||
}
|
||||
}
|
||||
})
|
||||
vi.clearAllMocks()
|
||||
})
|
||||
|
||||
@ -231,6 +257,16 @@ describe('Perplexity provider configuration', () => {
|
||||
...(globalThis as any).window,
|
||||
keyv: createWindowKeyv()
|
||||
}
|
||||
mockGetState.mockReturnValue({
|
||||
copilot: { defaultHeaders: {} },
|
||||
settings: {
|
||||
openAI: {
|
||||
streamOptions: {
|
||||
includeUsage: undefined
|
||||
}
|
||||
}
|
||||
}
|
||||
})
|
||||
vi.clearAllMocks()
|
||||
})
|
||||
|
||||
@ -291,3 +327,165 @@ describe('Perplexity provider configuration', () => {
|
||||
expect(actualProvider.apiHost).toBe('')
|
||||
})
|
||||
})
|
||||
|
||||
describe('Stream options includeUsage configuration', () => {
|
||||
beforeEach(() => {
|
||||
;(globalThis as any).window = {
|
||||
...(globalThis as any).window,
|
||||
keyv: createWindowKeyv()
|
||||
}
|
||||
vi.clearAllMocks()
|
||||
})
|
||||
|
||||
const createOpenAIProvider = (): Provider => ({
|
||||
id: 'openai-compatible',
|
||||
type: 'openai',
|
||||
name: 'OpenAI',
|
||||
apiKey: 'test-key',
|
||||
apiHost: 'https://api.openai.com',
|
||||
models: [],
|
||||
isSystem: true
|
||||
})
|
||||
|
||||
it('uses includeUsage from settings when undefined', () => {
|
||||
mockGetState.mockReturnValue({
|
||||
copilot: { defaultHeaders: {} },
|
||||
settings: {
|
||||
openAI: {
|
||||
streamOptions: {
|
||||
includeUsage: undefined
|
||||
}
|
||||
}
|
||||
}
|
||||
})
|
||||
|
||||
const provider = createOpenAIProvider()
|
||||
const config = providerToAiSdkConfig(provider, createModel('gpt-4', 'GPT-4', 'openai'))
|
||||
|
||||
expect(config.options.includeUsage).toBeUndefined()
|
||||
})
|
||||
|
||||
it('uses includeUsage from settings when set to true', () => {
|
||||
mockGetState.mockReturnValue({
|
||||
copilot: { defaultHeaders: {} },
|
||||
settings: {
|
||||
openAI: {
|
||||
streamOptions: {
|
||||
includeUsage: true
|
||||
}
|
||||
}
|
||||
}
|
||||
})
|
||||
|
||||
const provider = createOpenAIProvider()
|
||||
const config = providerToAiSdkConfig(provider, createModel('gpt-4', 'GPT-4', 'openai'))
|
||||
|
||||
expect(config.options.includeUsage).toBe(true)
|
||||
})
|
||||
|
||||
it('uses includeUsage from settings when set to false', () => {
|
||||
mockGetState.mockReturnValue({
|
||||
copilot: { defaultHeaders: {} },
|
||||
settings: {
|
||||
openAI: {
|
||||
streamOptions: {
|
||||
includeUsage: false
|
||||
}
|
||||
}
|
||||
}
|
||||
})
|
||||
|
||||
const provider = createOpenAIProvider()
|
||||
const config = providerToAiSdkConfig(provider, createModel('gpt-4', 'GPT-4', 'openai'))
|
||||
|
||||
expect(config.options.includeUsage).toBe(false)
|
||||
})
|
||||
|
||||
it('respects includeUsage setting for non-supporting providers', () => {
|
||||
mockGetState.mockReturnValue({
|
||||
copilot: { defaultHeaders: {} },
|
||||
settings: {
|
||||
openAI: {
|
||||
streamOptions: {
|
||||
includeUsage: true
|
||||
}
|
||||
}
|
||||
}
|
||||
})
|
||||
|
||||
const testProvider: Provider = {
|
||||
id: 'test',
|
||||
type: 'openai',
|
||||
name: 'test',
|
||||
apiKey: 'test-key',
|
||||
apiHost: 'https://api.test.com',
|
||||
models: [],
|
||||
isSystem: false,
|
||||
apiOptions: {
|
||||
isNotSupportStreamOptions: true
|
||||
}
|
||||
}
|
||||
|
||||
const config = providerToAiSdkConfig(testProvider, createModel('gpt-4', 'GPT-4', 'test'))
|
||||
|
||||
// Even though setting is true, provider doesn't support it, so includeUsage should be undefined
|
||||
expect(config.options.includeUsage).toBeUndefined()
|
||||
})
|
||||
|
||||
it('uses includeUsage from settings for Copilot provider when set to false', () => {
|
||||
mockGetState.mockReturnValue({
|
||||
copilot: { defaultHeaders: {} },
|
||||
settings: {
|
||||
openAI: {
|
||||
streamOptions: {
|
||||
includeUsage: false
|
||||
}
|
||||
}
|
||||
}
|
||||
})
|
||||
|
||||
const provider = createCopilotProvider()
|
||||
const config = providerToAiSdkConfig(provider, createModel('gpt-4', 'GPT-4', 'copilot'))
|
||||
|
||||
expect(config.options.includeUsage).toBe(false)
|
||||
expect(config.providerId).toBe('github-copilot-openai-compatible')
|
||||
})
|
||||
|
||||
it('uses includeUsage from settings for Copilot provider when set to true', () => {
|
||||
mockGetState.mockReturnValue({
|
||||
copilot: { defaultHeaders: {} },
|
||||
settings: {
|
||||
openAI: {
|
||||
streamOptions: {
|
||||
includeUsage: true
|
||||
}
|
||||
}
|
||||
}
|
||||
})
|
||||
|
||||
const provider = createCopilotProvider()
|
||||
const config = providerToAiSdkConfig(provider, createModel('gpt-4', 'GPT-4', 'copilot'))
|
||||
|
||||
expect(config.options.includeUsage).toBe(true)
|
||||
expect(config.providerId).toBe('github-copilot-openai-compatible')
|
||||
})
|
||||
|
||||
it('uses includeUsage from settings for Copilot provider when undefined', () => {
|
||||
mockGetState.mockReturnValue({
|
||||
copilot: { defaultHeaders: {} },
|
||||
settings: {
|
||||
openAI: {
|
||||
streamOptions: {
|
||||
includeUsage: undefined
|
||||
}
|
||||
}
|
||||
}
|
||||
})
|
||||
|
||||
const provider = createCopilotProvider()
|
||||
const config = providerToAiSdkConfig(provider, createModel('gpt-4', 'GPT-4', 'copilot'))
|
||||
|
||||
expect(config.options.includeUsage).toBeUndefined()
|
||||
expect(config.providerId).toBe('github-copilot-openai-compatible')
|
||||
})
|
||||
})
|
||||
|
||||
@ -1,5 +1,4 @@
import { formatPrivateKey, hasProviderConfig, ProviderConfigFactory } from '@cherrystudio/ai-core/provider'
import { cacheService } from '@data/CacheService'
import { isOpenAIChatCompletionOnlyModel } from '@renderer/config/models'
import {
getAwsBedrockAccessKeyId,
@ -12,6 +11,7 @@ import { createVertexProvider, isVertexAIConfigured } from '@renderer/hooks/useV
import { getProviderByModel } from '@renderer/services/AssistantService'
import store from '@renderer/store'
import { isSystemProvider, type Model, type Provider, SystemProviderIds } from '@renderer/types'
import type { OpenAICompletionsStreamOptions } from '@renderer/types/aiCoreTypes'
import {
formatApiHost,
formatAzureOpenAIApiHost,
@ -38,32 +38,6 @@ import { azureAnthropicProviderCreator } from './config/azure-anthropic'
import { COPILOT_DEFAULT_HEADERS } from './constants'
import { getAiSdkProviderId } from './factory'

/**
* Get the next API key in rotation
* Reuses the multi-key rotation logic from the legacy architecture
*/
function getRotatedApiKey(provider: Provider): string {
const keys = provider.apiKey.split(',').map((key) => key.trim())
const keyName = `provider:${provider.id}:last_used_key`

if (keys.length === 1) {
return keys[0]
}

const lastUsedKey = cacheService.getShared(keyName) as string | undefined
if (lastUsedKey === undefined) {
cacheService.setShared(keyName, keys[0])
return keys[0]
}

const currentIndex = keys.indexOf(lastUsedKey)
const nextIndex = (currentIndex + 1) % keys.length
const nextKey = keys[nextIndex]
cacheService.setShared(keyName, nextKey)

return nextKey
}

/**
* Handle conversion logic for special providers
*/
@ -172,7 +146,11 @@ export function providerToAiSdkConfig(actualProvider: Provider, model: Model): A
const { baseURL, endpoint } = routeToEndpoint(actualProvider.apiHost)
const baseConfig = {
baseURL: baseURL,
apiKey: getRotatedApiKey(actualProvider)
apiKey: actualProvider.apiKey
}
let includeUsage: OpenAICompletionsStreamOptions['include_usage'] = undefined
if (isSupportStreamOptionsProvider(actualProvider)) {
includeUsage = store.getState().settings.openAI?.streamOptions?.includeUsage
}

const isCopilotProvider = actualProvider.id === SystemProviderIds.copilot
@ -185,7 +163,7 @@ export function providerToAiSdkConfig(actualProvider: Provider, model: Model): A
...actualProvider.extra_headers
},
name: actualProvider.id,
includeUsage: true
includeUsage
})

return {
@ -288,7 +266,7 @@ export function providerToAiSdkConfig(actualProvider: Provider, model: Model): A
...options,
name: actualProvider.id,
...extraOptions,
includeUsage: isSupportStreamOptionsProvider(actualProvider)
includeUsage
}
}
}

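For reference, a rough sketch of the downstream effect of includeUsage on an OpenAI-compatible streaming request; the stream_options field is the standard OpenAI one, not something introduced by this diff, and the literal values are illustrative.

// Rough sketch: when includeUsage is defined, streaming requests carry stream_options
// so the final chunk reports token usage; when undefined, the field is omitted.
const includeUsage: boolean | undefined = true
const requestBody = {
  model: 'gpt-4o',
  stream: true,
  ...(includeUsage !== undefined && { stream_options: { include_usage: includeUsage } })
}
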
@ -37,7 +37,7 @@ import { getStoreSetting } from '@renderer/hooks/useSettings'
import { getAssistantSettings, getProviderByModel } from '@renderer/services/AssistantService'
import type { Assistant, Model } from '@renderer/types'
import { EFFORT_RATIO, isSystemProvider, SystemProviderIds } from '@renderer/types'
import type { OpenAISummaryText } from '@renderer/types/aiCoreTypes'
import type { OpenAIReasoningSummary } from '@renderer/types/aiCoreTypes'
import type { ReasoningEffortOptionalParams } from '@renderer/types/sdk'
import { isSupportEnableThinkingProvider } from '@renderer/utils/provider'
import { toInteger } from 'lodash'
@ -448,7 +448,7 @@ export function getOpenAIReasoningParams(
const openAI = getStoreSetting('openAI')
const summaryText = openAI.summaryText

let reasoningSummary: OpenAISummaryText = undefined
let reasoningSummary: OpenAIReasoningSummary = undefined

if (model.id.includes('o1-pro')) {
reasoningSummary = undefined
Binary file not shown. (image asset changed: 3.2 KiB → 19 KiB)
Binary file not shown. (image asset changed: 3.2 KiB → 19 KiB)
@ -14,7 +14,7 @@ import { convertImageToPng } from '@renderer/utils/image'
|
||||
import type { ImageProps as AntImageProps } from 'antd'
|
||||
import { Dropdown, Image as AntImage, Space } from 'antd'
|
||||
import { Base64 } from 'js-base64'
|
||||
import { DownloadIcon, ImageIcon } from 'lucide-react'
|
||||
import { DownloadIcon } from 'lucide-react'
|
||||
import mime from 'mime'
|
||||
import React from 'react'
|
||||
import { useTranslation } from 'react-i18next'
|
||||
@ -73,9 +73,15 @@ const ImageViewer: React.FC<ImageViewerProps> = ({ src, style, ...props }) => {
|
||||
const getContextMenuItems = (src: string, size: number = 14) => {
|
||||
return [
|
||||
{
|
||||
key: 'copy-url',
|
||||
key: 'copy-image',
|
||||
label: t('common.copy'),
|
||||
icon: <CopyIcon size={size} />,
|
||||
onClick: () => handleCopyImage(src)
|
||||
},
|
||||
{
|
||||
key: 'copy-url',
|
||||
label: t('preview.copy.src'),
|
||||
icon: <CopyIcon size={size} />,
|
||||
onClick: () => {
|
||||
navigator.clipboard.writeText(src)
|
||||
window.toast.success(t('message.copy.success'))
|
||||
@ -86,12 +92,6 @@ const ImageViewer: React.FC<ImageViewerProps> = ({ src, style, ...props }) => {
|
||||
label: t('common.download'),
|
||||
icon: <DownloadIcon size={size} />,
|
||||
onClick: () => download(src)
|
||||
},
|
||||
{
|
||||
key: 'copy-image',
|
||||
label: t('preview.copy.image'),
|
||||
icon: <ImageIcon size={size} />,
|
||||
onClick: () => handleCopyImage(src)
|
||||
}
|
||||
]
|
||||
}
|
||||
|
||||
@ -1,6 +1,7 @@
|
||||
import { loggerService } from '@logger'
|
||||
import { TopView } from '@renderer/components/TopView'
|
||||
import { handleSaveData } from '@renderer/store'
|
||||
import { handleSaveData, useAppDispatch } from '@renderer/store'
|
||||
import { setUpdateState } from '@renderer/store/runtime'
|
||||
import { Button, Modal } from 'antd'
|
||||
import type { ReleaseNoteInfo, UpdateInfo } from 'builder-util-runtime'
|
||||
import { useEffect, useState } from 'react'
|
||||
@ -22,6 +23,7 @@ const PopupContainer: React.FC<Props> = ({ releaseInfo, resolve }) => {
|
||||
const { t } = useTranslation()
|
||||
const [open, setOpen] = useState(true)
|
||||
const [isInstalling, setIsInstalling] = useState(false)
|
||||
const dispatch = useAppDispatch()
|
||||
|
||||
useEffect(() => {
|
||||
if (releaseInfo) {
|
||||
@ -50,6 +52,11 @@ const PopupContainer: React.FC<Props> = ({ releaseInfo, resolve }) => {
|
||||
resolve({})
|
||||
}
|
||||
|
||||
const onIgnore = () => {
|
||||
dispatch(setUpdateState({ ignore: true }))
|
||||
setOpen(false)
|
||||
}
|
||||
|
||||
UpdateDialogPopup.hide = onCancel
|
||||
|
||||
const releaseNotes = releaseInfo?.releaseNotes
|
||||
@ -69,7 +76,7 @@ const PopupContainer: React.FC<Props> = ({ releaseInfo, resolve }) => {
|
||||
centered
|
||||
width={720}
|
||||
footer={[
|
||||
<Button key="later" onClick={onCancel} disabled={isInstalling}>
|
||||
<Button key="later" onClick={onIgnore} disabled={isInstalling}>
|
||||
{t('update.later')}
|
||||
</Button>,
|
||||
<Button key="install" type="primary" onClick={handleInstall} loading={isInstalling}>
|
||||
|
||||
@ -1,5 +1,6 @@
|
||||
import { loggerService } from '@logger'
|
||||
import { ErrorBoundary } from '@renderer/components/ErrorBoundary'
|
||||
import { HelpTooltip } from '@renderer/components/TooltipIcons'
|
||||
import { TopView } from '@renderer/components/TopView'
|
||||
import { permissionModeCards } from '@renderer/config/agent'
|
||||
import { useAgents } from '@renderer/hooks/agents/useAgents'
|
||||
@ -340,9 +341,12 @@ const PopupContainer: React.FC<Props> = ({ agent, afterSubmit, resolve }) => {
|
||||
</FormRow>
|
||||
|
||||
<FormItem>
|
||||
<Label>
|
||||
{t('common.model')} <RequiredMark>*</RequiredMark>
|
||||
</Label>
|
||||
<div className="flex items-center gap-2">
|
||||
<Label>
|
||||
{t('common.model')} <RequiredMark>*</RequiredMark>
|
||||
</Label>
|
||||
<HelpTooltip title={t('agent.add.model.tooltip')} />
|
||||
</div>
|
||||
<SelectAgentBaseModelButton
|
||||
agentBase={tempAgentBase}
|
||||
onSelect={handleModelSelect}
|
||||
|
||||
@ -140,11 +140,11 @@ describe('DynamicVirtualList', () => {
|
||||
// Should call isSticky function during rendering
|
||||
expect(isSticky).toHaveBeenCalled()
|
||||
|
||||
// Should apply sticky styles to sticky items
|
||||
// Sticky items within visible range should have proper z-index but may be absolute until scrolled
|
||||
const stickyItem = document.querySelector('[data-index="0"]') as HTMLElement
|
||||
expect(stickyItem).toBeInTheDocument()
|
||||
expect(stickyItem).toHaveStyle('position: sticky')
|
||||
expect(stickyItem).toHaveStyle('z-index: 1')
|
||||
// When sticky item is in visible range, it gets z-index but may not be sticky yet
|
||||
expect(stickyItem).toHaveStyle('z-index: 999')
|
||||
})
|
||||
|
||||
it('should apply absolute positioning to non-sticky items', () => {
|
||||
|
||||
@ -24,7 +24,7 @@ exports[`DynamicVirtualList > basic rendering > snapshot test 1`] = `
|
||||
>
|
||||
<div
|
||||
data-index="0"
|
||||
style="position: absolute; top: 0px; left: 0px; transform: translateY(0px); width: 100%;"
|
||||
style="position: absolute; top: 0px; left: 0px; z-index: 0; pointer-events: auto; transform: translateY(0px); width: 100%;"
|
||||
>
|
||||
<div
|
||||
data-testid="item-0"
|
||||
@ -34,7 +34,7 @@ exports[`DynamicVirtualList > basic rendering > snapshot test 1`] = `
|
||||
</div>
|
||||
<div
|
||||
data-index="1"
|
||||
style="position: absolute; top: 0px; left: 0px; transform: translateY(50px); width: 100%;"
|
||||
style="position: absolute; top: 0px; left: 0px; z-index: 0; pointer-events: auto; transform: translateY(50px); width: 100%;"
|
||||
>
|
||||
<div
|
||||
data-testid="item-1"
|
||||
@ -44,7 +44,7 @@ exports[`DynamicVirtualList > basic rendering > snapshot test 1`] = `
|
||||
</div>
|
||||
<div
|
||||
data-index="2"
|
||||
style="position: absolute; top: 0px; left: 0px; transform: translateY(100px); width: 100%;"
|
||||
style="position: absolute; top: 0px; left: 0px; z-index: 0; pointer-events: auto; transform: translateY(100px); width: 100%;"
|
||||
>
|
||||
<div
|
||||
data-testid="item-2"
|
||||
|
||||
@ -62,6 +62,12 @@ export interface DynamicVirtualListProps<T> extends InheritedVirtualizerOptions
|
||||
*/
|
||||
isSticky?: (index: number) => boolean
|
||||
|
||||
/**
|
||||
* Get the depth/level of an item for hierarchical sticky positioning
|
||||
* Used with isSticky to determine ancestor relationships
|
||||
*/
|
||||
getItemDepth?: (index: number) => number
|
||||
|
||||
/**
|
||||
* Range extractor function, cannot be used with isSticky
|
||||
*/
|
||||
@ -101,6 +107,7 @@ function DynamicVirtualList<T>(props: DynamicVirtualListProps<T>) {
|
||||
size,
|
||||
estimateSize,
|
||||
isSticky,
|
||||
getItemDepth,
|
||||
rangeExtractor: customRangeExtractor,
|
||||
itemContainerStyle,
|
||||
scrollerStyle,
|
||||
@ -115,7 +122,7 @@ function DynamicVirtualList<T>(props: DynamicVirtualListProps<T>) {
|
||||
const internalScrollerRef = useRef<HTMLDivElement>(null)
|
||||
const scrollerRef = internalScrollerRef
|
||||
|
||||
const activeStickyIndexRef = useRef(0)
|
||||
const activeStickyIndexesRef = useRef<number[]>([])
|
||||
|
||||
const stickyIndexes = useMemo(() => {
|
||||
if (!isSticky) return []
|
||||
@ -124,21 +131,54 @@ function DynamicVirtualList<T>(props: DynamicVirtualListProps<T>) {
|
||||
|
||||
const internalStickyRangeExtractor = useCallback(
|
||||
(range: Range) => {
|
||||
// The active sticky index is the last one that is before or at the start of the visible range
|
||||
const newActiveStickyIndex =
|
||||
[...stickyIndexes].reverse().find((index) => range.startIndex >= index) ?? stickyIndexes[0] ?? 0
|
||||
const activeStickies: number[] = []
|
||||
|
||||
if (newActiveStickyIndex !== activeStickyIndexRef.current) {
|
||||
activeStickyIndexRef.current = newActiveStickyIndex
|
||||
if (getItemDepth) {
|
||||
// With depth information, we can build a proper ancestor chain
|
||||
// Find all sticky items before the visible range
|
||||
const stickiesBeforeRange = stickyIndexes.filter((index) => index < range.startIndex)
|
||||
|
||||
if (stickiesBeforeRange.length > 0) {
|
||||
// Find the depth of the first visible item (or last sticky before it)
|
||||
const firstVisibleIndex = range.startIndex
|
||||
const referenceDepth = getItemDepth(firstVisibleIndex)
|
||||
|
||||
// Build ancestor chain: include all sticky parents
|
||||
const ancestorChain: number[] = []
|
||||
let minDepth = referenceDepth
|
||||
|
||||
// Walk backwards from the last sticky before visible range
|
||||
for (let i = stickiesBeforeRange.length - 1; i >= 0; i--) {
|
||||
const stickyIndex = stickiesBeforeRange[i]
|
||||
const stickyDepth = getItemDepth(stickyIndex)
|
||||
|
||||
// Include this sticky if it's a parent (smaller depth) of our reference
|
||||
if (stickyDepth < minDepth) {
|
||||
ancestorChain.unshift(stickyIndex)
|
||||
minDepth = stickyDepth
|
||||
}
|
||||
}
|
||||
|
||||
activeStickies.push(...ancestorChain)
|
||||
}
|
||||
} else {
|
||||
// Fallback: without depth info, just use the last sticky before range
|
||||
const lastStickyBeforeRange = [...stickyIndexes].reverse().find((index) => index < range.startIndex)
|
||||
if (lastStickyBeforeRange !== undefined) {
|
||||
activeStickies.push(lastStickyBeforeRange)
|
||||
}
|
||||
}
|
||||
|
||||
// Merge the active sticky index and the default range extractor
|
||||
const next = new Set([activeStickyIndexRef.current, ...defaultRangeExtractor(range)])
|
||||
// Update the ref with current active stickies
|
||||
activeStickyIndexesRef.current = activeStickies
|
||||
|
||||
// Merge the active sticky indexes and the default range extractor
|
||||
const next = new Set([...activeStickyIndexesRef.current, ...defaultRangeExtractor(range)])
|
||||
|
||||
// Sort the set to maintain proper order
|
||||
return [...next].sort((a, b) => a - b)
|
||||
},
|
||||
[stickyIndexes]
|
||||
[stickyIndexes, getItemDepth]
|
||||
)
|
||||
|
||||
const rangeExtractor = customRangeExtractor ?? (isSticky ? internalStickyRangeExtractor : undefined)
|
||||
@ -221,14 +261,47 @@ function DynamicVirtualList<T>(props: DynamicVirtualListProps<T>) {
}}>
{virtualItems.map((virtualItem) => {
const isItemSticky = stickyIndexes.includes(virtualItem.index)
const isItemActiveSticky = isItemSticky && activeStickyIndexRef.current === virtualItem.index
const isItemActiveSticky = isItemSticky && activeStickyIndexesRef.current.includes(virtualItem.index)

// Calculate the sticky offset for multi-level sticky headers
const activeStickyIndex = isItemActiveSticky ? activeStickyIndexesRef.current.indexOf(virtualItem.index) : -1

// Calculate cumulative offset based on actual sizes of previous sticky items
let stickyOffset = 0
if (activeStickyIndex >= 0) {
for (let i = 0; i < activeStickyIndex; i++) {
const prevStickyIndex = activeStickyIndexesRef.current[i]
stickyOffset += estimateSize(prevStickyIndex)
}
}

// Check if this item is visually covered by sticky items
// If covered, disable pointer events to prevent hover/click bleeding through
const isCoveredBySticky = (() => {
if (!activeStickyIndexesRef.current.length) return false
if (isItemActiveSticky) return false // Sticky items themselves are not covered

// Calculate if this item's visual position is under any sticky header
const itemVisualTop = virtualItem.start
let totalStickyHeight = 0
for (const stickyIdx of activeStickyIndexesRef.current) {
totalStickyHeight += estimateSize(stickyIdx)
}

// If item starts within the sticky area, it's covered
return itemVisualTop < totalStickyHeight
})()

const style: React.CSSProperties = {
...itemContainerStyle,
position: isItemActiveSticky ? 'sticky' : 'absolute',
top: 0,
top: isItemActiveSticky ? stickyOffset : 0,
left: 0,
zIndex: isItemSticky ? 1 : undefined,
zIndex: isItemActiveSticky ? 1000 + (100 - activeStickyIndex) : isItemSticky ? 999 : 0,
pointerEvents: isCoveredBySticky ? 'none' : 'auto',
...(isItemActiveSticky && {
backgroundColor: 'var(--color-background)'
}),
...(horizontal
? {
transform: isItemActiveSticky ? undefined : `translateX(${virtualItem.start}px)`,

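For a flattened tree of group headers and leaves, the isSticky and getItemDepth callbacks used above might be derived like this; the Row shape and sample data are hypothetical, only the two callback names mirror the props in this diff.

// Hypothetical flattened tree feeding the virtual list.
interface Row {
  label: string
  depth: number // 0 = top-level group, 1 = subgroup, 2 = leaf
  isGroupHeader: boolean
}

const rows: Row[] = [
  { label: 'Providers', depth: 0, isGroupHeader: true },
  { label: 'OpenAI', depth: 1, isGroupHeader: true },
  { label: 'gpt-5', depth: 2, isGroupHeader: false },
  { label: 'Anthropic', depth: 1, isGroupHeader: true },
  { label: 'claude-sonnet-4-5', depth: 2, isGroupHeader: false }
]

// Group headers stick; depth lets the range extractor keep the whole ancestor chain pinned.
const isSticky = (index: number) => rows[index].isGroupHeader
const getItemDepth = (index: number) => rows[index].depth
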
@ -5,6 +5,7 @@ export const SYSTEM_PROMPT_THRESHOLD = 128
|
||||
export const DEFAULT_KNOWLEDGE_DOCUMENT_COUNT = 6
|
||||
export const DEFAULT_KNOWLEDGE_THRESHOLD = 0.0
|
||||
export const DEFAULT_WEBSEARCH_RAG_DOCUMENT_COUNT = 1
|
||||
export const DEFAULT_STREAM_OPTIONS_INCLUDE_USAGE = true
|
||||
|
||||
export const platform = window.electron?.process?.platform
|
||||
export const isMac = platform === 'darwin'
|
||||
|
||||
@ -101,7 +101,8 @@ const ORIGIN_DEFAULT_MIN_APPS: MinAppType[] = [
|
||||
id: 'gemini',
|
||||
name: 'Gemini',
|
||||
url: 'https://gemini.google.com/',
|
||||
logo: GeminiAppLogo
|
||||
logo: GeminiAppLogo,
|
||||
bodered: true
|
||||
},
|
||||
{
|
||||
id: 'silicon',
|
||||
|
||||
@ -1016,7 +1016,7 @@ describe('Gemini Models', () => {
|
||||
provider: '',
|
||||
group: ''
|
||||
})
|
||||
).toBe(false)
|
||||
).toBe(true)
|
||||
expect(
|
||||
isSupportedThinkingTokenGeminiModel({
|
||||
id: 'gemini-3.0-flash-image-preview',
|
||||
@ -1224,7 +1224,7 @@ describe('Gemini Models', () => {
|
||||
provider: '',
|
||||
group: ''
|
||||
})
|
||||
).toBe(false)
|
||||
).toBe(true)
|
||||
expect(
|
||||
isGeminiReasoningModel({
|
||||
id: 'gemini-3.5-flash-image-preview',
|
||||
|
||||
@ -25,11 +25,13 @@ import {
|
||||
isGenerateImageModels,
|
||||
isMaxTemperatureOneModel,
|
||||
isNotSupportSystemMessageModel,
|
||||
isNotSupportTemperatureAndTopP,
|
||||
isNotSupportTextDeltaModel,
|
||||
isSupportedFlexServiceTier,
|
||||
isSupportedModel,
|
||||
isSupportFlexServiceTierModel,
|
||||
isSupportTemperatureModel,
|
||||
isSupportTopPModel,
|
||||
isTemperatureTopPMutuallyExclusiveModel,
|
||||
isVisionModels,
|
||||
isZhipuModel
|
||||
} from '../utils'
|
||||
@ -273,27 +275,104 @@ describe('model utils', () => {
|
||||
})
|
||||
|
||||
describe('Temperature and top-p support', () => {
|
||||
describe('isNotSupportTemperatureAndTopP', () => {
|
||||
it('returns true for reasoning models', () => {
|
||||
describe('isSupportTemperatureModel', () => {
|
||||
it('returns false for reasoning models (non-open weight)', () => {
|
||||
const model = createModel({ id: 'o1' })
|
||||
reasoningMock.mockReturnValue(true)
|
||||
expect(isNotSupportTemperatureAndTopP(model)).toBe(true)
|
||||
expect(isSupportTemperatureModel(model)).toBe(false)
|
||||
})
|
||||
|
||||
it('returns false for open weight models', () => {
|
||||
it('returns true for open weight models', () => {
|
||||
const openWeight = createModel({ id: 'gpt-oss-debug' })
|
||||
expect(isNotSupportTemperatureAndTopP(openWeight)).toBe(false)
|
||||
expect(isSupportTemperatureModel(openWeight)).toBe(true)
|
||||
})
|
||||
|
||||
it('returns true for chat-only models without reasoning', () => {
|
||||
it('returns false for chat-only models', () => {
|
||||
const chatOnly = createModel({ id: 'o1-preview' })
|
||||
reasoningMock.mockReturnValue(false)
|
||||
expect(isNotSupportTemperatureAndTopP(chatOnly)).toBe(true)
|
||||
expect(isSupportTemperatureModel(chatOnly)).toBe(false)
|
||||
})
|
||||
|
||||
it('returns true for Qwen MT models', () => {
|
||||
it('returns false for Qwen MT models', () => {
|
||||
const qwenMt = createModel({ id: 'qwen-mt-large', provider: 'aliyun' })
|
||||
expect(isNotSupportTemperatureAndTopP(qwenMt)).toBe(true)
|
||||
expect(isSupportTemperatureModel(qwenMt)).toBe(false)
|
||||
})
|
||||
|
||||
it('returns false for null/undefined models', () => {
|
||||
expect(isSupportTemperatureModel(null)).toBe(false)
|
||||
expect(isSupportTemperatureModel(undefined)).toBe(false)
|
||||
})
|
||||
|
||||
it('returns true for regular GPT models', () => {
|
||||
const model = createModel({ id: 'gpt-4' })
|
||||
expect(isSupportTemperatureModel(model)).toBe(true)
|
||||
})
|
||||
})
|
||||
|
||||
describe('isSupportTopPModel', () => {
|
||||
it('returns false for reasoning models (non-open weight)', () => {
|
||||
const model = createModel({ id: 'o1' })
|
||||
reasoningMock.mockReturnValue(true)
|
||||
expect(isSupportTopPModel(model)).toBe(false)
|
||||
})
|
||||
|
||||
it('returns true for open weight models', () => {
|
||||
const openWeight = createModel({ id: 'gpt-oss-debug' })
|
||||
expect(isSupportTopPModel(openWeight)).toBe(true)
|
||||
})
|
||||
|
||||
it('returns false for chat-only models', () => {
|
||||
const chatOnly = createModel({ id: 'o1-preview' })
|
||||
expect(isSupportTopPModel(chatOnly)).toBe(false)
|
||||
})
|
||||
|
||||
it('returns false for Qwen MT models', () => {
|
||||
const qwenMt = createModel({ id: 'qwen-mt-large', provider: 'aliyun' })
|
||||
expect(isSupportTopPModel(qwenMt)).toBe(false)
|
||||
})
|
||||
|
||||
it('returns false for null/undefined models', () => {
|
||||
expect(isSupportTopPModel(null)).toBe(false)
|
||||
expect(isSupportTopPModel(undefined)).toBe(false)
|
||||
})
|
||||
|
||||
it('returns true for regular GPT models', () => {
|
||||
const model = createModel({ id: 'gpt-4' })
|
||||
expect(isSupportTopPModel(model)).toBe(true)
|
||||
})
|
||||
})
|
||||
|
||||
describe('isTemperatureTopPMutuallyExclusiveModel', () => {
|
||||
it('returns true for Claude 4.5 reasoning models', () => {
|
||||
const claude45Sonnet = createModel({ id: 'claude-sonnet-4.5-20250514' })
|
||||
expect(isTemperatureTopPMutuallyExclusiveModel(claude45Sonnet)).toBe(true)
|
||||
|
||||
const claude45Opus = createModel({ id: 'claude-opus-4.5-20250514' })
|
||||
expect(isTemperatureTopPMutuallyExclusiveModel(claude45Opus)).toBe(true)
|
||||
})
|
||||
|
||||
it('returns false for Claude 4 models', () => {
|
||||
const claude4Sonnet = createModel({ id: 'claude-sonnet-4-20250514' })
|
||||
expect(isTemperatureTopPMutuallyExclusiveModel(claude4Sonnet)).toBe(false)
|
||||
})
|
||||
|
||||
it('returns false for Claude 3.x models', () => {
|
||||
const claude35Sonnet = createModel({ id: 'claude-3-5-sonnet-20241022' })
|
||||
expect(isTemperatureTopPMutuallyExclusiveModel(claude35Sonnet)).toBe(false)
|
||||
|
||||
const claude3Opus = createModel({ id: 'claude-3-opus-20240229' })
|
||||
expect(isTemperatureTopPMutuallyExclusiveModel(claude3Opus)).toBe(false)
|
||||
})
|
||||
|
||||
it('returns false for other AI models', () => {
|
||||
expect(isTemperatureTopPMutuallyExclusiveModel(createModel({ id: 'gpt-4o' }))).toBe(false)
|
||||
expect(isTemperatureTopPMutuallyExclusiveModel(createModel({ id: 'o1' }))).toBe(false)
|
||||
expect(isTemperatureTopPMutuallyExclusiveModel(createModel({ id: 'gemini-2.0-flash' }))).toBe(false)
|
||||
expect(isTemperatureTopPMutuallyExclusiveModel(createModel({ id: 'qwen-max' }))).toBe(false)
|
||||
})
|
||||
|
||||
it('returns false for null/undefined models', () => {
|
||||
expect(isTemperatureTopPMutuallyExclusiveModel(null)).toBe(false)
|
||||
expect(isTemperatureTopPMutuallyExclusiveModel(undefined)).toBe(false)
|
||||
})
|
||||
})
|
||||
})
|
||||
|
||||
@ -240,47 +240,35 @@ export const SYSTEM_MODELS: Record<SystemProviderId | 'defaultModel', Model[]> =
|
||||
],
|
||||
|
||||
burncloud: [
|
||||
{ id: 'claude-3-7-sonnet-20250219-thinking', provider: 'burncloud', name: 'Claude 3.7 thinking', group: 'Claude' },
|
||||
{ id: 'claude-3-7-sonnet-20250219', provider: 'burncloud', name: 'Claude 3.7 Sonnet', group: 'Claude 3.7' },
|
||||
{ id: 'claude-3-5-sonnet-20241022', provider: 'burncloud', name: 'Claude 3.5 Sonnet', group: 'Claude 3.5' },
|
||||
{ id: 'claude-3-5-haiku-20241022', provider: 'burncloud', name: 'Claude 3.5 Haiku', group: 'Claude 3.5' },
|
||||
{ id: 'claude-opus-4-5-20251101', provider: 'burncloud', name: 'Claude 4.5 Opus', group: 'Claude 4.5' },
|
||||
{ id: 'claude-sonnet-4-5-20250929', provider: 'burncloud', name: 'Claude 4.5 Sonnet', group: 'Claude 4.5' },
|
||||
{ id: 'claude-haiku-4-5-20251001', provider: 'burncloud', name: 'Claude 4.5 Haiku', group: 'Claude 4.5' },
|
||||
|
||||
{ id: 'gpt-4.5-preview', provider: 'burncloud', name: 'gpt-4.5-preview', group: 'gpt-4.5' },
|
||||
{ id: 'gpt-4o', provider: 'burncloud', name: 'GPT-4o', group: 'GPT 4o' },
|
||||
{ id: 'gpt-4o-mini', provider: 'burncloud', name: 'GPT-4o-mini', group: 'GPT 4o' },
|
||||
{ id: 'o3', provider: 'burncloud', name: 'GPT-o1-mini', group: 'o1' },
|
||||
{ id: 'o3-mini', provider: 'burncloud', name: 'GPT-o1-preview', group: 'o1' },
|
||||
{ id: 'o1-mini', provider: 'burncloud', name: 'GPT-o1-mini', group: 'o1' },
|
||||
{ id: 'gpt-5', provider: 'burncloud', name: 'GPT 5', group: 'GPT 5' },
|
||||
{ id: 'gpt-5.1', provider: 'burncloud', name: 'GPT 5.1', group: 'GPT 5.1' },
|
||||
|
||||
{ id: 'gemini-2.5-pro-preview-03-25', provider: 'burncloud', name: 'Gemini 2.5 Preview', group: 'Geminit 2.5' },
|
||||
{ id: 'gemini-2.5-pro-exp-03-25', provider: 'burncloud', name: 'Gemini 2.5 Pro Exp', group: 'Geminit 2.5' },
|
||||
{ id: 'gemini-2.0-flash-lite', provider: 'burncloud', name: 'Gemini 2.0 Flash Lite', group: 'Geminit 2.0' },
|
||||
{ id: 'gemini-2.0-flash-exp', provider: 'burncloud', name: 'Gemini 2.0 Flash Exp', group: 'Geminit 2.0' },
|
||||
{ id: 'gemini-2.0-flash', provider: 'burncloud', name: 'Gemini 2.0 Flash', group: 'Geminit 2.0' },
|
||||
{ id: 'gemini-2.5-flash', provider: 'burncloud', name: 'Gemini 2.5 Flash', group: 'Gemini 2.5' },
|
||||
{ id: 'gemini-2.5-flash-image', provider: 'burncloud', name: 'Gemini 2.5 Flash Image', group: 'Gemini 2.5' },
|
||||
{ id: 'gemini-2.5-pro', provider: 'burncloud', name: 'Gemini 2.5 Pro', group: 'Gemini 2.5' },
|
||||
{ id: 'gemini-3-pro-preview', provider: 'burncloud', name: 'Gemini 3 Pro Preview', group: 'Gemini 3' },
|
||||
|
||||
{ id: 'deepseek-r1', name: 'DeepSeek-R1', provider: 'burncloud', group: 'deepseek-ai' },
|
||||
{ id: 'deepseek-v3', name: 'DeepSeek-V3', provider: 'burncloud', group: 'deepseek-ai' }
|
||||
{ id: 'deepseek-reasoner', name: 'DeepSeek Reasoner', provider: 'burncloud', group: 'deepseek-ai' },
|
||||
{ id: 'deepseek-chat', name: 'DeepSeek Chat', provider: 'burncloud', group: 'deepseek-ai' }
|
||||
],
|
||||
ovms: [],
|
||||
ollama: [],
|
||||
lmstudio: [],
|
||||
silicon: [
|
||||
{
|
||||
id: 'deepseek-ai/DeepSeek-R1',
|
||||
name: 'deepseek-ai/DeepSeek-R1',
|
||||
id: 'deepseek-ai/DeepSeek-V3.2',
|
||||
name: 'deepseek-ai/DeepSeek-V3.2',
|
||||
provider: 'silicon',
|
||||
group: 'deepseek-ai'
|
||||
},
|
||||
{
|
||||
id: 'deepseek-ai/DeepSeek-V3',
|
||||
name: 'deepseek-ai/DeepSeek-V3',
|
||||
id: 'Qwen/Qwen3-8B',
|
||||
name: 'Qwen/Qwen3-8B',
|
||||
provider: 'silicon',
|
||||
group: 'deepseek-ai'
|
||||
},
|
||||
{
|
||||
id: 'Qwen/Qwen2.5-7B-Instruct',
|
||||
provider: 'silicon',
|
||||
name: 'Qwen2.5-7B-Instruct',
|
||||
group: 'Qwen'
|
||||
},
|
||||
{
|
||||
@ -288,79 +276,31 @@ export const SYSTEM_MODELS: Record<SystemProviderId | 'defaultModel', Model[]> =
|
||||
name: 'BAAI/bge-m3',
|
||||
provider: 'silicon',
|
||||
group: 'BAAI'
|
||||
},
|
||||
{
|
||||
id: 'Qwen/Qwen3-8B',
|
||||
name: 'Qwen/Qwen3-8B',
|
||||
provider: 'silicon',
|
||||
group: 'Qwen'
|
||||
}
|
||||
],
|
||||
ppio: [
|
||||
{
|
||||
id: 'deepseek/deepseek-r1-0528',
|
||||
id: 'deepseek/deepseek-v3.2',
|
||||
provider: 'ppio',
|
||||
name: 'DeepSeek R1-0528',
|
||||
name: 'DeepSeek V3.2',
|
||||
group: 'deepseek'
|
||||
},
|
||||
{
|
||||
id: 'deepseek/deepseek-v3-0324',
|
||||
id: 'minimax/minimax-m2',
|
||||
provider: 'ppio',
|
||||
name: 'DeepSeek V3-0324',
|
||||
group: 'deepseek'
|
||||
},
|
||||
{
|
||||
id: 'deepseek/deepseek-r1-turbo',
|
||||
provider: 'ppio',
|
||||
name: 'DeepSeek R1 Turbo',
|
||||
group: 'deepseek'
|
||||
},
|
||||
{
|
||||
id: 'deepseek/deepseek-v3-turbo',
|
||||
provider: 'ppio',
|
||||
name: 'DeepSeek V3 Turbo',
|
||||
group: 'deepseek'
|
||||
},
|
||||
{
|
||||
id: 'deepseek/deepseek-r1/community',
|
||||
name: 'DeepSeek: DeepSeek R1 (Community)',
|
||||
provider: 'ppio',
|
||||
group: 'deepseek'
|
||||
},
|
||||
{
|
||||
id: 'deepseek/deepseek-v3/community',
|
||||
name: 'DeepSeek: DeepSeek V3 (Community)',
|
||||
provider: 'ppio',
|
||||
group: 'deepseek'
|
||||
},
|
||||
{
|
||||
id: 'minimaxai/minimax-m1-80k',
|
||||
provider: 'ppio',
|
||||
name: 'MiniMax M1-80K',
|
||||
name: 'MiniMax M2',
|
||||
group: 'minimaxai'
|
||||
},
|
||||
{
|
||||
id: 'qwen/qwen3-235b-a22b-fp8',
|
||||
id: 'qwen/qwen3-235b-a22b-instruct-2507',
|
||||
provider: 'ppio',
|
||||
name: 'Qwen3 235B',
|
||||
name: 'Qwen3-235b-a22b-instruct-2507',
|
||||
group: 'qwen'
|
||||
},
|
||||
{
|
||||
id: 'qwen/qwen3-32b-fp8',
|
||||
id: 'qwen/qwen3-vl-235b-a22b-instruct',
|
||||
provider: 'ppio',
|
||||
name: 'Qwen3 32B',
|
||||
group: 'qwen'
|
||||
},
|
||||
{
|
||||
id: 'qwen/qwen3-30b-a3b-fp8',
|
||||
provider: 'ppio',
|
||||
name: 'Qwen3 30B',
|
||||
group: 'qwen'
|
||||
},
|
||||
{
|
||||
id: 'qwen/qwen2.5-vl-72b-instruct',
|
||||
provider: 'ppio',
|
||||
name: 'Qwen2.5 VL 72B',
|
||||
name: 'Qwen3-vl-235b-a22b-instruct',
|
||||
group: 'qwen'
|
||||
},
|
||||
{
|
||||
@ -378,11 +318,13 @@ export const SYSTEM_MODELS: Record<SystemProviderId | 'defaultModel', Model[]> =
|
||||
],
|
||||
alayanew: [],
|
||||
openai: [
|
||||
{ id: 'gpt-4.5-preview', provider: 'openai', name: ' gpt-4.5-preview', group: 'gpt-4.5' },
|
||||
{ id: 'gpt-4o', provider: 'openai', name: ' GPT-4o', group: 'GPT 4o' },
|
||||
{ id: 'gpt-4o-mini', provider: 'openai', name: ' GPT-4o-mini', group: 'GPT 4o' },
|
||||
{ id: 'o1-mini', provider: 'openai', name: ' o1-mini', group: 'o1' },
|
||||
{ id: 'o1-preview', provider: 'openai', name: ' o1-preview', group: 'o1' }
|
||||
{ id: 'gpt-5.1', provider: 'openai', name: ' GPT 5.1', group: 'GPT 5.1' },
|
||||
{ id: 'gpt-5', provider: 'openai', name: ' GPT 5', group: 'GPT 5' },
|
||||
{ id: 'gpt-5-mini', provider: 'openai', name: ' GPT 5 Mini', group: 'GPT 5' },
|
||||
{ id: 'gpt-5-nano', provider: 'openai', name: ' GPT 5 Nano', group: 'GPT 5' },
|
||||
{ id: 'gpt-5-pro', provider: 'openai', name: ' GPT 5 Pro', group: 'GPT 5' },
|
||||
{ id: 'gpt-5-chat', provider: 'openai', name: ' GPT 5 Chat', group: 'GPT 5' },
|
||||
{ id: 'gpt-image-1', provider: 'openai', name: ' GPT Image 1', group: 'GPT Image' }
|
||||
],
|
||||
'azure-openai': [
|
||||
{
|
||||
@ -400,96 +342,54 @@ export const SYSTEM_MODELS: Record<SystemProviderId | 'defaultModel', Model[]> =
|
||||
],
|
||||
gemini: [
|
||||
{
|
||||
id: 'gemini-1.5-flash',
|
||||
id: 'gemini-2.5-flash',
|
||||
provider: 'gemini',
|
||||
name: 'Gemini 1.5 Flash',
|
||||
group: 'Gemini 1.5'
|
||||
name: 'Gemini 2.5 Flash',
|
||||
group: 'Gemini 2.5'
|
||||
},
|
||||
{
|
||||
id: 'gemini-1.5-flash-8b',
|
||||
id: 'gemini-2.5-pro',
|
||||
provider: 'gemini',
|
||||
name: 'Gemini 1.5 Flash (8B)',
|
||||
group: 'Gemini 1.5'
|
||||
},
|
||||
{
|
||||
id: 'gemini-1.5-pro',
|
||||
name: 'Gemini 1.5 Pro',
|
||||
provider: 'gemini',
|
||||
group: 'Gemini 1.5'
|
||||
},
|
||||
{
|
||||
id: 'gemini-2.0-flash',
|
||||
provider: 'gemini',
|
||||
name: 'Gemini 2.0 Flash',
|
||||
group: 'Gemini 2.0'
|
||||
name: 'Gemini 2.5 Pro',
|
||||
group: 'Gemini 2.5'
|
||||
},
|
||||
{
|
||||
id: 'gemini-2.5-flash-image-preview',
|
||||
provider: 'gemini',
|
||||
name: 'Gemini 2.5 Flash Image',
|
||||
group: 'Gemini 2.5'
|
||||
},
|
||||
{
|
||||
id: 'gemini-3-pro-image-preview',
|
||||
provider: 'gemini',
|
||||
name: 'Gemini 3 Pro Image Privew',
|
||||
group: 'Gemini 3'
|
||||
},
|
||||
{
|
||||
id: 'gemini-3-pro-preview',
|
||||
provider: 'gemini',
|
||||
name: 'Gemini 3 Pro Preview',
|
||||
group: 'Gemini 3'
|
||||
}
|
||||
],
|
||||
anthropic: [
|
||||
{
|
||||
id: 'claude-haiku-4-5-20251001',
|
||||
provider: 'anthropic',
|
||||
name: 'Claude Haiku 4.5',
|
||||
group: 'Claude 4.5'
|
||||
},
|
||||
{
|
||||
id: 'claude-sonnet-4-5-20250929',
|
||||
id: 'claude-sonnet-4-5',
|
||||
provider: 'anthropic',
|
||||
name: 'Claude Sonnet 4.5',
|
||||
group: 'Claude 4.5'
|
||||
},
|
||||
{
|
||||
id: 'claude-sonnet-4-20250514',
|
||||
id: 'claude-haiku-4-5',
|
||||
provider: 'anthropic',
|
||||
name: 'Claude Sonnet 4',
|
||||
group: 'Claude 4'
|
||||
name: 'Claude Haiku 4.5',
|
||||
group: 'Claude 4.5'
|
||||
},
|
||||
{
|
||||
id: 'claude-opus-4-20250514',
|
||||
id: 'claude-opus-4-5',
|
||||
provider: 'anthropic',
|
||||
name: 'Claude Opus 4',
|
||||
group: 'Claude 4'
|
||||
},
|
||||
{
|
||||
id: 'claude-3-7-sonnet-20250219',
|
||||
provider: 'anthropic',
|
||||
name: 'Claude 3.7 Sonnet',
|
||||
group: 'Claude 3.7'
|
||||
},
|
||||
{
|
||||
id: 'claude-3-5-sonnet-20241022',
|
||||
provider: 'anthropic',
|
||||
name: 'Claude 3.5 Sonnet',
|
||||
group: 'Claude 3.5'
|
||||
},
|
||||
{
|
||||
id: 'claude-3-5-haiku-20241022',
|
||||
provider: 'anthropic',
|
||||
name: 'Claude 3.5 Haiku',
|
||||
group: 'Claude 3.5'
|
||||
},
|
||||
{
|
||||
id: 'claude-3-5-sonnet-20240620',
|
||||
provider: 'anthropic',
|
||||
name: 'Claude 3.5 Sonnet (Legacy)',
|
||||
group: 'Claude 3.5'
|
||||
},
|
||||
{
|
||||
id: 'claude-3-opus-20240229',
|
||||
provider: 'anthropic',
|
||||
name: 'Claude 3 Opus',
|
||||
group: 'Claude 3'
|
||||
},
|
||||
{
|
||||
id: 'claude-3-haiku-20240307',
|
||||
provider: 'anthropic',
|
||||
name: 'Claude 3 Haiku',
|
||||
group: 'Claude 3'
|
||||
name: 'Claude Opus 4.5',
|
||||
group: 'Claude 4.5'
|
||||
}
|
||||
],
|
||||
deepseek: [
|
||||
@ -1073,18 +973,6 @@ export const SYSTEM_MODELS: Record<SystemProviderId | 'defaultModel', Model[]> =
|
||||
provider: 'grok',
|
||||
name: 'Grok 3 Mini Fast',
|
||||
group: 'Grok'
|
||||
},
|
||||
{
|
||||
id: 'grok-2-vision-1212',
|
||||
provider: 'grok',
|
||||
name: 'Grok 2 Vision 1212',
|
||||
group: 'Grok'
|
||||
},
|
||||
{
|
||||
id: 'grok-2-1212',
|
||||
provider: 'grok',
|
||||
name: 'Grok 2 1212',
|
||||
group: 'Grok'
|
||||
}
|
||||
],
|
||||
mistral: [
|
||||
@ -1808,34 +1696,58 @@ export const SYSTEM_MODELS: Record<SystemProviderId | 'defaultModel', Model[]> =
|
||||
],
|
||||
aionly: [
|
||||
{
|
||||
id: 'claude-opus-4.1',
|
||||
name: 'claude-opus-4.1',
|
||||
id: 'claude-opus-4-5-20251101',
|
||||
name: 'Claude Opus 4.5',
|
||||
provider: 'aionly',
|
||||
group: 'claude'
|
||||
group: 'Anthropic'
|
||||
},
|
||||
{
|
||||
id: 'claude-sonnet4',
|
||||
name: 'claude-sonnet4',
|
||||
id: 'claude-haiku-4-5-20251001',
|
||||
name: 'Claude Haiku 4.5',
|
||||
provider: 'aionly',
|
||||
group: 'claude'
|
||||
group: 'Anthropic'
|
||||
},
|
||||
{
|
||||
id: 'claude-3.5-sonnet-v2',
|
||||
name: 'claude-3.5-sonnet-v2',
|
||||
id: 'claude-sonnet-4-5-20250929',
|
||||
name: 'Claude Sonnet 4.5',
|
||||
provider: 'aionly',
|
||||
group: 'claude'
|
||||
group: 'Anthropic'
|
||||
},
|
||||
{
|
||||
id: 'gpt-4.1',
|
||||
name: 'gpt-4.1',
|
||||
id: 'gpt-5.1',
|
||||
name: 'GPT-5.1',
|
||||
provider: 'aionly',
|
||||
group: 'gpt'
|
||||
group: 'OpenAI'
|
||||
},
|
||||
{
|
||||
id: 'gpt-5.1-chat',
|
||||
name: 'GPT-5.1 Chat',
|
||||
provider: 'aionly',
|
||||
group: 'OpenAI'
|
||||
},
|
||||
{
|
||||
id: 'gpt-5-pro',
|
||||
name: 'GPT 5 Pro',
|
||||
provider: 'aionly',
|
||||
group: 'OpenAI'
|
||||
},
|
||||
{
|
||||
id: 'gemini-3-pro-preview',
|
||||
name: 'Gemini 3 Pro Preview',
|
||||
provider: 'aionly',
|
||||
group: 'Google'
|
||||
},
|
||||
{
|
||||
id: 'gemini-2.5-pro',
|
||||
name: 'Gemini 2.5 Pro',
|
||||
provider: 'aionly',
|
||||
group: 'Google'
|
||||
},
|
||||
{
|
||||
id: 'gemini-2.5-flash',
|
||||
name: 'gemini-2.5-flash',
|
||||
name: 'Gemini 2.5 Flash',
|
||||
provider: 'aionly',
|
||||
group: 'gemini'
|
||||
group: 'Google'
|
||||
}
|
||||
],
|
||||
longcat: [
|
||||
|
||||
@ -163,6 +163,7 @@ import ZhipuProviderLogo from '@renderer/assets/images/providers/zhipu.png'
|
||||
import type { Model } from '@renderer/types'
|
||||
|
||||
export function getModelLogoById(modelId: string): string | undefined {
|
||||
// FIXME: This is always true. Either remove it or fetch it.
|
||||
const isLight = true
|
||||
|
||||
if (!modelId) {
|
||||
|
||||
@ -277,6 +277,10 @@ export const GEMINI_THINKING_MODEL_REGEX =
|
||||
export const isSupportedThinkingTokenGeminiModel = (model: Model): boolean => {
|
||||
const modelId = getLowerBaseModelName(model.id, '/')
|
||||
if (GEMINI_THINKING_MODEL_REGEX.test(modelId)) {
|
||||
// ref: https://docs.cloud.google.com/vertex-ai/generative-ai/docs/models/gemini/3-pro-image
|
||||
if (modelId.includes('gemini-3-pro-image')) {
|
||||
return true
|
||||
}
|
||||
if (modelId.includes('image') || modelId.includes('tts')) {
|
||||
return false
|
||||
}
|
||||
|
||||
@ -14,6 +14,7 @@ import {
isSupportVerbosityModel
} from './openai'
import { isQwenMTModel } from './qwen'
import { isClaude45ReasoningModel } from './reasoning'
import { isGenerateImageModel, isTextToImageModel, isVisionModel } from './vision'
export const NOT_SUPPORTED_REGEX = /(?:^tts|whisper|speech)/i
export const GEMINI_FLASH_MODEL_REGEX = new RegExp('gemini.*-flash.*$', 'i')
@ -42,20 +43,71 @@ export function isSupportedModel(model: OpenAI.Models.Model): boolean {
return !NOT_SUPPORTED_REGEX.test(modelId)
}

export function isNotSupportTemperatureAndTopP(model: Model): boolean {
/**
* Check if the model supports temperature parameter
* @param model - The model to check
* @returns true if the model supports temperature parameter
*/
export function isSupportTemperatureModel(model: Model | undefined | null): boolean {
if (!model) {
return true
return false
}

if (
(isOpenAIReasoningModel(model) && !isOpenAIOpenWeightModel(model)) ||
isOpenAIChatCompletionOnlyModel(model) ||
isQwenMTModel(model)
) {
return true
// OpenAI reasoning models (except open weight) don't support temperature
if (isOpenAIReasoningModel(model) && !isOpenAIOpenWeightModel(model)) {
return false
}

return false
// OpenAI chat completion only models don't support temperature
if (isOpenAIChatCompletionOnlyModel(model)) {
return false
}

// Qwen MT models don't support temperature
if (isQwenMTModel(model)) {
return false
}

return true
}

/**
* Check if the model supports top_p parameter
* @param model - The model to check
* @returns true if the model supports top_p parameter
*/
export function isSupportTopPModel(model: Model | undefined | null): boolean {
if (!model) {
return false
}

// OpenAI reasoning models (except open weight) don't support top_p
if (isOpenAIReasoningModel(model) && !isOpenAIOpenWeightModel(model)) {
return false
}

// OpenAI chat completion only models don't support top_p
if (isOpenAIChatCompletionOnlyModel(model)) {
return false
}

// Qwen MT models don't support top_p
if (isQwenMTModel(model)) {
return false
}

return true
}

/**
* Check if the model enforces mutual exclusivity between temperature and top_p parameters.
* Currently only Claude 4.5 reasoning models require this constraint.
* @param model - The model to check
* @returns true if temperature and top_p are mutually exclusive for this model
*/
export function isTemperatureTopPMutuallyExclusiveModel(model: Model | undefined | null): boolean {
if (!model) return false
return isClaude45ReasoningModel(model)
}
|
||||
export function isGemmaModel(model?: Model): boolean {
|
||||
|
||||
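Taken together, these three predicates replace the old inverted isNotSupportTemperatureAndTopP check with positive, per-parameter queries. A minimal sketch of how a request builder might consume them (the buildSamplingParams helper and its inputs are hypothetical, not part of this diff):

function buildSamplingParams(model: Model, temperature?: number, topP?: number) {
  const params: { temperature?: number; top_p?: number } = {}
  if (temperature !== undefined && isSupportTemperatureModel(model)) {
    params.temperature = temperature
  }
  if (topP !== undefined && isSupportTopPModel(model)) {
    params.top_p = topP
  }
  // Claude 4.5 reasoning models accept only one of the two; preferring temperature here is a policy choice, not from the diff
  if (isTemperatureTopPMutuallyExclusiveModel(model) && params.temperature !== undefined) {
    delete params.top_p
  }
  return params
}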
@ -1,10 +1,12 @@
import { useCallback, useEffect, useRef, useState } from 'react'

import { useTimer } from './useTimer'
import { loggerService } from '@logger'
import { useCallback, useLayoutEffect, useRef, useState } from 'react'
import { useTranslation } from 'react-i18next'

const logger = loggerService.withContext('useInPlaceEdit')
export interface UseInPlaceEditOptions {
onSave: ((value: string) => void) | ((value: string) => Promise<void>)
onCancel?: () => void
onError?: (error: unknown) => void
autoSelectOnStart?: boolean
trimOnSave?: boolean
}

@ -12,14 +14,10 @@ export interface UseInPlaceEditOptions {
export interface UseInPlaceEditReturn {
isEditing: boolean
isSaving: boolean
editValue: string
inputRef: React.RefObject<HTMLInputElement | null>
startEdit: (initialValue: string) => void
saveEdit: () => void
cancelEdit: () => void
handleKeyDown: (e: React.KeyboardEvent) => void
handleInputChange: (e: React.ChangeEvent<HTMLInputElement>) => void
handleValueChange: (value: string) => void
inputProps: React.InputHTMLAttributes<HTMLInputElement> & { ref: React.RefObject<HTMLInputElement | null> }
}

/**

@ -32,63 +30,69 @@ export interface UseInPlaceEditReturn {
 * @returns An object containing the editing state and handler functions
 */
export function useInPlaceEdit(options: UseInPlaceEditOptions): UseInPlaceEditReturn {
const { onSave, onCancel, autoSelectOnStart = true, trimOnSave = true } = options
const { onSave, onCancel, onError, autoSelectOnStart = true, trimOnSave = true } = options
const { t } = useTranslation()

const [isSaving, setIsSaving] = useState(false)
const [isEditing, setIsEditing] = useState(false)
const [editValue, setEditValue] = useState('')
const [originalValue, setOriginalValue] = useState('')
const originalValueRef = useRef('')
const inputRef = useRef<HTMLInputElement>(null)
const { setTimeoutTimer } = useTimer()

const startEdit = useCallback(
(initialValue: string) => {
setIsEditing(true)
setEditValue(initialValue)
setOriginalValue(initialValue)
const startEdit = useCallback((initialValue: string) => {
setIsEditing(true)
setEditValue(initialValue)
originalValueRef.current = initialValue
}, [])

setTimeoutTimer(
'startEdit',
() => {
inputRef.current?.focus()
if (autoSelectOnStart) {
inputRef.current?.select()
}
},
0
)
},
[autoSelectOnStart, setTimeoutTimer]
)
useLayoutEffect(() => {
if (isEditing) {
inputRef.current?.focus()
if (autoSelectOnStart) {
inputRef.current?.select()
}
}
}, [autoSelectOnStart, isEditing])

const saveEdit = useCallback(async () => {
if (isSaving) return

const finalValue = trimOnSave ? editValue.trim() : editValue
if (finalValue === originalValueRef.current) {
setIsEditing(false)
return
}

setIsSaving(true)

try {
const finalValue = trimOnSave ? editValue.trim() : editValue
if (finalValue !== originalValue) {
await onSave(finalValue)
}
await onSave(finalValue)
setIsEditing(false)
setEditValue('')
setOriginalValue('')
} catch (error) {
logger.error('Error saving in-place edit', { error })

// Call custom error handler if provided, otherwise show default toast
if (onError) {
onError(error)
} else {
window.toast.error(t('common.save_failed') || 'Failed to save')
}
} finally {
setIsSaving(false)
}
}, [isSaving, trimOnSave, editValue, originalValue, onSave])
}, [isSaving, trimOnSave, editValue, onSave, onError, t])

const cancelEdit = useCallback(() => {
setIsEditing(false)
setEditValue('')
setOriginalValue('')
onCancel?.()
}, [onCancel])

const handleKeyDown = useCallback(
(e: React.KeyboardEvent) => {
if (e.key === 'Enter' && !e.nativeEvent.isComposing) {
if (e.nativeEvent.isComposing) return
if (e.key === 'Enter') {
e.preventDefault()
saveEdit()
} else if (e.key === 'Escape') {

@ -104,37 +108,29 @@ export function useInPlaceEdit(options: UseInPlaceEditOptions): UseInPlaceEditRe
setEditValue(e.target.value)
}, [])

const handleValueChange = useCallback((value: string) => {
setEditValue(value)
}, [])

// Handle clicks outside the input to save
useEffect(() => {
const handleClickOutside = (event: MouseEvent) => {
if (isEditing && inputRef.current && !inputRef.current.contains(event.target as Node)) {
saveEdit()
}
const handleBlur = useCallback(() => {
// Note about the logic here:
// If the "Cancel" button is clicked, the blur handler may trigger a save first.
// In-place edit conventionally treats blur as save.
// If saving on blur is not desired, remove this call or check relatedTarget.
if (!isSaving) {
saveEdit()
}

if (isEditing) {
document.addEventListener('mousedown', handleClickOutside)
return () => {
document.removeEventListener('mousedown', handleClickOutside)
}
}
return
}, [isEditing, saveEdit])
}, [saveEdit, isSaving])

return {
isEditing,
isSaving,
editValue,
inputRef,
startEdit,
saveEdit,
cancelEdit,
handleKeyDown,
handleInputChange,
handleValueChange
inputProps: {
ref: inputRef,
value: editValue,
onChange: handleInputChange,
onKeyDown: handleKeyDown,
onBlur: handleBlur,
disabled: isSaving // disable input while saving
}
}
}
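The hook now exposes a ready-made inputProps bag (ref, value, onChange, onKeyDown, onBlur, disabled) instead of individual handlers, so callers can spread it straight onto an input. A minimal consumer sketch, assuming a plain <input> and an async rename callback (both illustrative, not taken from this diff):

const RenameableLabel = ({ name, onRename }: { name: string; onRename: (value: string) => Promise<void> }) => {
  const { isEditing, startEdit, inputProps } = useInPlaceEdit({ onSave: onRename })
  return isEditing ? <input {...inputProps} /> : <span onDoubleClick={() => startEdit(name)}>{name}</span>
}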
@ -6,6 +6,9 @@
"failed": "Failed to add a agent",
"invalid_agent": "Invalid Agent"
},
"model": {
"tooltip": "Currently, only models that support Anthropic endpoints are available for the Agent feature."
},
"title": "Add Agent",
"type": {
"placeholder": "Select an agent type"

@ -1162,6 +1165,7 @@
"no_results": "No results",
"none": "None",
"off": "Off",
"on": "On",
"open": "Open",
"paste": "Paste",
"placeholders": {

@ -2219,7 +2223,10 @@
"untitled_folder": "New Folder",
"untitled_note": "Untitled Note",
"upload_failed": "Note upload failed",
"upload_success": "Note uploaded success"
"upload_files": "Upload Files",
"upload_folder": "Upload Folder",
"upload_success": "Note uploaded success",
"uploading_files": "Uploading {{count}} files..."
},
"notification": {
"assistant": "Assistant Response",

@ -2510,7 +2517,8 @@
},
"preview": {
"copy": {
"image": "Copy as image"
"image": "Copy as image",
"src": "Copy Image Source"
},
"dialog": "Open Dialog",
"label": "Preview",

@ -4271,6 +4279,12 @@
"tip": "Specifies the latency tier to use for processing the request",
"title": "Service Tier"
},
"stream_options": {
"include_usage": {
"tip": "Whether token usage is included (applicable only to the OpenAI Chat Completions API)",
"title": "Include usage"
}
},
"summary_text_mode": {
"auto": "auto",
"concise": "concise",
@ -6,6 +6,9 @@
"failed": "添加 Agent 失败",
"invalid_agent": "无效的 Agent"
},
"model": {
"tooltip": "目前,只有支持 Anthropic 端点的模型可用于 Agent 功能。"
},
"title": "添加 Agent",
"type": {
"placeholder": "选择 Agent 类型"

@ -1162,6 +1165,7 @@
"no_results": "无结果",
"none": "无",
"off": "关闭",
"on": "启用",
"open": "打开",
"paste": "粘贴",
"placeholders": {

@ -2219,7 +2223,10 @@
"untitled_folder": "新文件夹",
"untitled_note": "无标题笔记",
"upload_failed": "笔记上传失败",
"upload_success": "笔记上传成功"
"upload_files": "上传文件",
"upload_folder": "上传文件夹",
"upload_success": "笔记上传成功",
"uploading_files": "正在上传 {{count}} 个文件..."
},
"notification": {
"assistant": "助手响应",

@ -2510,7 +2517,8 @@
},
"preview": {
"copy": {
"image": "复制为图片"
"image": "复制为图片",
"src": "复制图片源"
},
"dialog": "打开预览窗口",
"label": "预览",

@ -4271,6 +4279,12 @@
"tip": "指定用于处理请求的延迟层级",
"title": "服务层级"
},
"stream_options": {
"include_usage": {
"tip": "是否请求 Tokens 用量(仅 OpenAI Chat Completions API 可用)",
"title": "包含用量"
}
},
"summary_text_mode": {
"auto": "自动",
"concise": "简洁",
@ -6,6 +6,9 @@
"failed": "無法新增代理人",
"invalid_agent": "無效的 Agent"
},
"model": {
"tooltip": "目前,僅支援 Anthropic 端點的模型可供代理功能使用。"
},
"title": "新增代理",
"type": {
"placeholder": "選擇 Agent 類型"

@ -1162,6 +1165,7 @@
"no_results": "沒有結果",
"none": "無",
"off": "關閉",
"on": "開啟",
"open": "開啟",
"paste": "貼上",
"placeholders": {

@ -2219,7 +2223,10 @@
"untitled_folder": "新資料夾",
"untitled_note": "無標題筆記",
"upload_failed": "筆記上傳失敗",
"upload_success": "筆記上傳成功"
"upload_files": "上傳檔案",
"upload_folder": "上傳資料夾",
"upload_success": "筆記上傳成功",
"uploading_files": "正在上傳 {{count}} 個檔案..."
},
"notification": {
"assistant": "助手回應",

@ -2510,7 +2517,8 @@
},
"preview": {
"copy": {
"image": "複製為圖片"
"image": "複製為圖片",
"src": "複製圖片來源"
},
"dialog": "開啟預覽窗口",
"label": "預覽",

@ -4271,6 +4279,12 @@
"tip": "指定用於處理請求的延遲層級",
"title": "服務層級"
},
"stream_options": {
"include_usage": {
"tip": "是否請求 Tokens 用量(僅 OpenAI Chat Completions API 可用)",
"title": "包含用量"
}
},
"summary_text_mode": {
"auto": "自動",
"concise": "簡潔",
@ -6,6 +6,9 @@
"failed": "Agent hinzufügen fehlgeschlagen",
"invalid_agent": "Ungültiger Agent"
},
"model": {
"tooltip": "Derzeit sind für die Agent-Funktion nur Modelle verfügbar, die Anthropic-Endpunkte unterstützen."
},
"title": "Agent hinzufügen",
"type": {
"placeholder": "Agent-Typ auswählen"

@ -1162,6 +1165,7 @@
"no_results": "Keine Ergebnisse",
"none": "Keine",
"off": "Aus",
"on": "An",
"open": "Öffnen",
"paste": "Einfügen",
"placeholders": {

@ -2219,7 +2223,10 @@
"untitled_folder": "Neuer Ordner",
"untitled_note": "Unbenannte Notiz",
"upload_failed": "Notizen-Upload fehlgeschlagen",
"upload_success": "Notizen erfolgreich hochgeladen"
"upload_files": "Dateien hochladen",
"upload_folder": "Ordner hochladen",
"upload_success": "Notizen erfolgreich hochgeladen",
"uploading_files": "Lade {{count}} Dateien hoch..."
},
"notification": {
"assistant": "Assistenten-Antwort",

@ -2510,7 +2517,8 @@
},
"preview": {
"copy": {
"image": "Als Bild kopieren"
"image": "Als Bild kopieren",
"src": "Bildquelle kopieren"
},
"dialog": "Vorschaufenster öffnen",
"label": "Vorschau",

@ -4271,6 +4279,12 @@
"tip": "Latenz-Ebene für Anfrageverarbeitung festlegen",
"title": "Service-Tier"
},
"stream_options": {
"include_usage": {
"tip": "Ob die Token-Nutzung enthalten ist (gilt nur für die OpenAI Chat Completions API)",
"title": "Nutzung einbeziehen"
}
},
"summary_text_mode": {
"auto": "Automatisch",
"concise": "Kompakt",
@ -6,6 +6,9 @@
"failed": "Αποτυχία προσθήκης πράκτορα",
"invalid_agent": "Μη έγκυρος Agent"
},
"model": {
"tooltip": "Προς το παρόν, μόνο μοντέλα που υποστηρίζουν τελικά σημεία Anthropic είναι διαθέσιμα για τη λειτουργία Agent."
},
"title": "Προσθήκη Agent",
"type": {
"placeholder": "Επιλέξτε τύπο Agent"

@ -1162,6 +1165,7 @@
"no_results": "Δεν βρέθηκαν αποτελέσματα",
"none": "Χωρίς",
"off": "Κλειστό",
"on": "Ενεργό",
"open": "Άνοιγμα",
"paste": "Επικόλληση",
"placeholders": {

@ -2219,7 +2223,10 @@
"untitled_folder": "Νέος φάκελος",
"untitled_note": "σημείωση χωρίς τίτλο",
"upload_failed": "Η σημείωση δεν ανέβηκε",
"upload_success": "Οι σημειώσεις μεταφορτώθηκαν με επιτυχία"
"upload_files": "Ανέβασμα Αρχείων",
"upload_folder": "Ανέβασμα Φακέλου",
"upload_success": "Οι σημειώσεις μεταφορτώθηκαν με επιτυχία",
"uploading_files": "Ανεβάζονται {{count}} αρχεία..."
},
"notification": {
"assistant": "Απάντηση Βοηθού",

@ -2510,7 +2517,8 @@
},
"preview": {
"copy": {
"image": "Αντιγραφή ως εικόνα"
"image": "Αντιγραφή ως εικόνα",
"src": "Αντιγραφή πηγής εικόνας"
},
"dialog": "Άνοιγμα παραθύρου προεπισκόπησης",
"label": "Προεπισκόπηση",

@ -4271,6 +4279,12 @@
"tip": "Καθορίστε το επίπεδο καθυστέρησης που χρησιμοποιείται για την επεξεργασία των αιτημάτων",
"title": "Επίπεδο υπηρεσίας"
},
"stream_options": {
"include_usage": {
"tip": "Είτε περιλαμβάνεται η χρήση διακριτικών (ισχύει μόνο για το OpenAI Chat Completions API)",
"title": "Συμπεριλάβετε χρήση"
}
},
"summary_text_mode": {
"auto": "Αυτόματο",
"concise": "Σύντομο",
@ -6,6 +6,9 @@
"failed": "Error al añadir agente",
"invalid_agent": "Agent inválido"
},
"model": {
"tooltip": "Actualmente, solo los modelos que admiten puntos finales de Anthropic están disponibles para la función Agente."
},
"title": "Agregar Agente",
"type": {
"placeholder": "Seleccionar tipo de Agente"

@ -1162,6 +1165,7 @@
"no_results": "Sin resultados",
"none": "无",
"off": "Apagado",
"on": "En",
"open": "Abrir",
"paste": "Pegar",
"placeholders": {

@ -2219,7 +2223,10 @@
"untitled_folder": "Nueva carpeta",
"untitled_note": "Nota sin título",
"upload_failed": "Error al cargar la nota",
"upload_success": "Nota cargada con éxito"
"upload_files": "Subir archivos",
"upload_folder": "Carpeta de subida",
"upload_success": "Nota cargada con éxito",
"uploading_files": "Subiendo {{count}} archivos..."
},
"notification": {
"assistant": "Respuesta del asistente",

@ -2510,7 +2517,8 @@
},
"preview": {
"copy": {
"image": "Copiar como imagen"
"image": "Copiar como imagen",
"src": "Copia la fuente de la imagen"
},
"dialog": "Abrir la ventana de vista previa",
"label": "Vista previa",

@ -4271,6 +4279,12 @@
"tip": "Especifica el nivel de latencia utilizado para procesar la solicitud",
"title": "Nivel de servicio"
},
"stream_options": {
"include_usage": {
"tip": "Si se incluye el uso de tokens (aplicable solo a la API de Completions de chat de OpenAI)",
"title": "Incluir uso"
}
},
"summary_text_mode": {
"auto": "Automático",
"concise": "Conciso",
@ -6,6 +6,9 @@
"failed": "Échec de l'ajout de l'agent",
"invalid_agent": "Agent invalide"
},
"model": {
"tooltip": "Actuellement, seuls les modèles qui prennent en charge les points de terminaison Anthropic sont disponibles pour la fonctionnalité Agent."
},
"title": "Ajouter un agent",
"type": {
"placeholder": "Sélectionner le type d'Agent"

@ -1162,6 +1165,7 @@
"no_results": "Aucun résultat",
"none": "Aucun",
"off": "Désactivé",
"on": "Marche",
"open": "Ouvrir",
"paste": "Coller",
"placeholders": {

@ -2219,7 +2223,10 @@
"untitled_folder": "nouveau dossier",
"untitled_note": "Note sans titre",
"upload_failed": "Échec du téléchargement de la note",
"upload_success": "Note téléchargée avec succès"
"upload_files": "Télécharger des fichiers",
"upload_folder": "Puis dossier de téléchargement",
"upload_success": "Note téléchargée avec succès",
"uploading_files": "Téléchargement de {{count}} fichiers..."
},
"notification": {
"assistant": "Réponse de l'assistant",

@ -2510,7 +2517,8 @@
},
"preview": {
"copy": {
"image": "Copier en tant qu'image"
"image": "Copier en tant qu'image",
"src": "Copier la source de l'image"
},
"dialog": "Ouvrir la fenêtre d'aperçu",
"label": "Aperçu",

@ -4271,6 +4279,12 @@
"tip": "Spécifie le niveau de latence utilisé pour traiter la demande",
"title": "Niveau de service"
},
"stream_options": {
"include_usage": {
"tip": "Si l'utilisation des jetons est incluse (applicable uniquement à l'API OpenAI Chat Completions)",
"title": "Inclure l'utilisation"
}
},
"summary_text_mode": {
"auto": "Automatique",
"concise": "Concis",
@ -6,6 +6,9 @@
"failed": "エージェントの追加に失敗しました",
"invalid_agent": "無効なエージェント"
},
"model": {
"tooltip": "現在、エージェント機能では、Anthropicエンドポイントをサポートするモデルのみが利用可能です。"
},
"title": "エージェントを追加",
"type": {
"placeholder": "エージェントタイプを選択"

@ -1162,6 +1165,7 @@
"no_results": "検索結果なし",
"none": "無",
"off": "オフ",
"on": "オン",
"open": "開く",
"paste": "貼り付け",
"placeholders": {

@ -2219,7 +2223,10 @@
"untitled_folder": "新ファイル夹",
"untitled_note": "無題のメモ",
"upload_failed": "ノートのアップロードに失敗しました",
"upload_success": "ノートのアップロードが成功しました"
"upload_files": "ファイルをアップロード",
"upload_folder": "アップロードフォルダ",
"upload_success": "ノートのアップロードが成功しました",
"uploading_files": "{{count}} 個のファイルをアップロード中..."
},
"notification": {
"assistant": "助手回應",

@ -2510,7 +2517,8 @@
},
"preview": {
"copy": {
"image": "画像としてコピー"
"image": "画像としてコピー",
"src": "画像ソースをコピー"
},
"dialog": "ダイアログを開く",
"label": "プレビュー",

@ -4271,6 +4279,12 @@
"tip": "リクエスト処理に使用するレイテンシティアを指定します",
"title": "サービスティア"
},
"stream_options": {
"include_usage": {
"tip": "トークン使用量が含まれるかどうか (OpenAI Chat Completions APIのみに適用)",
"title": "使用法を含める"
}
},
"summary_text_mode": {
"auto": "自動",
"concise": "簡潔",
@ -6,6 +6,9 @@
"failed": "Falha ao adicionar agente",
"invalid_agent": "Agent inválido"
},
"model": {
"tooltip": "Atualmente, apenas modelos que suportam endpoints da Anthropic estão disponíveis para o recurso Agente."
},
"title": "Adicionar Agente",
"type": {
"placeholder": "Selecionar tipo de Agente"

@ -1162,6 +1165,7 @@
"no_results": "Nenhum resultado",
"none": "Nenhum",
"off": "Desligado",
"on": "Ligado",
"open": "Abrir",
"paste": "Colar",
"placeholders": {

@ -2219,7 +2223,10 @@
"untitled_folder": "Nova pasta",
"untitled_note": "Nota sem título",
"upload_failed": "Falha ao carregar a nota",
"upload_success": "Nota carregada com sucesso"
"upload_files": "Carregar Ficheiros",
"upload_folder": "Carregar Pasta",
"upload_success": "Nota carregada com sucesso",
"uploading_files": "A enviar {{count}} ficheiros..."
},
"notification": {
"assistant": "Resposta do assistente",

@ -2510,7 +2517,8 @@
},
"preview": {
"copy": {
"image": "Copiar como imagem"
"image": "Copiar como imagem",
"src": "Copiar Origem da Imagem"
},
"dialog": "Abrir janela de pré-visualização",
"label": "Pré-visualização",

@ -4271,6 +4279,12 @@
"tip": "Especifique o nível de latência usado para processar a solicitação",
"title": "Nível de Serviço"
},
"stream_options": {
"include_usage": {
"tip": "Se o uso de tokens está incluído (aplicável apenas à API de Conclusões de Chat da OpenAI)",
"title": "Incluir uso"
}
},
"summary_text_mode": {
"auto": "Automático",
"concise": "Conciso",
@ -6,6 +6,9 @@
"failed": "Не удалось добавить агента",
"invalid_agent": "Недействительный агент"
},
"model": {
"tooltip": "В настоящее время для функции агента доступны только модели, поддерживающие конечные точки Anthropic."
},
"title": "Добавить агента",
"type": {
"placeholder": "Выбор типа агента"

@ -1162,6 +1165,7 @@
"no_results": "Результатов не найдено",
"none": "без",
"off": "Выкл",
"on": "Вкл",
"open": "Открыть",
"paste": "Вставить",
"placeholders": {

@ -2219,7 +2223,10 @@
"untitled_folder": "Новая папка",
"untitled_note": "Незаглавленная заметка",
"upload_failed": "Не удалось загрузить заметку",
"upload_success": "Заметка успешно загружена"
"upload_files": "Загрузить файлы",
"upload_folder": "Загрузить папку",
"upload_success": "Заметка успешно загружена",
"uploading_files": "Загрузка {{count}} файлов..."
},
"notification": {
"assistant": "Ответ ассистента",

@ -2510,7 +2517,8 @@
},
"preview": {
"copy": {
"image": "Скопировать как изображение"
"image": "Скопировать как изображение",
"src": "Копировать источник изображения"
},
"dialog": "Открыть диалог",
"label": "Предварительный просмотр",

@ -4271,6 +4279,12 @@
"tip": "Указывает уровень задержки, который следует использовать для обработки запроса",
"title": "Уровень сервиса"
},
"stream_options": {
"include_usage": {
"tip": "Включено ли использование токенов (применимо только к API завершения чата OpenAI)",
"title": "Включить использование"
}
},
"summary_text_mode": {
"auto": "Авто",
"concise": "Краткий",
@ -62,7 +62,7 @@ export const getCodeToolsApiBaseUrl = (model: Model, type: EndpointType) => {
const CODE_TOOLS_API_ENDPOINTS = {
aihubmix: {
gemini: {
api_base_url: 'https://api.aihubmix.com/gemini'
api_base_url: 'https://aihubmix.com/gemini'
}
},
deepseek: {
@ -34,7 +34,11 @@ import type { Assistant, AssistantSettings, CodeStyleVarious, MathEngine } from
import { isGroqSystemProvider } from '@renderer/types'
import { modalConfirm } from '@renderer/utils'
import { getSendMessageShortcutLabel } from '@renderer/utils/input'
import { isSupportServiceTierProvider, isSupportVerbosityProvider } from '@renderer/utils/provider'
import {
isOpenAICompatibleProvider,
isSupportServiceTierProvider,
isSupportVerbosityProvider
} from '@renderer/utils/provider'
import type { MultiModelMessageStyle, SendMessageShortcut } from '@shared/data/preference/preferenceTypes'
import { ThemeMode } from '@shared/data/preference/preferenceTypes'
import { Col, InputNumber, Row, Slider } from 'antd'

@ -244,6 +248,7 @@ const SettingsTab: FC<Props> = (props) => {
const model = assistant.model || getDefaultModel()

const showOpenAiSettings =
isOpenAICompatibleProvider(provider) ||
isOpenAIModel(model) ||
isSupportServiceTierProvider(provider) ||
(isSupportVerbosityModel(model) && isSupportVerbosityProvider(provider))
@ -1,239 +0,0 @@
import { HelpTooltip } from '@cherrystudio/ui'
import Selector from '@renderer/components/Selector'
import {
getModelSupportedVerbosity,
isSupportedReasoningEffortOpenAIModel,
isSupportFlexServiceTierModel,
isSupportVerbosityModel
} from '@renderer/config/models'
import { useProvider } from '@renderer/hooks/useProvider'
import { SettingDivider, SettingRow } from '@renderer/pages/settings'
import { CollapsibleSettingGroup } from '@renderer/pages/settings/SettingGroup'
import type { RootState } from '@renderer/store'
import { useAppDispatch } from '@renderer/store'
import { setOpenAISummaryText, setOpenAIVerbosity } from '@renderer/store/settings'
import type { Model, OpenAIServiceTier, ServiceTier } from '@renderer/types'
import { SystemProviderIds } from '@renderer/types'
import type { OpenAISummaryText, OpenAIVerbosity } from '@renderer/types/aiCoreTypes'
import { isSupportServiceTierProvider, isSupportVerbosityProvider } from '@renderer/utils/provider'
import { toOptionValue, toRealValue } from '@renderer/utils/select'
import type { FC } from 'react'
import { useCallback, useEffect, useMemo } from 'react'
import { useTranslation } from 'react-i18next'
import { useSelector } from 'react-redux'

type VerbosityOption = {
value: NonNullable<OpenAIVerbosity> | 'undefined' | 'null'
label: string
}

type SummaryTextOption = {
value: NonNullable<OpenAISummaryText> | 'undefined' | 'null'
label: string
}

type OpenAIServiceTierOption = { value: NonNullable<OpenAIServiceTier> | 'null' | 'undefined'; label: string }

interface Props {
model: Model
providerId: string
SettingGroup: FC<{ children: React.ReactNode }>
SettingRowTitleSmall: FC<{ children: React.ReactNode }>
}

const OpenAISettingsGroup: FC<Props> = ({ model, providerId, SettingGroup, SettingRowTitleSmall }) => {
const { t } = useTranslation()
const { provider, updateProvider } = useProvider(providerId)
const verbosity = useSelector((state: RootState) => state.settings.openAI.verbosity)
const summaryText = useSelector((state: RootState) => state.settings.openAI.summaryText)
const serviceTierMode = provider.serviceTier
const dispatch = useAppDispatch()

const showSummarySetting =
isSupportedReasoningEffortOpenAIModel(model) &&
!model.id.includes('o1-pro') &&
(provider.type === 'openai-response' || model.endpoint_type === 'openai-response' || provider.id === 'aihubmix')
const showVerbositySetting = isSupportVerbosityModel(model) && isSupportVerbosityProvider(provider)
const isSupportFlexServiceTier = isSupportFlexServiceTierModel(model)
const isSupportServiceTier = isSupportServiceTierProvider(provider)
const showServiceTierSetting = isSupportServiceTier && providerId !== SystemProviderIds.groq

const setSummaryText = useCallback(
(value: OpenAISummaryText) => {
dispatch(setOpenAISummaryText(value))
},
[dispatch]
)

const setServiceTierMode = useCallback(
(value: ServiceTier) => {
updateProvider({ serviceTier: value })
},
[updateProvider]
)

const setVerbosity = useCallback(
(value: OpenAIVerbosity) => {
dispatch(setOpenAIVerbosity(value))
},
[dispatch]
)

const summaryTextOptions = [
{
value: 'undefined',
label: t('common.ignore')
},
{
value: 'null',
label: t('common.off')
},
{
value: 'auto',
label: t('settings.openai.summary_text_mode.auto')
},
{
value: 'detailed',
label: t('settings.openai.summary_text_mode.detailed')
},
{
value: 'concise',
label: t('settings.openai.summary_text_mode.concise')
}
] as const satisfies SummaryTextOption[]

const verbosityOptions = useMemo(() => {
const allOptions = [
{
value: 'undefined',
label: t('common.ignore')
},
{
value: 'null',
label: t('common.off')
},
{
value: 'low',
label: t('settings.openai.verbosity.low')
},
{
value: 'medium',
label: t('settings.openai.verbosity.medium')
},
{
value: 'high',
label: t('settings.openai.verbosity.high')
}
] as const satisfies VerbosityOption[]
const supportedVerbosityLevels = getModelSupportedVerbosity(model).map((v) => toOptionValue(v))
return allOptions.filter((option) => supportedVerbosityLevels.includes(option.value))
}, [model, t])

const serviceTierOptions = useMemo(() => {
const options = [
{
value: 'undefined',
label: t('common.ignore')
},
{
value: 'null',
label: t('common.off')
},
{
value: 'auto',
label: t('settings.openai.service_tier.auto')
},
{
value: 'default',
label: t('settings.openai.service_tier.default')
},
{
value: 'flex',
label: t('settings.openai.service_tier.flex')
},
{
value: 'priority',
label: t('settings.openai.service_tier.priority')
}
] as const satisfies OpenAIServiceTierOption[]
return options.filter((option) => {
if (option.value === 'flex') {
return isSupportFlexServiceTier
}
return true
})
}, [isSupportFlexServiceTier, t])

useEffect(() => {
if (verbosity && !verbosityOptions.some((option) => option.value === verbosity)) {
const supportedVerbosityLevels = getModelSupportedVerbosity(model)
// Default to the highest supported verbosity level
const defaultVerbosity = supportedVerbosityLevels[supportedVerbosityLevels.length - 1]
setVerbosity(defaultVerbosity)
}
}, [model, verbosity, verbosityOptions, setVerbosity])

if (!showSummarySetting && !showServiceTierSetting && !showVerbositySetting) {
return null
}

return (
<CollapsibleSettingGroup title={t('settings.openai.title')} defaultExpanded={true}>
<SettingGroup>
{showServiceTierSetting && (
<>
<SettingRow>
<SettingRowTitleSmall>
{t('settings.openai.service_tier.title')}{' '}
<HelpTooltip content={t('settings.openai.service_tier.tip')} />
</SettingRowTitleSmall>
<Selector
value={toOptionValue(serviceTierMode)}
onChange={(value) => {
setServiceTierMode(toRealValue(value))
}}
options={serviceTierOptions}
/>
</SettingRow>
{(showSummarySetting || showVerbositySetting) && <SettingDivider />}
</>
)}
{showSummarySetting && (
<>
<SettingRow>
<SettingRowTitleSmall>
{t('settings.openai.summary_text_mode.title')}{' '}
<HelpTooltip content={t('settings.openai.summary_text_mode.tip')} />
</SettingRowTitleSmall>
<Selector
value={toOptionValue(summaryText)}
onChange={(value) => {
setSummaryText(toRealValue(value))
}}
options={summaryTextOptions}
/>
</SettingRow>
{showVerbositySetting && <SettingDivider />}
</>
)}
{showVerbositySetting && (
<SettingRow>
<SettingRowTitleSmall>
{t('settings.openai.verbosity.title')} <HelpTooltip content={t('settings.openai.verbosity.tip')} />
</SettingRowTitleSmall>
<Selector
value={toOptionValue(verbosity)}
onChange={(value) => {
setVerbosity(toRealValue(value))
}}
options={verbosityOptions}
/>
</SettingRow>
)}
</SettingGroup>
<SettingDivider />
</CollapsibleSettingGroup>
)
}

export default OpenAISettingsGroup
@ -0,0 +1,72 @@
import { isSupportedReasoningEffortOpenAIModel, isSupportVerbosityModel } from '@renderer/config/models'
import { useProvider } from '@renderer/hooks/useProvider'
import { SettingDivider } from '@renderer/pages/settings'
import { CollapsibleSettingGroup } from '@renderer/pages/settings/SettingGroup'
import type { Model } from '@renderer/types'
import { SystemProviderIds } from '@renderer/types'
import {
isSupportServiceTierProvider,
isSupportStreamOptionsProvider,
isSupportVerbosityProvider
} from '@renderer/utils/provider'
import type { FC } from 'react'
import { useTranslation } from 'react-i18next'

import ReasoningSummarySetting from './ReasoningSummarySetting'
import ServiceTierSetting from './ServiceTierSetting'
import StreamOptionsSetting from './StreamOptionsSetting'
import VerbositySetting from './VerbositySetting'

interface Props {
model: Model
providerId: string
SettingGroup: FC<{ children: React.ReactNode }>
SettingRowTitleSmall: FC<{ children: React.ReactNode }>
}

const OpenAISettingsGroup: FC<Props> = ({ model, providerId, SettingGroup, SettingRowTitleSmall }) => {
const { t } = useTranslation()
const { provider } = useProvider(providerId)

const showSummarySetting =
isSupportedReasoningEffortOpenAIModel(model) &&
!model.id.includes('o1-pro') &&
(provider.type === 'openai-response' || model.endpoint_type === 'openai-response' || provider.id === 'aihubmix')
const showVerbositySetting = isSupportVerbosityModel(model) && isSupportVerbosityProvider(provider)
const isSupportServiceTier = isSupportServiceTierProvider(provider)
const showServiceTierSetting = isSupportServiceTier && providerId !== SystemProviderIds.groq
const showStreamOptionsSetting = isSupportStreamOptionsProvider(provider)

if (!showSummarySetting && !showServiceTierSetting && !showVerbositySetting && !showStreamOptionsSetting) {
return null
}

return (
<CollapsibleSettingGroup title={t('settings.openai.title')} defaultExpanded={true}>
<SettingGroup>
{showServiceTierSetting && (
<>
<ServiceTierSetting model={model} providerId={providerId} SettingRowTitleSmall={SettingRowTitleSmall} />
{(showSummarySetting || showVerbositySetting || showStreamOptionsSetting) && <SettingDivider />}
</>
)}
{showSummarySetting && (
<>
<ReasoningSummarySetting SettingRowTitleSmall={SettingRowTitleSmall} />
{(showVerbositySetting || showStreamOptionsSetting) && <SettingDivider />}
</>
)}
{showVerbositySetting && (
<>
<VerbositySetting model={model} SettingRowTitleSmall={SettingRowTitleSmall} />
{showStreamOptionsSetting && <SettingDivider />}
</>
)}
{showStreamOptionsSetting && <StreamOptionsSetting SettingRowTitleSmall={SettingRowTitleSmall} />}
</SettingGroup>
<SettingDivider />
</CollapsibleSettingGroup>
)
}

export default OpenAISettingsGroup
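The monolithic component is now a thin composition layer; each sub-setting owns its own store access. A rough sketch of how a parent such as SettingsTab might mount it (prop names mirror the interface above; the surrounding SettingGroup and SettingRowTitleSmall components are assumed to exist in the caller and are not shown in this diff):

{showOpenAiSettings && (
  <OpenAISettingsGroup
    model={model}
    providerId={provider.id}
    SettingGroup={SettingGroup}
    SettingRowTitleSmall={SettingRowTitleSmall}
  />
)}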
@ -0,0 +1,78 @@
import Selector from '@renderer/components/Selector'
import { SettingRow } from '@renderer/pages/settings'
import type { RootState } from '@renderer/store'
import { useAppDispatch } from '@renderer/store'
import { setOpenAISummaryText } from '@renderer/store/settings'
import type { OpenAIReasoningSummary } from '@renderer/types/aiCoreTypes'
import { toOptionValue, toRealValue } from '@renderer/utils/select'
import { Tooltip } from 'antd'
import { CircleHelp } from 'lucide-react'
import type { FC } from 'react'
import { useCallback } from 'react'
import { useTranslation } from 'react-i18next'
import { useSelector } from 'react-redux'

type SummaryTextOption = {
value: NonNullable<OpenAIReasoningSummary> | 'undefined' | 'null'
label: string
}

interface Props {
SettingRowTitleSmall: FC<{ children: React.ReactNode }>
}

const ReasoningSummarySetting: FC<Props> = ({ SettingRowTitleSmall }) => {
const { t } = useTranslation()
const summaryText = useSelector((state: RootState) => state.settings.openAI.summaryText)
const dispatch = useAppDispatch()

const setSummaryText = useCallback(
(value: OpenAIReasoningSummary) => {
dispatch(setOpenAISummaryText(value))
},
[dispatch]
)

const summaryTextOptions = [
{
value: 'undefined',
label: t('common.ignore')
},
{
value: 'null',
label: t('common.off')
},
{
value: 'auto',
label: t('settings.openai.summary_text_mode.auto')
},
{
value: 'detailed',
label: t('settings.openai.summary_text_mode.detailed')
},
{
value: 'concise',
label: t('settings.openai.summary_text_mode.concise')
}
] as const satisfies SummaryTextOption[]

return (
<SettingRow>
<SettingRowTitleSmall>
{t('settings.openai.summary_text_mode.title')}{' '}
<Tooltip title={t('settings.openai.summary_text_mode.tip')}>
<CircleHelp size={14} style={{ marginLeft: 4 }} color="var(--color-text-2)" />
</Tooltip>
</SettingRowTitleSmall>
<Selector
value={toOptionValue(summaryText)}
onChange={(value) => {
setSummaryText(toRealValue(value))
}}
options={summaryTextOptions}
/>
</SettingRow>
)
}

export default ReasoningSummarySetting
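The 'undefined' and 'null' option values suggest that toOptionValue/toRealValue round-trip the real setting through string literals so the Selector can represent "ignore" and "off". A guess at their shape, for orientation only (the actual helpers live in @renderer/utils/select and may differ):

// hypothetical sketch, not the real implementation
const toOptionValueSketch = (v: string | null | undefined): string => (v === undefined ? 'undefined' : v === null ? 'null' : v)
const toRealValueSketch = (v: string): string | null | undefined => (v === 'undefined' ? undefined : v === 'null' ? null : v)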
@ -0,0 +1,88 @@
import Selector from '@renderer/components/Selector'
import { isSupportFlexServiceTierModel } from '@renderer/config/models'
import { useProvider } from '@renderer/hooks/useProvider'
import { SettingRow } from '@renderer/pages/settings'
import type { Model, OpenAIServiceTier, ServiceTier } from '@renderer/types'
import { toOptionValue, toRealValue } from '@renderer/utils/select'
import { Tooltip } from 'antd'
import { CircleHelp } from 'lucide-react'
import type { FC } from 'react'
import { useCallback, useMemo } from 'react'
import { useTranslation } from 'react-i18next'

type OpenAIServiceTierOption = { value: NonNullable<OpenAIServiceTier> | 'null' | 'undefined'; label: string }

interface Props {
model: Model
providerId: string
SettingRowTitleSmall: FC<{ children: React.ReactNode }>
}

const ServiceTierSetting: FC<Props> = ({ model, providerId, SettingRowTitleSmall }) => {
const { t } = useTranslation()
const { provider, updateProvider } = useProvider(providerId)
const serviceTierMode = provider.serviceTier
const isSupportFlexServiceTier = isSupportFlexServiceTierModel(model)

const setServiceTierMode = useCallback(
(value: ServiceTier) => {
updateProvider({ serviceTier: value })
},
[updateProvider]
)

const serviceTierOptions = useMemo(() => {
const options = [
{
value: 'undefined',
label: t('common.ignore')
},
{
value: 'null',
label: t('common.off')
},
{
value: 'auto',
label: t('settings.openai.service_tier.auto')
},
{
value: 'default',
label: t('settings.openai.service_tier.default')
},
{
value: 'flex',
label: t('settings.openai.service_tier.flex')
},
{
value: 'priority',
label: t('settings.openai.service_tier.priority')
}
] as const satisfies OpenAIServiceTierOption[]
return options.filter((option) => {
if (option.value === 'flex') {
return isSupportFlexServiceTier
}
return true
})
}, [isSupportFlexServiceTier, t])

return (
<SettingRow>
<SettingRowTitleSmall>
{t('settings.openai.service_tier.title')}{' '}
<Tooltip title={t('settings.openai.service_tier.tip')}>
<CircleHelp size={14} style={{ marginLeft: 4 }} color="var(--color-text-2)" />
</Tooltip>
</SettingRowTitleSmall>
<Selector
value={toOptionValue(serviceTierMode)}
onChange={(value) => {
setServiceTierMode(toRealValue(value))
}}
options={serviceTierOptions}
/>
</SettingRow>
)
}

export default ServiceTierSetting
@ -0,0 +1,72 @@
import Selector from '@renderer/components/Selector'
import { SettingRow } from '@renderer/pages/settings'
import type { RootState } from '@renderer/store'
import { useAppDispatch } from '@renderer/store'
import { setOpenAIStreamOptionsIncludeUsage } from '@renderer/store/settings'
import type { OpenAICompletionsStreamOptions } from '@renderer/types/aiCoreTypes'
import { toOptionValue, toRealValue } from '@renderer/utils/select'
import { Tooltip } from 'antd'
import { CircleHelp } from 'lucide-react'
import type { FC } from 'react'
import { useCallback, useMemo } from 'react'
import { useTranslation } from 'react-i18next'
import { useSelector } from 'react-redux'

type IncludeUsageOption = {
value: 'undefined' | 'false' | 'true'
label: string
}

interface Props {
SettingRowTitleSmall: FC<{ children: React.ReactNode }>
}

const StreamOptionsSetting: FC<Props> = ({ SettingRowTitleSmall }) => {
const { t } = useTranslation()
const includeUsage = useSelector((state: RootState) => state.settings.openAI?.streamOptions?.includeUsage)
const dispatch = useAppDispatch()

const setIncludeUsage = useCallback(
(value: OpenAICompletionsStreamOptions['include_usage']) => {
dispatch(setOpenAIStreamOptionsIncludeUsage(value))
},
[dispatch]
)

const includeUsageOptions = useMemo(() => {
return [
{
value: 'undefined',
label: t('common.ignore')
},
{
value: 'false',
label: t('common.off')
},
{
value: 'true',
label: t('common.on')
}
] as const satisfies IncludeUsageOption[]
}, [t])

return (
<SettingRow>
<SettingRowTitleSmall>
{t('settings.openai.stream_options.include_usage.title')}{' '}
<Tooltip title={t('settings.openai.stream_options.include_usage.tip')}>
<CircleHelp size={14} style={{ marginLeft: 4 }} color="var(--color-text-2)" />
</Tooltip>
</SettingRowTitleSmall>
<Selector
value={toOptionValue(includeUsage)}
onChange={(value) => {
setIncludeUsage(toRealValue(value))
}}
options={includeUsageOptions}
/>
</SettingRow>
)
}

export default StreamOptionsSetting
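The stored preference corresponds to the Chat Completions stream_options.include_usage flag, which makes the final streamed chunk carry a usage object. A rough sketch of the resulting request shape (the request-building code itself is outside this diff; "ignore" presumably means leaving the field out entirely):

const body: Record<string, unknown> = { model: model.id, messages, stream: true }
if (includeUsage !== undefined) {
  body.stream_options = { include_usage: includeUsage }
}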
@ -0,0 +1,94 @@
import Selector from '@renderer/components/Selector'
import { getModelSupportedVerbosity } from '@renderer/config/models'
import { SettingRow } from '@renderer/pages/settings'
import type { RootState } from '@renderer/store'
import { useAppDispatch } from '@renderer/store'
import { setOpenAIVerbosity } from '@renderer/store/settings'
import type { Model } from '@renderer/types'
import type { OpenAIVerbosity } from '@renderer/types/aiCoreTypes'
import { toOptionValue, toRealValue } from '@renderer/utils/select'
import { Tooltip } from 'antd'
import { CircleHelp } from 'lucide-react'
import type { FC } from 'react'
import { useCallback, useEffect, useMemo } from 'react'
import { useTranslation } from 'react-i18next'
import { useSelector } from 'react-redux'

type VerbosityOption = {
value: NonNullable<OpenAIVerbosity> | 'undefined' | 'null'
label: string
}

interface Props {
model: Model
SettingRowTitleSmall: FC<{ children: React.ReactNode }>
}

const VerbositySetting: FC<Props> = ({ model, SettingRowTitleSmall }) => {
const { t } = useTranslation()
const verbosity = useSelector((state: RootState) => state.settings.openAI.verbosity)
const dispatch = useAppDispatch()

const setVerbosity = useCallback(
(value: OpenAIVerbosity) => {
dispatch(setOpenAIVerbosity(value))
},
[dispatch]
)

const verbosityOptions = useMemo(() => {
const allOptions = [
{
value: 'undefined',
label: t('common.ignore')
},
{
value: 'null',
label: t('common.off')
},
{
value: 'low',
label: t('settings.openai.verbosity.low')
},
{
value: 'medium',
label: t('settings.openai.verbosity.medium')
},
{
value: 'high',
label: t('settings.openai.verbosity.high')
}
] as const satisfies VerbosityOption[]
const supportedVerbosityLevels = getModelSupportedVerbosity(model).map((v) => toOptionValue(v))
return allOptions.filter((option) => supportedVerbosityLevels.includes(option.value))
}, [model, t])

useEffect(() => {
if (verbosity !== undefined && !verbosityOptions.some((option) => option.value === toOptionValue(verbosity))) {
const supportedVerbosityLevels = getModelSupportedVerbosity(model)
// Default to the highest supported verbosity level
const defaultVerbosity = supportedVerbosityLevels[supportedVerbosityLevels.length - 1]
setVerbosity(defaultVerbosity)
}
}, [model, verbosity, verbosityOptions, setVerbosity])

return (
<SettingRow>
<SettingRowTitleSmall>
{t('settings.openai.verbosity.title')}{' '}
<Tooltip title={t('settings.openai.verbosity.tip')}>
<CircleHelp size={14} style={{ marginLeft: 4 }} color="var(--color-text-2)" />
</Tooltip>
</SettingRowTitleSmall>
<Selector
value={toOptionValue(verbosity)}
onChange={(value) => {
setVerbosity(toRealValue(value))
}}
options={verbosityOptions}
/>
</SettingRow>
)
}

export default VerbositySetting
@ -0,0 +1,3 @@
import OpenAISettingsGroup from './OpenAISettingsGroup'

export default OpenAISettingsGroup
@ -43,7 +43,7 @@ const SessionItem: FC<SessionItemProps> = ({ session, agentId, onDelete, onPress
const targetSession = useDeferredValue(_targetSession)
const dispatch = useAppDispatch()

const { isEditing, isSaving, editValue, inputRef, startEdit, handleKeyDown, handleValueChange } = useInPlaceEdit({
const { isEditing, isSaving, startEdit, inputProps } = useInPlaceEdit({
onSave: async (value) => {
if (value !== session.name) {
await updateSession({ id: session.id, name: value })

@ -180,14 +180,7 @@ const SessionItem: FC<SessionItemProps> = ({ session, agentId, onDelete, onPress
{isFulfilled && !isActive && <FulfilledIndicator />}
<SessionNameContainer>
{isEditing ? (
<SessionEditInput
ref={inputRef}
value={editValue}
onChange={(e: React.ChangeEvent<HTMLInputElement>) => handleValueChange(e.target.value)}
onKeyDown={handleKeyDown}
onClick={(e: React.MouseEvent) => e.stopPropagation()}
style={{ opacity: isSaving ? 0.5 : 1 }}
/>
<SessionEditInput {...inputProps} style={{ opacity: isSaving ? 0.5 : 1 }} />
) : (
<>
<SessionName>
@ -85,7 +85,7 @@ export const Topics: React.FC<Props> = ({ assistant: _assistant, activeTopic, se
const deleteTimerRef = useRef<NodeJS.Timeout>(null)
const [editingTopicId, setEditingTopicId] = useState<string | null>(null)

const topicEdit = useInPlaceEdit({
const { startEdit, isEditing, inputProps } = useInPlaceEdit({
onSave: (name: string) => {
const topic = assistant.topics.find((t) => t.id === editingTopicId)
if (topic && name !== topic.name) {

@ -545,29 +545,23 @@ export const Topics: React.FC<Props> = ({ assistant: _assistant, activeTopic, se
<TopicListItem
onContextMenu={() => setTargetTopic(topic)}
className={classNames(isActive ? 'active' : '', singlealone ? 'singlealone' : '')}
onClick={editingTopicId === topic.id && topicEdit.isEditing ? undefined : () => onSwitchTopic(topic)}
onClick={editingTopicId === topic.id && isEditing ? undefined : () => onSwitchTopic(topic)}
style={{
borderRadius,
cursor: editingTopicId === topic.id && topicEdit.isEditing ? 'default' : 'pointer'
cursor: editingTopicId === topic.id && isEditing ? 'default' : 'pointer'
}}>
{isPending(topic.id) && !isActive && <PendingIndicator />}
{isFulfilled(topic.id) && !isActive && <FulfilledIndicator />}
<TopicNameContainer>
{editingTopicId === topic.id && topicEdit.isEditing ? (
<TopicEditInput
ref={topicEdit.inputRef}
value={topicEdit.editValue}
onChange={topicEdit.handleInputChange}
onKeyDown={topicEdit.handleKeyDown}
onClick={(e) => e.stopPropagation()}
/>
{editingTopicId === topic.id && isEditing ? (
<TopicEditInput {...inputProps} onClick={(e) => e.stopPropagation()} />
) : (
<TopicName
className={getTopicNameClassName()}
title={topicName}
onDoubleClick={() => {
setEditingTopicId(topic.id)
topicEdit.startEdit(topic.name)
startEdit(topic.name)
}}>
{topicName}
</TopicName>
@ -20,6 +20,10 @@ const UpdateAppButton: FC = () => {
return null
}

if (update.ignore) {
return null
}

const handleOpenUpdateDialog = () => {
UpdateDialogPopup.show({ releaseInfo: appUpdateState.info || null })
}

@ -30,7 +34,7 @@ const UpdateAppButton: FC = () => {
className="nodrag"
onClick={handleOpenUpdateDialog}
icon={<SyncOutlined />}
color="orange"
color="primary"
variant="outlined"
size="small">
{t('button.update_available')}
@ -295,6 +295,16 @@ const NotesPage: FC = () => {
break
}

case 'refresh': {
// Single refresh after a batch operation completes
logger.debug('Received refresh event, triggering tree refresh')
const refresh = refreshTreeRef.current
if (refresh) {
await refresh()
}
break
}

case 'add':
case 'addDir':
case 'unlink':

@ -621,7 +631,27 @@ const NotesPage: FC = () => {
throw new Error('No folder path selected')
}

const result = await uploadNotes(files, targetFolderPath)
// Validate uploadNotes function is available
if (typeof uploadNotes !== 'function') {
logger.error('uploadNotes function is not available', { uploadNotes })
window.toast.error(t('notes.upload_failed'))
return
}

let result: Awaited<ReturnType<typeof uploadNotes>>
try {
result = await uploadNotes(files, targetFolderPath)
} catch (uploadError) {
logger.error('Upload operation failed:', uploadError as Error)
throw uploadError
}

// Validate result object
if (!result || typeof result !== 'object') {
logger.error('Invalid upload result:', { result })
window.toast.error(t('notes.upload_failed'))
return
}

// Check the upload result
if (result.fileCount === 0) {
File diff suppressed because it is too large
498 src/renderer/src/pages/notes/components/TreeNode.tsx Normal file
@ -0,0 +1,498 @@
|
||||
import HighlightText from '@renderer/components/HighlightText'
|
||||
import {
|
||||
useNotesActions,
|
||||
useNotesDrag,
|
||||
useNotesEditing,
|
||||
useNotesSearch,
|
||||
useNotesSelection,
|
||||
useNotesUI
|
||||
} from '@renderer/pages/notes/context/NotesContexts'
|
||||
import { EVENT_NAMES, EventEmitter } from '@renderer/services/EventService'
|
||||
import type { SearchMatch, SearchResult } from '@renderer/services/NotesSearchService'
|
||||
import type { NotesTreeNode } from '@renderer/types/note'
|
||||
import { Dropdown } from 'antd'
|
||||
import { ChevronDown, ChevronRight, File, FilePlus, Folder, FolderOpen } from 'lucide-react'
|
||||
import { memo, useCallback, useMemo, useState } from 'react'
|
||||
import { useTranslation } from 'react-i18next'
|
||||
import styled from 'styled-components'
|
||||
|
||||
interface TreeNodeProps {
|
||||
node: NotesTreeNode | SearchResult
|
||||
depth: number
|
||||
renderChildren?: boolean
|
||||
onHintClick?: () => void
|
||||
}
|
||||
|
||||
const TreeNode = memo<TreeNodeProps>(({ node, depth, renderChildren = true, onHintClick }) => {
|
||||
const { t } = useTranslation()
|
||||
|
||||
// Use split contexts - only subscribe to what this node needs
|
||||
const { selectedFolderId, activeNodeId } = useNotesSelection()
|
||||
const { editingNodeId, renamingNodeIds, newlyRenamedNodeIds, inPlaceEdit } = useNotesEditing()
|
||||
const { draggedNodeId, dragOverNodeId, dragPosition, onDragStart, onDragOver, onDragLeave, onDrop, onDragEnd } =
|
||||
useNotesDrag()
|
||||
const { searchKeyword, showMatches } = useNotesSearch()
|
||||
const { openDropdownKey } = useNotesUI()
|
||||
const { getMenuItems, onSelectNode, onToggleExpanded, onDropdownOpenChange } = useNotesActions()
|
||||
|
||||
const [showAllMatches, setShowAllMatches] = useState(false)
|
||||
const { isEditing: isInputEditing, inputProps } = inPlaceEdit
|
||||
|
||||
// Check whether this is a hint node
|
||||
const isHintNode = node.type === 'hint'
|
||||
|
||||
// Check whether this is a search result
|
||||
const searchResult = 'matchType' in node ? (node as SearchResult) : null
|
||||
const hasMatches = searchResult && searchResult.matches && searchResult.matches.length > 0
|
||||
|
||||
// Handle clicking on a search match
|
||||
const handleMatchClick = useCallback(
|
||||
(match: SearchMatch) => {
|
||||
// Emit the locate event
|
||||
EventEmitter.emit(EVENT_NAMES.LOCATE_NOTE_LINE, {
|
||||
noteId: node.id,
|
||||
lineNumber: match.lineNumber,
|
||||
lineContent: match.lineContent
|
||||
})
|
||||
},
|
||||
[node]
|
||||
)
|
||||
|
||||
const isActive = selectedFolderId ? node.type === 'folder' && node.id === selectedFolderId : node.id === activeNodeId
|
||||
const isEditing = editingNodeId === node.id && isInputEditing
|
||||
const isRenaming = renamingNodeIds.has(node.id)
|
||||
const isNewlyRenamed = newlyRenamedNodeIds.has(node.id)
|
||||
const hasChildren = node.children && node.children.length > 0
|
||||
const isDragging = draggedNodeId === node.id
|
||||
const isDragOver = dragOverNodeId === node.id
|
||||
const isDragBefore = isDragOver && dragPosition === 'before'
|
||||
const isDragInside = isDragOver && dragPosition === 'inside'
|
||||
const isDragAfter = isDragOver && dragPosition === 'after'
|
||||
|
||||
const getNodeNameClassName = () => {
|
||||
if (isRenaming) return 'shimmer'
|
||||
if (isNewlyRenamed) return 'typing'
|
||||
return ''
|
||||
}
|
||||
|
||||
const displayName = useMemo(() => {
|
||||
if (!searchKeyword) {
|
||||
return node.name
|
||||
}
|
||||
|
||||
const name = node.name ?? ''
|
||||
if (!name) {
|
||||
return name
|
||||
}
|
||||
|
||||
const keyword = searchKeyword
|
||||
const nameLower = name.toLowerCase()
|
||||
const keywordLower = keyword.toLowerCase()
|
||||
const matchStart = nameLower.indexOf(keywordLower)
|
||||
|
||||
if (matchStart === -1) {
|
||||
return name
|
||||
}
|
||||
|
||||
const matchEnd = matchStart + keyword.length
|
||||
const beforeMatch = Math.min(2, matchStart)
|
||||
const contextStart = matchStart - beforeMatch
|
||||
const contextLength = 50
|
||||
const contextEnd = Math.min(name.length, matchEnd + contextLength)
|
||||
|
||||
const prefix = contextStart > 0 ? '...' : ''
|
||||
const suffix = contextEnd < name.length ? '...' : ''
|
||||
|
||||
return prefix + name.substring(contextStart, contextEnd) + suffix
|
||||
}, [node.name, searchKeyword])
|
||||
|
||||
// Special render for hint nodes
|
||||
if (isHintNode) {
|
||||
return (
|
||||
<div key={node.id}>
|
||||
<TreeNodeContainer active={false} depth={depth}>
|
||||
<TreeNodeContent>
|
||||
<NodeIcon>
|
||||
<FilePlus size={16} />
|
||||
</NodeIcon>
|
||||
<DropHintText onClick={onHintClick}>{t('notes.drop_markdown_hint')}</DropHintText>
|
||||
</TreeNodeContent>
|
||||
</TreeNodeContainer>
|
||||
</div>
|
||||
)
|
||||
}
|
||||
|
||||
return (
|
||||
<div key={node.id}>
|
||||
<Dropdown
|
||||
menu={{ items: getMenuItems(node as NotesTreeNode) }}
|
||||
trigger={['contextMenu']}
|
||||
open={openDropdownKey === node.id}
|
||||
onOpenChange={(open) => onDropdownOpenChange(open ? node.id : null)}>
|
||||
<div onContextMenu={(e) => e.stopPropagation()}>
|
||||
<TreeNodeContainer
|
||||
active={isActive}
|
||||
depth={depth}
|
||||
isDragging={isDragging}
|
||||
isDragOver={isDragOver}
|
||||
isDragBefore={isDragBefore}
|
||||
isDragInside={isDragInside}
|
||||
isDragAfter={isDragAfter}
|
||||
draggable={!isEditing}
|
||||
data-node-id={node.id}
|
||||
onDragStart={(e) => onDragStart(e, node as NotesTreeNode)}
|
||||
onDragOver={(e) => onDragOver(e, node as NotesTreeNode)}
|
||||
onDragLeave={onDragLeave}
|
||||
onDrop={(e) => onDrop(e, node as NotesTreeNode)}
|
||||
onDragEnd={onDragEnd}>
|
||||
<TreeNodeContent onClick={() => onSelectNode(node as NotesTreeNode)}>
|
||||
<NodeIndent depth={depth} />
|
||||
|
||||
{node.type === 'folder' && (
|
||||
<ExpandIcon
|
||||
onClick={(e) => {
|
||||
e.stopPropagation()
|
||||
onToggleExpanded(node.id)
|
||||
}}
|
||||
title={node.expanded ? t('notes.collapse') : t('notes.expand')}>
|
||||
{node.expanded ? <ChevronDown size={14} /> : <ChevronRight size={14} />}
|
||||
</ExpandIcon>
|
||||
)}
|
||||
|
||||
<NodeIcon>
|
||||
{node.type === 'folder' ? (
|
||||
node.expanded ? (
|
||||
<FolderOpen size={16} />
|
||||
) : (
|
||||
<Folder size={16} />
|
||||
)
|
||||
) : (
|
||||
<File size={16} />
|
||||
)}
|
||||
</NodeIcon>
|
||||
|
||||
{isEditing ? (
|
||||
<EditInput {...inputProps} onClick={(e) => e.stopPropagation()} autoFocus />
|
||||
) : (
|
||||
<NodeNameContainer>
|
||||
<NodeName className={getNodeNameClassName()}>
|
||||
{searchKeyword ? <HighlightText text={displayName} keyword={searchKeyword} /> : node.name}
|
||||
</NodeName>
|
||||
{searchResult && searchResult.matchType && searchResult.matchType !== 'filename' && (
|
||||
<MatchBadge matchType={searchResult.matchType}>
|
||||
{searchResult.matchType === 'both' ? t('notes.search.both') : t('notes.search.content')}
|
||||
</MatchBadge>
|
||||
)}
|
||||
</NodeNameContainer>
|
||||
)}
|
||||
</TreeNodeContent>
|
||||
</TreeNodeContainer>
|
||||
</div>
|
||||
</Dropdown>
|
||||
|
||||
{showMatches && hasMatches && (
|
||||
<SearchMatchesContainer depth={depth}>
|
||||
{(showAllMatches ? searchResult!.matches! : searchResult!.matches!.slice(0, 3)).map((match, idx) => (
|
||||
<MatchItem key={idx} onClick={() => handleMatchClick(match)}>
|
||||
<MatchLineNumber>{match.lineNumber}</MatchLineNumber>
|
||||
<MatchContext>
|
||||
<HighlightText text={match.context} keyword={searchKeyword} />
|
||||
</MatchContext>
|
||||
</MatchItem>
|
||||
))}
|
||||
{searchResult!.matches!.length > 3 && (
|
||||
<MoreMatches
|
||||
depth={depth}
|
||||
onClick={(e) => {
|
||||
e.stopPropagation()
|
||||
setShowAllMatches(!showAllMatches)
|
||||
}}>
|
||||
{showAllMatches ? (
|
||||
<>
|
||||
<ChevronDown size={12} style={{ marginRight: 4 }} />
|
||||
{t('notes.search.show_less')}
|
||||
</>
|
||||
) : (
|
||||
<>
|
||||
<ChevronRight size={12} style={{ marginRight: 4 }} />+{searchResult!.matches!.length - 3}{' '}
|
||||
{t('notes.search.more_matches')}
|
||||
</>
|
||||
)}
|
||||
</MoreMatches>
|
||||
)}
|
||||
</SearchMatchesContainer>
|
||||
)}
|
||||
|
||||
{renderChildren && node.type === 'folder' && node.expanded && hasChildren && (
|
||||
<div>
|
||||
{node.children!.map((child) => (
|
||||
<TreeNode key={child.id} node={child} depth={depth + 1} renderChildren={renderChildren} />
|
||||
))}
|
||||
</div>
|
||||
)}
|
||||
</div>
|
||||
)
|
||||
})
|
||||
|
||||
export const TreeNodeContainer = styled.div<{
|
||||
active: boolean
|
||||
depth: number
|
||||
isDragging?: boolean
|
||||
isDragOver?: boolean
|
||||
isDragBefore?: boolean
|
||||
isDragInside?: boolean
|
||||
isDragAfter?: boolean
|
||||
}>`
|
||||
display: flex;
|
||||
align-items: center;
|
||||
justify-content: space-between;
|
||||
padding: 4px 6px;
|
||||
border-radius: 4px;
|
||||
cursor: pointer;
|
||||
margin-bottom: 2px;
|
||||
/* CRITICAL: Must have fully opaque background for sticky to work properly */
|
||||
/* Transparent/semi-transparent backgrounds will show content bleeding through when sticky */
|
||||
background-color: ${(props) => {
|
||||
if (props.isDragInside) return 'var(--color-primary-background)'
|
||||
// Use hover color for active state - it's guaranteed to be opaque
|
||||
if (props.active) return 'var(--color-hover, var(--color-background-mute))'
|
||||
return 'var(--color-background)'
|
||||
}};
|
||||
border: 0.5px solid
|
||||
${(props) => {
|
||||
if (props.isDragInside) return 'var(--color-primary)'
|
||||
if (props.active) return 'var(--color-border)'
|
||||
return 'transparent'
|
||||
}};
|
||||
opacity: ${(props) => (props.isDragging ? 0.5 : 1)};
|
||||
transition: all 0.2s ease;
|
||||
position: relative;
|
||||
|
||||
&:hover {
|
||||
background-color: var(--color-background-soft);
|
||||
|
||||
.node-actions {
|
||||
opacity: 1;
|
||||
}
|
||||
}
|
||||
|
||||
/* Drag indicator lines */
|
||||
${(props) =>
|
||||
props.isDragBefore &&
|
||||
`
|
||||
&::before {
|
||||
content: '';
|
||||
position: absolute;
|
||||
top: -2px;
|
||||
left: 0;
|
||||
right: 0;
|
||||
height: 2px;
|
||||
background-color: var(--color-primary);
|
||||
border-radius: 1px;
|
||||
}
|
||||
`}
|
||||
|
||||
${(props) =>
|
||||
props.isDragAfter &&
|
||||
`
|
||||
&::after {
|
||||
content: '';
|
||||
position: absolute;
|
||||
bottom: -2px;
|
||||
left: 0;
|
||||
right: 0;
|
||||
height: 2px;
|
||||
background-color: var(--color-primary);
|
||||
border-radius: 1px;
|
||||
}
|
||||
`}
|
||||
`
|
||||
|
||||
export const TreeNodeContent = styled.div`
|
||||
display: flex;
|
||||
align-items: center;
|
||||
flex: 1;
|
||||
min-width: 0;
|
||||
`
|
||||
|
||||
export const NodeIndent = styled.div<{ depth: number }>`
|
||||
width: ${(props) => props.depth * 16}px;
|
||||
flex-shrink: 0;
|
||||
`
|
||||
|
||||
export const ExpandIcon = styled.div`
|
||||
width: 16px;
|
||||
height: 16px;
|
||||
display: flex;
|
||||
align-items: center;
|
||||
justify-content: center;
|
||||
color: var(--color-text-2);
|
||||
margin-right: 4px;
|
||||
|
||||
&:hover {
|
||||
color: var(--color-text);
|
||||
}
|
||||
`
|
||||
|
||||
export const NodeIcon = styled.div`
|
||||
display: flex;
|
||||
align-items: center;
|
||||
justify-content: center;
|
||||
margin-right: 8px;
|
||||
color: var(--color-text-2);
|
||||
flex-shrink: 0;
|
||||
`
|
||||
|
||||
export const NodeName = styled.div`
|
||||
flex: 1;
|
||||
white-space: nowrap;
|
||||
overflow: hidden;
|
||||
text-overflow: ellipsis;
|
||||
font-size: 13px;
|
||||
color: var(--color-text);
|
||||
position: relative;
|
||||
will-change: background-position, width;
|
||||
|
||||
--color-shimmer-mid: var(--color-text-1);
|
||||
--color-shimmer-end: color-mix(in srgb, var(--color-text-1) 25%, transparent);
|
||||
|
||||
&.shimmer {
|
||||
background: linear-gradient(to left, var(--color-shimmer-end), var(--color-shimmer-mid), var(--color-shimmer-end));
|
||||
background-size: 200% 100%;
|
||||
background-clip: text;
|
||||
color: transparent;
|
||||
animation: shimmer 3s linear infinite;
|
||||
}
|
||||
|
||||
&.typing {
|
||||
display: block;
|
||||
white-space: nowrap;
|
||||
overflow: hidden;
|
||||
animation: typewriter 0.5s steps(40, end);
|
||||
}
|
||||
|
||||
@keyframes shimmer {
|
||||
0% {
|
||||
background-position: 200% 0;
|
||||
}
|
||||
100% {
|
||||
background-position: -200% 0;
|
||||
}
|
||||
}
|
||||
|
||||
@keyframes typewriter {
|
||||
from {
|
||||
width: 0;
|
||||
}
|
||||
to {
|
||||
width: 100%;
|
||||
}
|
||||
}
|
||||
`
|
||||
|
||||
export const SearchMatchesContainer = styled.div<{ depth: number }>`
|
||||
margin-left: ${(props) => props.depth * 16 + 40}px;
|
||||
margin-top: 4px;
|
||||
margin-bottom: 8px;
|
||||
padding: 6px 8px;
|
||||
background-color: var(--color-background-mute);
|
||||
border-radius: 4px;
|
||||
border-left: 2px solid var(--color-primary-soft);
|
||||
`
|
||||
|
||||
export const NodeNameContainer = styled.div`
|
||||
display: flex;
|
||||
align-items: center;
|
||||
gap: 6px;
|
||||
flex: 1;
|
||||
min-width: 0;
|
||||
`
|
||||
|
||||
export const MatchBadge = styled.span<{ matchType: string }>`
|
||||
display: inline-flex;
|
||||
align-items: center;
|
||||
padding: 0 4px;
|
||||
height: 16px;
|
||||
font-size: 10px;
|
||||
line-height: 1;
|
||||
border-radius: 2px;
|
||||
background-color: ${(props) =>
|
||||
props.matchType === 'both' ? 'var(--color-primary-soft)' : 'var(--color-background-mute)'};
|
||||
color: ${(props) => (props.matchType === 'both' ? 'var(--color-primary)' : 'var(--color-text-3)')};
|
||||
font-weight: 500;
|
||||
flex-shrink: 0;
|
||||
`
|
||||
|
||||
export const MatchItem = styled.div`
|
||||
display: flex;
|
||||
gap: 8px;
|
||||
margin-bottom: 4px;
|
||||
font-size: 12px;
|
||||
padding: 4px 6px;
|
||||
margin-left: -6px;
|
||||
margin-right: -6px;
|
||||
border-radius: 3px;
|
||||
cursor: pointer;
|
||||
transition: all 0.15s ease;
|
||||
|
||||
&:hover {
|
||||
background-color: var(--color-background-soft);
|
||||
transform: translateX(2px);
|
||||
}
|
||||
|
||||
&:active {
|
||||
background-color: var(--color-active);
|
||||
}
|
||||
|
||||
&:last-child {
|
||||
margin-bottom: 0;
|
||||
}
|
||||
`
|
||||
|
||||
export const MatchLineNumber = styled.span`
|
||||
color: var(--color-text-3);
|
||||
font-family: monospace;
|
||||
flex-shrink: 0;
|
||||
width: 30px;
|
||||
`
|
||||
|
||||
export const MatchContext = styled.div`
|
||||
color: var(--color-text-2);
|
||||
flex: 1;
|
||||
overflow: hidden;
|
||||
text-overflow: ellipsis;
|
||||
white-space: nowrap;
|
||||
font-family: monospace;
|
||||
`
|
||||
|
||||
export const MoreMatches = styled.div<{ depth: number }>`
|
||||
margin-top: 4px;
|
||||
padding: 4px 6px;
|
||||
margin-left: -6px;
|
||||
margin-right: -6px;
|
||||
font-size: 11px;
|
||||
color: var(--color-text-3);
|
||||
border-radius: 3px;
|
||||
cursor: pointer;
|
||||
display: flex;
|
||||
align-items: center;
|
||||
transition: all 0.15s ease;
|
||||
|
||||
&:hover {
|
||||
color: var(--color-text-2);
|
||||
background-color: var(--color-background-soft);
|
||||
}
|
||||
`
|
||||
|
||||
const EditInput = styled.input`
|
||||
flex: 1;
|
||||
font-size: 13px;
|
||||
`
|
||||
|
||||
const DropHintText = styled.div`
|
||||
color: var(--color-text-3);
|
||||
font-size: 12px;
|
||||
font-style: italic;
|
||||
`
|
||||
|
||||
export default TreeNode
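The `displayName` memo in this component trims long note names to a small window around the first keyword hit (up to two characters before the match, about fifty after) and adds ellipses on the trimmed sides. A hedged, standalone restatement of that logic as a pure helper — the function name is hypothetical — makes the behaviour easy to unit test:

// Hypothetical standalone version of TreeNode's displayName logic:
// keep up to 2 characters before the first case-insensitive match and
// about 50 characters after it, adding "..." on the trimmed sides.
export function truncateAroundKeyword(name: string, keyword: string, contextLength = 50): string {
  if (!name || !keyword) return name
  const matchStart = name.toLowerCase().indexOf(keyword.toLowerCase())
  if (matchStart === -1) return name

  const matchEnd = matchStart + keyword.length
  const contextStart = matchStart - Math.min(2, matchStart)
  const contextEnd = Math.min(name.length, matchEnd + contextLength)

  const prefix = contextStart > 0 ? '...' : ''
  const suffix = contextEnd < name.length ? '...' : ''
  return prefix + name.substring(contextStart, contextEnd) + suffix
}

// e.g. truncateAroundKeyword('weekly meeting notes for the platform team', 'notes')
// => '...g notes for the platform team'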
|
||||
109 src/renderer/src/pages/notes/context/NotesContexts.tsx Normal file
@@ -0,0 +1,109 @@
|
||||
import type { UseInPlaceEditReturn } from '@renderer/hooks/useInPlaceEdit'
|
||||
import type { NotesTreeNode } from '@renderer/types/note'
|
||||
import type { MenuProps } from 'antd'
|
||||
import { createContext, use } from 'react'
|
||||
|
||||
// ==================== 1. Actions Context (Static, rarely changes) ====================
|
||||
export interface NotesActionsContextType {
|
||||
getMenuItems: (node: NotesTreeNode) => MenuProps['items']
|
||||
onSelectNode: (node: NotesTreeNode) => void
|
||||
onToggleExpanded: (nodeId: string) => void
|
||||
onDropdownOpenChange: (key: string | null) => void
|
||||
}
|
||||
|
||||
export const NotesActionsContext = createContext<NotesActionsContextType | null>(null)
|
||||
|
||||
export const useNotesActions = () => {
|
||||
const context = use(NotesActionsContext)
|
||||
if (!context) {
|
||||
throw new Error('useNotesActions must be used within NotesActionsContext.Provider')
|
||||
}
|
||||
return context
|
||||
}
|
||||
|
||||
// ==================== 2. Selection Context (Low frequency updates) ====================
|
||||
export interface NotesSelectionContextType {
|
||||
selectedFolderId?: string | null
|
||||
activeNodeId?: string
|
||||
}
|
||||
|
||||
export const NotesSelectionContext = createContext<NotesSelectionContextType | null>(null)
|
||||
|
||||
export const useNotesSelection = () => {
|
||||
const context = use(NotesSelectionContext)
|
||||
if (!context) {
|
||||
throw new Error('useNotesSelection must be used within NotesSelectionContext.Provider')
|
||||
}
|
||||
return context
|
||||
}
|
||||
|
||||
// ==================== 3. Editing Context (Medium frequency updates) ====================
|
||||
export interface NotesEditingContextType {
|
||||
editingNodeId: string | null
|
||||
renamingNodeIds: Set<string>
|
||||
newlyRenamedNodeIds: Set<string>
|
||||
inPlaceEdit: UseInPlaceEditReturn
|
||||
}
|
||||
|
||||
export const NotesEditingContext = createContext<NotesEditingContextType | null>(null)
|
||||
|
||||
export const useNotesEditing = () => {
|
||||
const context = use(NotesEditingContext)
|
||||
if (!context) {
|
||||
throw new Error('useNotesEditing must be used within NotesEditingContext.Provider')
|
||||
}
|
||||
return context
|
||||
}
|
||||
|
||||
// ==================== 4. Drag Context (High frequency updates) ====================
|
||||
export interface NotesDragContextType {
|
||||
draggedNodeId: string | null
|
||||
dragOverNodeId: string | null
|
||||
dragPosition: 'before' | 'inside' | 'after'
|
||||
onDragStart: (e: React.DragEvent, node: NotesTreeNode) => void
|
||||
onDragOver: (e: React.DragEvent, node: NotesTreeNode) => void
|
||||
onDragLeave: () => void
|
||||
onDrop: (e: React.DragEvent, node: NotesTreeNode) => void
|
||||
onDragEnd: () => void
|
||||
}
|
||||
|
||||
export const NotesDragContext = createContext<NotesDragContextType | null>(null)
|
||||
|
||||
export const useNotesDrag = () => {
|
||||
const context = use(NotesDragContext)
|
||||
if (!context) {
|
||||
throw new Error('useNotesDrag must be used within NotesDragContext.Provider')
|
||||
}
|
||||
return context
|
||||
}
|
||||
|
||||
// ==================== 5. Search Context (Medium frequency updates) ====================
|
||||
export interface NotesSearchContextType {
|
||||
searchKeyword: string
|
||||
showMatches: boolean
|
||||
}
|
||||
|
||||
export const NotesSearchContext = createContext<NotesSearchContextType | null>(null)
|
||||
|
||||
export const useNotesSearch = () => {
|
||||
const context = use(NotesSearchContext)
|
||||
if (!context) {
|
||||
throw new Error('useNotesSearch must be used within NotesSearchContext.Provider')
|
||||
}
|
||||
return context
|
||||
}
|
||||
|
||||
// ==================== 6. UI Context (Medium frequency updates) ====================
|
||||
export interface NotesUIContextType {
|
||||
openDropdownKey: string | null
|
||||
}
|
||||
|
||||
export const NotesUIContext = createContext<NotesUIContextType | null>(null)
|
||||
|
||||
export const useNotesUI = () => {
|
||||
const context = use(NotesUIContext)
|
||||
if (!context) {
|
||||
throw new Error('useNotesUI must be used within NotesUIContext.Provider')
|
||||
}
|
||||
return context
|
||||
}
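Splitting the notes state into six contexts means a TreeNode only re-renders when the slice it actually reads changes. One possible way a parent such as NotesPage could compose the providers; the wiring below is a sketch under that assumption, not the actual NotesPage code, and every value is expected to be memoized by the caller.

import type React from 'react'

import {
  NotesActionsContext,
  type NotesActionsContextType,
  NotesDragContext,
  type NotesDragContextType,
  NotesEditingContext,
  type NotesEditingContextType,
  NotesSearchContext,
  type NotesSearchContextType,
  NotesSelectionContext,
  type NotesSelectionContextType,
  NotesUIContext,
  type NotesUIContextType
} from '@renderer/pages/notes/context/NotesContexts'

interface NotesProvidersProps {
  children: React.ReactNode
  actions: NotesActionsContextType
  selection: NotesSelectionContextType
  editing: NotesEditingContextType
  drag: NotesDragContextType
  search: NotesSearchContextType
  ui: NotesUIContextType
}

// Each value should be memoized by the caller so that, for example, a drag
// update does not recreate the actions object and re-render every TreeNode.
const NotesProviders = ({ children, actions, selection, editing, drag, search, ui }: NotesProvidersProps) => (
  <NotesActionsContext.Provider value={actions}>
    <NotesSelectionContext.Provider value={selection}>
      <NotesEditingContext.Provider value={editing}>
        <NotesDragContext.Provider value={drag}>
          <NotesSearchContext.Provider value={search}>
            <NotesUIContext.Provider value={ui}>{children}</NotesUIContext.Provider>
          </NotesSearchContext.Provider>
        </NotesDragContext.Provider>
      </NotesEditingContext.Provider>
    </NotesSelectionContext.Provider>
  </NotesActionsContext.Provider>
)

export default NotesProviders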
|
||||
101 src/renderer/src/pages/notes/hooks/useNotesDragAndDrop.ts Normal file
@@ -0,0 +1,101 @@
|
||||
import type { NotesTreeNode } from '@renderer/types/note'
|
||||
import { useCallback, useRef, useState } from 'react'
|
||||
|
||||
interface UseNotesDragAndDropProps {
|
||||
onMoveNode: (sourceNodeId: string, targetNodeId: string, position: 'before' | 'after' | 'inside') => void
|
||||
}
|
||||
|
||||
export const useNotesDragAndDrop = ({ onMoveNode }: UseNotesDragAndDropProps) => {
|
||||
const [draggedNodeId, setDraggedNodeId] = useState<string | null>(null)
|
||||
const [dragOverNodeId, setDragOverNodeId] = useState<string | null>(null)
|
||||
const [dragPosition, setDragPosition] = useState<'before' | 'inside' | 'after'>('inside')
|
||||
const dragNodeRef = useRef<HTMLDivElement | null>(null)
|
||||
|
||||
const handleDragStart = useCallback((e: React.DragEvent, node: NotesTreeNode) => {
|
||||
setDraggedNodeId(node.id)
|
||||
e.dataTransfer.effectAllowed = 'move'
|
||||
e.dataTransfer.setData('text/plain', node.id)
|
||||
|
||||
dragNodeRef.current = e.currentTarget as HTMLDivElement
|
||||
|
||||
// Create ghost element
|
||||
if (e.currentTarget.parentElement) {
|
||||
const rect = e.currentTarget.getBoundingClientRect()
|
||||
const ghostElement = e.currentTarget.cloneNode(true) as HTMLElement
|
||||
ghostElement.style.width = `${rect.width}px`
|
||||
ghostElement.style.opacity = '0.7'
|
||||
ghostElement.style.position = 'absolute'
|
||||
ghostElement.style.top = '-1000px'
|
||||
document.body.appendChild(ghostElement)
|
||||
e.dataTransfer.setDragImage(ghostElement, 10, 10)
|
||||
setTimeout(() => {
|
||||
document.body.removeChild(ghostElement)
|
||||
}, 0)
|
||||
}
|
||||
}, [])
|
||||
|
||||
const handleDragOver = useCallback(
|
||||
(e: React.DragEvent, node: NotesTreeNode) => {
|
||||
e.preventDefault()
|
||||
e.dataTransfer.dropEffect = 'move'
|
||||
|
||||
if (draggedNodeId === node.id) {
|
||||
return
|
||||
}
|
||||
|
||||
setDragOverNodeId(node.id)
|
||||
|
||||
const rect = (e.currentTarget as HTMLElement).getBoundingClientRect()
|
||||
const mouseY = e.clientY
|
||||
const thresholdTop = rect.top + rect.height * 0.3
|
||||
const thresholdBottom = rect.bottom - rect.height * 0.3
|
||||
|
||||
if (mouseY < thresholdTop) {
|
||||
setDragPosition('before')
|
||||
} else if (mouseY > thresholdBottom) {
|
||||
setDragPosition('after')
|
||||
} else {
|
||||
setDragPosition(node.type === 'folder' ? 'inside' : 'after')
|
||||
}
|
||||
},
|
||||
[draggedNodeId]
|
||||
)
|
||||
|
||||
const handleDragLeave = useCallback(() => {
|
||||
setDragOverNodeId(null)
|
||||
setDragPosition('inside')
|
||||
}, [])
|
||||
|
||||
const handleDrop = useCallback(
|
||||
(e: React.DragEvent, targetNode: NotesTreeNode) => {
|
||||
e.preventDefault()
|
||||
const draggedId = e.dataTransfer.getData('text/plain')
|
||||
|
||||
if (draggedId && draggedId !== targetNode.id) {
|
||||
onMoveNode(draggedId, targetNode.id, dragPosition)
|
||||
}
|
||||
|
||||
setDraggedNodeId(null)
|
||||
setDragOverNodeId(null)
|
||||
setDragPosition('inside')
|
||||
},
|
||||
[onMoveNode, dragPosition]
|
||||
)
|
||||
|
||||
const handleDragEnd = useCallback(() => {
|
||||
setDraggedNodeId(null)
|
||||
setDragOverNodeId(null)
|
||||
setDragPosition('inside')
|
||||
}, [])
|
||||
|
||||
return {
|
||||
draggedNodeId,
|
||||
dragOverNodeId,
|
||||
dragPosition,
|
||||
handleDragStart,
|
||||
handleDragOver,
|
||||
handleDragLeave,
|
||||
handleDrop,
|
||||
handleDragEnd
|
||||
}
|
||||
}
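The drop position in handleDragOver above is decided purely from where the pointer sits inside the hovered row: the top 30% maps to 'before', the bottom 30% to 'after', and the middle band to 'inside' for folders (files fall back to 'after'). A pure-function sketch of that rule, with hypothetical names, which can be tested without DOM events:

type DragPosition = 'before' | 'inside' | 'after'

// Hypothetical extraction of the threshold rule used by handleDragOver above.
function resolveDragPosition(
  mouseY: number,
  rect: { top: number; bottom: number; height: number },
  targetIsFolder: boolean
): DragPosition {
  const thresholdTop = rect.top + rect.height * 0.3
  const thresholdBottom = rect.bottom - rect.height * 0.3

  if (mouseY < thresholdTop) return 'before'
  if (mouseY > thresholdBottom) return 'after'
  return targetIsFolder ? 'inside' : 'after'
}

// Example: for a 32px-tall row at top=100, a pointer at y=105 lands in the top
// 30% (threshold 109.6), so the drop is treated as "before" the hovered node.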
|
||||
94 src/renderer/src/pages/notes/hooks/useNotesEditing.ts Normal file
@@ -0,0 +1,94 @@
|
||||
import { loggerService } from '@logger'
|
||||
import { useInPlaceEdit } from '@renderer/hooks/useInPlaceEdit'
|
||||
import { fetchNoteSummary } from '@renderer/services/ApiService'
|
||||
import type { NotesTreeNode } from '@renderer/types/note'
|
||||
import { useCallback, useState } from 'react'
|
||||
import { useTranslation } from 'react-i18next'
|
||||
|
||||
const logger = loggerService.withContext('UseNotesEditing')
|
||||
|
||||
interface UseNotesEditingProps {
|
||||
onRenameNode: (nodeId: string, newName: string) => void
|
||||
}
|
||||
|
||||
export const useNotesEditing = ({ onRenameNode }: UseNotesEditingProps) => {
|
||||
const { t } = useTranslation()
|
||||
const [editingNodeId, setEditingNodeId] = useState<string | null>(null)
|
||||
const [renamingNodeIds, setRenamingNodeIds] = useState<Set<string>>(new Set())
|
||||
const [newlyRenamedNodeIds, setNewlyRenamedNodeIds] = useState<Set<string>>(new Set())
|
||||
|
||||
const inPlaceEdit = useInPlaceEdit({
|
||||
onSave: (newName: string) => {
|
||||
if (editingNodeId && newName) {
|
||||
onRenameNode(editingNodeId, newName)
|
||||
window.toast.success(t('common.saved'))
|
||||
logger.debug(`Renamed node ${editingNodeId} to "${newName}"`)
|
||||
}
|
||||
setEditingNodeId(null)
|
||||
},
|
||||
onCancel: () => {
|
||||
setEditingNodeId(null)
|
||||
}
|
||||
})
|
||||
|
||||
const handleStartEdit = useCallback(
|
||||
(node: NotesTreeNode) => {
|
||||
setEditingNodeId(node.id)
|
||||
inPlaceEdit.startEdit(node.name)
|
||||
},
|
||||
[inPlaceEdit]
|
||||
)
|
||||
|
||||
const handleAutoRename = useCallback(
|
||||
async (note: NotesTreeNode) => {
|
||||
if (note.type !== 'file') return
|
||||
|
||||
setRenamingNodeIds((prev) => new Set(prev).add(note.id))
|
||||
try {
|
||||
const content = await window.api.file.readExternal(note.externalPath)
|
||||
if (!content || content.trim().length === 0) {
|
||||
window.toast.warning(t('notes.auto_rename.empty_note'))
|
||||
return
|
||||
}
|
||||
|
||||
const summaryText = await fetchNoteSummary({ content })
|
||||
if (summaryText) {
|
||||
onRenameNode(note.id, summaryText)
|
||||
window.toast.success(t('notes.auto_rename.success'))
|
||||
} else {
|
||||
window.toast.error(t('notes.auto_rename.failed'))
|
||||
}
|
||||
} catch (error) {
|
||||
window.toast.error(t('notes.auto_rename.failed'))
|
||||
logger.error(`Failed to auto-rename note: ${error}`)
|
||||
} finally {
|
||||
setRenamingNodeIds((prev) => {
|
||||
const next = new Set(prev)
|
||||
next.delete(note.id)
|
||||
return next
|
||||
})
|
||||
|
||||
setNewlyRenamedNodeIds((prev) => new Set(prev).add(note.id))
|
||||
|
||||
setTimeout(() => {
|
||||
setNewlyRenamedNodeIds((prev) => {
|
||||
const next = new Set(prev)
|
||||
next.delete(note.id)
|
||||
return next
|
||||
})
|
||||
}, 700)
|
||||
}
|
||||
},
|
||||
[onRenameNode, t]
|
||||
)
|
||||
|
||||
return {
|
||||
editingNodeId,
|
||||
renamingNodeIds,
|
||||
newlyRenamedNodeIds,
|
||||
inPlaceEdit,
|
||||
handleStartEdit,
|
||||
handleAutoRename,
|
||||
setEditingNodeId
|
||||
}
|
||||
}
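A minimal usage sketch for this hook, with a placeholder rename handler standing in for the real tree update that NotesPage wires up; the import path and component name are assumptions.

import type { NotesTreeNode } from '@renderer/types/note'

import { useNotesEditing } from '@renderer/pages/notes/hooks/useNotesEditing'

// `renameInTree` is a placeholder for whatever rename handler NotesPage passes in.
const RenameButtons = ({
  node,
  renameInTree
}: {
  node: NotesTreeNode
  renameInTree: (nodeId: string, newName: string) => void
}) => {
  const { handleStartEdit, handleAutoRename, renamingNodeIds } = useNotesEditing({ onRenameNode: renameInTree })

  return (
    <>
      <button onClick={() => handleStartEdit(node)}>Rename</button>
      <button disabled={renamingNodeIds.has(node.id)} onClick={() => handleAutoRename(node)}>
        Auto rename (AI summary)
      </button>
    </>
  )
}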
|
||||
112 src/renderer/src/pages/notes/hooks/useNotesFileUpload.ts Normal file
@@ -0,0 +1,112 @@
|
||||
import { useCallback } from 'react'
|
||||
|
||||
interface UseNotesFileUploadProps {
|
||||
onUploadFiles: (files: File[]) => void
|
||||
setIsDragOverSidebar: (isDragOver: boolean) => void
|
||||
}
|
||||
|
||||
export const useNotesFileUpload = ({ onUploadFiles, setIsDragOverSidebar }: UseNotesFileUploadProps) => {
|
||||
const handleDropFiles = useCallback(
|
||||
async (e: React.DragEvent) => {
|
||||
e.preventDefault()
|
||||
setIsDragOverSidebar(false)
|
||||
|
||||
// Handle folder drag-and-drop: read full path info from dataTransfer.items
|
||||
const items = Array.from(e.dataTransfer.items)
|
||||
const files: File[] = []
|
||||
|
||||
const processEntry = async (entry: FileSystemEntry, path: string = '') => {
|
||||
if (entry.isFile) {
|
||||
const fileEntry = entry as FileSystemFileEntry
|
||||
return new Promise<void>((resolve) => {
|
||||
fileEntry.file((file) => {
|
||||
// Manually set webkitRelativePath to preserve the folder structure
|
||||
Object.defineProperty(file, 'webkitRelativePath', {
|
||||
value: path + file.name,
|
||||
writable: false
|
||||
})
|
||||
files.push(file)
|
||||
resolve()
|
||||
})
|
||||
})
|
||||
} else if (entry.isDirectory) {
|
||||
const dirEntry = entry as FileSystemDirectoryEntry
|
||||
const reader = dirEntry.createReader()
|
||||
return new Promise<void>((resolve) => {
|
||||
reader.readEntries(async (entries) => {
|
||||
const promises = entries.map((subEntry) => processEntry(subEntry, path + entry.name + '/'))
|
||||
await Promise.all(promises)
|
||||
resolve()
|
||||
})
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
// If the DataTransferItem API is supported (folder drag-and-drop)
|
||||
if (items.length > 0 && items[0].webkitGetAsEntry()) {
|
||||
const promises = items.map((item) => {
|
||||
const entry = item.webkitGetAsEntry()
|
||||
return entry ? processEntry(entry) : Promise.resolve()
|
||||
})
|
||||
|
||||
await Promise.all(promises)
|
||||
|
||||
if (files.length > 0) {
|
||||
onUploadFiles(files)
|
||||
}
|
||||
} else {
|
||||
const regularFiles = Array.from(e.dataTransfer.files)
|
||||
if (regularFiles.length > 0) {
|
||||
onUploadFiles(regularFiles)
|
||||
}
|
||||
}
|
||||
},
|
||||
[onUploadFiles, setIsDragOverSidebar]
|
||||
)
|
||||
|
||||
const handleSelectFiles = useCallback(() => {
|
||||
const fileInput = document.createElement('input')
|
||||
fileInput.type = 'file'
|
||||
fileInput.multiple = true
|
||||
fileInput.accept = '.md,.markdown'
|
||||
fileInput.webkitdirectory = false
|
||||
|
||||
fileInput.onchange = (e) => {
|
||||
const target = e.target as HTMLInputElement
|
||||
if (target.files && target.files.length > 0) {
|
||||
const selectedFiles = Array.from(target.files)
|
||||
onUploadFiles(selectedFiles)
|
||||
}
|
||||
fileInput.remove()
|
||||
}
|
||||
|
||||
fileInput.click()
|
||||
}, [onUploadFiles])
|
||||
|
||||
const handleSelectFolder = useCallback(() => {
|
||||
const folderInput = document.createElement('input')
|
||||
folderInput.type = 'file'
|
||||
// @ts-ignore - webkitdirectory is a non-standard attribute
|
||||
folderInput.webkitdirectory = true
|
||||
// @ts-ignore - directory is a non-standard attribute
|
||||
folderInput.directory = true
|
||||
folderInput.multiple = true
|
||||
|
||||
folderInput.onchange = (e) => {
|
||||
const target = e.target as HTMLInputElement
|
||||
if (target.files && target.files.length > 0) {
|
||||
const selectedFiles = Array.from(target.files)
|
||||
onUploadFiles(selectedFiles)
|
||||
}
|
||||
folderInput.remove()
|
||||
}
|
||||
|
||||
folderInput.click()
|
||||
}, [onUploadFiles])
|
||||
|
||||
return {
|
||||
handleDropFiles,
|
||||
handleSelectFiles,
|
||||
handleSelectFolder
|
||||
}
|
||||
}
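Worth noting as a possible follow-up: `FileSystemDirectoryReader.readEntries` only delivers one batch per call (Chromium caps a batch at roughly 100 entries), so the single `readEntries` call in `processEntry` above reads just the first batch of a large directory. A drain helper along these lines — names hypothetical — could be swapped in if that ever matters in practice:

// Assumes the same FileSystemDirectoryEntry types used in the hook above.
async function readAllEntries(dirEntry: FileSystemDirectoryEntry): Promise<FileSystemEntry[]> {
  const reader = dirEntry.createReader()
  const all: FileSystemEntry[] = []
  // Keep reading until the reader reports an empty batch.
  for (;;) {
    const batch = await new Promise<FileSystemEntry[]>((resolve, reject) => reader.readEntries(resolve, reject))
    if (batch.length === 0) break
    all.push(...batch)
  }
  return all
}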
|
||||
263 src/renderer/src/pages/notes/hooks/useNotesMenu.tsx Normal file
@@ -0,0 +1,263 @@
|
||||
import { loggerService } from '@logger'
|
||||
import { DeleteIcon } from '@renderer/components/Icons'
|
||||
import SaveToKnowledgePopup from '@renderer/components/Popups/SaveToKnowledgePopup'
|
||||
import { useKnowledgeBases } from '@renderer/hooks/useKnowledge'
|
||||
import type { RootState } from '@renderer/store'
|
||||
import type { NotesTreeNode } from '@renderer/types/note'
|
||||
import { exportNote } from '@renderer/utils/export'
|
||||
import type { MenuProps } from 'antd'
|
||||
import type { ItemType, MenuItemType } from 'antd/es/menu/interface'
|
||||
import { Edit3, FilePlus, FileSearch, Folder, FolderOpen, Sparkles, Star, StarOff, UploadIcon } from 'lucide-react'
|
||||
import { useCallback } from 'react'
|
||||
import { useTranslation } from 'react-i18next'
|
||||
import { useSelector } from 'react-redux'
|
||||
|
||||
const logger = loggerService.withContext('UseNotesMenu')
|
||||
|
||||
interface UseNotesMenuProps {
|
||||
renamingNodeIds: Set<string>
|
||||
onCreateNote: (name: string, targetFolderId?: string) => void
|
||||
onCreateFolder: (name: string, targetFolderId?: string) => void
|
||||
onRenameNode: (nodeId: string, newName: string) => void
|
||||
onToggleStar: (nodeId: string) => void
|
||||
onDeleteNode: (nodeId: string) => void
|
||||
onSelectNode: (node: NotesTreeNode) => void
|
||||
handleStartEdit: (node: NotesTreeNode) => void
|
||||
handleAutoRename: (node: NotesTreeNode) => void
|
||||
activeNode?: NotesTreeNode | null
|
||||
}
|
||||
|
||||
export const useNotesMenu = ({
|
||||
renamingNodeIds,
|
||||
onCreateNote,
|
||||
onCreateFolder,
|
||||
onToggleStar,
|
||||
onDeleteNode,
|
||||
onSelectNode,
|
||||
handleStartEdit,
|
||||
handleAutoRename,
|
||||
activeNode
|
||||
}: UseNotesMenuProps) => {
|
||||
const { t } = useTranslation()
|
||||
const { bases } = useKnowledgeBases()
|
||||
const exportMenuOptions = useSelector((state: RootState) => state.settings.exportMenuOptions)
|
||||
|
||||
const handleExportKnowledge = useCallback(
|
||||
async (note: NotesTreeNode) => {
|
||||
try {
|
||||
if (bases.length === 0) {
|
||||
window.toast.warning(t('chat.save.knowledge.empty.no_knowledge_base'))
|
||||
return
|
||||
}
|
||||
|
||||
const result = await SaveToKnowledgePopup.showForNote(note)
|
||||
|
||||
if (result?.success) {
|
||||
window.toast.success(t('notes.export_success', { count: result.savedCount }))
|
||||
}
|
||||
} catch (error) {
|
||||
window.toast.error(t('notes.export_failed'))
|
||||
logger.error(`Failed to export note to knowledge base: ${error}`)
|
||||
}
|
||||
},
|
||||
[bases.length, t]
|
||||
)
|
||||
|
||||
const handleImageAction = useCallback(
|
||||
async (node: NotesTreeNode, platform: 'copyImage' | 'exportImage') => {
|
||||
try {
|
||||
if (activeNode?.id !== node.id) {
|
||||
onSelectNode(node)
|
||||
await new Promise((resolve) => setTimeout(resolve, 500))
|
||||
}
|
||||
|
||||
await exportNote({ node, platform })
|
||||
} catch (error) {
|
||||
logger.error(`Failed to ${platform === 'copyImage' ? 'copy' : 'export'} as image:`, error as Error)
|
||||
window.toast.error(t('common.copy_failed'))
|
||||
}
|
||||
},
|
||||
[activeNode, onSelectNode, t]
|
||||
)
|
||||
|
||||
const handleDeleteNodeWrapper = useCallback(
|
||||
(node: NotesTreeNode) => {
|
||||
const confirmText =
|
||||
node.type === 'folder'
|
||||
? t('notes.delete_folder_confirm', { name: node.name })
|
||||
: t('notes.delete_note_confirm', { name: node.name })
|
||||
|
||||
window.modal.confirm({
|
||||
title: t('notes.delete'),
|
||||
content: confirmText,
|
||||
centered: true,
|
||||
okButtonProps: { danger: true },
|
||||
onOk: () => {
|
||||
onDeleteNode(node.id)
|
||||
}
|
||||
})
|
||||
},
|
||||
[onDeleteNode, t]
|
||||
)
|
||||
|
||||
const getMenuItems = useCallback(
|
||||
(node: NotesTreeNode) => {
|
||||
const baseMenuItems: MenuProps['items'] = []
|
||||
|
||||
// only show auto rename for file for now
|
||||
if (node.type !== 'folder') {
|
||||
baseMenuItems.push({
|
||||
label: t('notes.auto_rename.label'),
|
||||
key: 'auto-rename',
|
||||
icon: <Sparkles size={14} />,
|
||||
disabled: renamingNodeIds.has(node.id),
|
||||
onClick: () => {
|
||||
handleAutoRename(node)
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
if (node.type === 'folder') {
|
||||
baseMenuItems.push(
|
||||
{
|
||||
label: t('notes.new_note'),
|
||||
key: 'new_note',
|
||||
icon: <FilePlus size={14} />,
|
||||
onClick: () => {
|
||||
onCreateNote(t('notes.untitled_note'), node.id)
|
||||
}
|
||||
},
|
||||
{
|
||||
label: t('notes.new_folder'),
|
||||
key: 'new_folder',
|
||||
icon: <Folder size={14} />,
|
||||
onClick: () => {
|
||||
onCreateFolder(t('notes.untitled_folder'), node.id)
|
||||
}
|
||||
},
|
||||
{ type: 'divider' }
|
||||
)
|
||||
}
|
||||
|
||||
baseMenuItems.push(
|
||||
{
|
||||
label: t('notes.rename'),
|
||||
key: 'rename',
|
||||
icon: <Edit3 size={14} />,
|
||||
onClick: () => {
|
||||
handleStartEdit(node)
|
||||
}
|
||||
},
|
||||
{
|
||||
label: t('notes.open_outside'),
|
||||
key: 'open_outside',
|
||||
icon: <FolderOpen size={14} />,
|
||||
onClick: () => {
|
||||
window.api.openPath(node.externalPath)
|
||||
}
|
||||
}
|
||||
)
|
||||
if (node.type !== 'folder') {
|
||||
baseMenuItems.push(
|
||||
{
|
||||
label: node.isStarred ? t('notes.unstar') : t('notes.star'),
|
||||
key: 'star',
|
||||
icon: node.isStarred ? <StarOff size={14} /> : <Star size={14} />,
|
||||
onClick: () => {
|
||||
onToggleStar(node.id)
|
||||
}
|
||||
},
|
||||
{
|
||||
label: t('notes.export_knowledge'),
|
||||
key: 'export_knowledge',
|
||||
icon: <FileSearch size={14} />,
|
||||
onClick: () => {
|
||||
handleExportKnowledge(node)
|
||||
}
|
||||
},
|
||||
{
|
||||
label: t('chat.topics.export.title'),
|
||||
key: 'export',
|
||||
icon: <UploadIcon size={14} />,
|
||||
children: [
|
||||
exportMenuOptions.image && {
|
||||
label: t('chat.topics.copy.image'),
|
||||
key: 'copy-image',
|
||||
onClick: () => handleImageAction(node, 'copyImage')
|
||||
},
|
||||
exportMenuOptions.image && {
|
||||
label: t('chat.topics.export.image'),
|
||||
key: 'export-image',
|
||||
onClick: () => handleImageAction(node, 'exportImage')
|
||||
},
|
||||
exportMenuOptions.markdown && {
|
||||
label: t('chat.topics.export.md.label'),
|
||||
key: 'markdown',
|
||||
onClick: () => exportNote({ node, platform: 'markdown' })
|
||||
},
|
||||
exportMenuOptions.docx && {
|
||||
label: t('chat.topics.export.word'),
|
||||
key: 'word',
|
||||
onClick: () => exportNote({ node, platform: 'docx' })
|
||||
},
|
||||
exportMenuOptions.notion && {
|
||||
label: t('chat.topics.export.notion'),
|
||||
key: 'notion',
|
||||
onClick: () => exportNote({ node, platform: 'notion' })
|
||||
},
|
||||
exportMenuOptions.yuque && {
|
||||
label: t('chat.topics.export.yuque'),
|
||||
key: 'yuque',
|
||||
onClick: () => exportNote({ node, platform: 'yuque' })
|
||||
},
|
||||
exportMenuOptions.obsidian && {
|
||||
label: t('chat.topics.export.obsidian'),
|
||||
key: 'obsidian',
|
||||
onClick: () => exportNote({ node, platform: 'obsidian' })
|
||||
},
|
||||
exportMenuOptions.joplin && {
|
||||
label: t('chat.topics.export.joplin'),
|
||||
key: 'joplin',
|
||||
onClick: () => exportNote({ node, platform: 'joplin' })
|
||||
},
|
||||
exportMenuOptions.siyuan && {
|
||||
label: t('chat.topics.export.siyuan'),
|
||||
key: 'siyuan',
|
||||
onClick: () => exportNote({ node, platform: 'siyuan' })
|
||||
}
|
||||
].filter(Boolean) as ItemType<MenuItemType>[]
|
||||
}
|
||||
)
|
||||
}
|
||||
baseMenuItems.push(
|
||||
{ type: 'divider' },
|
||||
{
|
||||
label: t('notes.delete'),
|
||||
danger: true,
|
||||
key: 'delete',
|
||||
icon: <DeleteIcon size={14} className="lucide-custom" />,
|
||||
onClick: () => {
|
||||
handleDeleteNodeWrapper(node)
|
||||
}
|
||||
}
|
||||
)
|
||||
|
||||
return baseMenuItems
|
||||
},
|
||||
[
|
||||
t,
|
||||
handleStartEdit,
|
||||
onToggleStar,
|
||||
handleExportKnowledge,
|
||||
handleImageAction,
|
||||
handleDeleteNodeWrapper,
|
||||
renamingNodeIds,
|
||||
handleAutoRename,
|
||||
exportMenuOptions,
|
||||
onCreateNote,
|
||||
onCreateFolder
|
||||
]
|
||||
)
|
||||
|
||||
return { getMenuItems }
|
||||
}
|
||||
@ -31,6 +31,8 @@ import { getErrorMessage, uuid } from '@renderer/utils'
|
||||
import { isNewApiProvider } from '@renderer/utils/provider'
|
||||
import { Empty, InputNumber, Segmented, Select, Upload } from 'antd'
|
||||
import TextArea from 'antd/es/input/TextArea'
|
||||
import type { RcFile } from 'antd/es/upload'
|
||||
import type { UploadFile } from 'antd/es/upload/interface'
|
||||
import type { FC } from 'react'
|
||||
import React from 'react'
|
||||
import { useCallback, useEffect, useMemo, useRef, useState } from 'react'
|
||||
@ -553,7 +555,31 @@ const NewApiPage: FC<{ Options: string[] }> = ({ Options }) => {
|
||||
maxCount={16}
|
||||
showUploadList={true}
|
||||
listType="picture"
|
||||
beforeUpload={handleImageUpload}>
|
||||
beforeUpload={handleImageUpload}
|
||||
fileList={editImageFiles.map((file, idx): UploadFile<any> => {
|
||||
const rcFile: RcFile = {
|
||||
...file,
|
||||
uid: String(idx),
|
||||
lastModifiedDate: file.lastModified ? new Date(file.lastModified) : new Date()
|
||||
}
|
||||
return {
|
||||
uid: rcFile.uid,
|
||||
name: rcFile.name || `image_${idx + 1}.png`,
|
||||
status: 'done',
|
||||
url: URL.createObjectURL(file),
|
||||
originFileObj: rcFile,
|
||||
lastModifiedDate: rcFile.lastModifiedDate
|
||||
}
|
||||
})}
|
||||
onRemove={(file) => {
|
||||
setEditImageFiles((prev) =>
|
||||
prev.filter((f) => {
|
||||
const idx = prev.indexOf(f)
|
||||
return String(idx) !== file.uid
|
||||
})
|
||||
)
|
||||
return true
|
||||
}}>
|
||||
<ImagePlaceholder>
|
||||
<ImageSizeImage src={IcImageUp} theme={theme} />
|
||||
</ImagePlaceholder>
|
||||
|
||||
@ -64,7 +64,7 @@ export const AccessibleDirsSetting = ({ base, update }: AccessibleDirsSettingPro
|
||||
return (
|
||||
<SettingsItem>
|
||||
<SettingsTitle
|
||||
actions={
|
||||
contentAfter={
|
||||
<Tooltip title={t('agent.session.accessible_paths.add')}>
|
||||
<Button type="text" icon={<Plus size={16} />} shape="circle" onClick={addAccessiblePath} />
|
||||
</Tooltip>
|
||||
|
||||
@ -69,7 +69,7 @@ export const AdvancedSettings: React.FC<AdvancedSettingsProps> = ({ agentBase, u
|
||||
<SettingsContainer>
|
||||
<SettingsItem divider={false}>
|
||||
<SettingsTitle
|
||||
actions={
|
||||
contentAfter={
|
||||
<Tooltip title={t('agent.settings.advance.maxTurns.description')} placement="left">
|
||||
<Info size={16} className="text-foreground-400" />
|
||||
</Tooltip>
|
||||
|
||||
@ -1,3 +1,4 @@
|
||||
import { HelpTooltip } from '@renderer/components/TooltipIcons'
|
||||
import SelectAgentBaseModelButton from '@renderer/pages/home/components/SelectAgentBaseModelButton'
|
||||
import type { AgentBaseWithId, ApiModel, UpdateAgentFunctionUnion } from '@renderer/types'
|
||||
import { useTranslation } from 'react-i18next'
|
||||
@ -22,7 +23,9 @@ export const ModelSetting = ({ base, update, isDisabled }: ModelSettingProps) =>
|
||||
|
||||
return (
|
||||
<SettingsItem inline>
|
||||
<SettingsTitle id="model">{t('common.model')}</SettingsTitle>
|
||||
<SettingsTitle id="model" contentAfter={<HelpTooltip title={t('agent.add.model.tooltip')} />}>
|
||||
{t('common.model')}
|
||||
</SettingsTitle>
|
||||
<SelectAgentBaseModelButton
|
||||
agentBase={base}
|
||||
onSelect={async (model) => {
|
||||
|
||||
@ -9,14 +9,14 @@ import styled from 'styled-components'
|
||||
import { SettingDivider } from '..'
|
||||
|
||||
export interface SettingsTitleProps extends React.ComponentPropsWithRef<'div'> {
|
||||
actions?: ReactNode
|
||||
contentAfter?: ReactNode
|
||||
}
|
||||
|
||||
export const SettingsTitle: React.FC<SettingsTitleProps> = ({ children, actions }) => {
|
||||
export const SettingsTitle: React.FC<SettingsTitleProps> = ({ children, contentAfter }) => {
|
||||
return (
|
||||
<div className={cn(actions ? 'justify-between' : undefined, 'mb-1 flex items-center gap-2')}>
|
||||
<div className={cn(contentAfter ? 'justify-between' : undefined, 'mb-1 flex items-center gap-2')}>
|
||||
<span className="flex items-center gap-1 font-bold">{children}</span>
|
||||
{actions !== undefined && actions}
|
||||
{contentAfter !== undefined && contentAfter}
|
||||
</div>
|
||||
)
|
||||
}
|
||||
|
||||
@ -2,8 +2,6 @@ import { TopView } from '@renderer/components/TopView'
|
||||
import { useAssistants, useDefaultModel } from '@renderer/hooks/useAssistant'
|
||||
import { useProvider } from '@renderer/hooks/useProvider'
|
||||
import ModelEditContent from '@renderer/pages/settings/ProviderSettings/EditModelPopup/ModelEditContent'
|
||||
import { useAppDispatch } from '@renderer/store'
|
||||
import { setModel } from '@renderer/store/assistants'
|
||||
import type { Model, Provider } from '@renderer/types'
|
||||
import React, { useCallback, useState } from 'react'
|
||||
|
||||
@ -19,9 +17,9 @@ interface Props extends ShowParams {
|
||||
const PopupContainer: React.FC<Props> = ({ provider: _provider, model, resolve }) => {
|
||||
const [open, setOpen] = useState(true)
|
||||
const { provider, updateProvider, models } = useProvider(_provider.id)
|
||||
const { assistants } = useAssistants()
|
||||
const { defaultModel, setDefaultModel } = useDefaultModel()
|
||||
const dispatch = useAppDispatch()
|
||||
const { assistants, updateAssistants } = useAssistants()
|
||||
const { defaultModel, setDefaultModel, translateModel, setTranslateModel, quickModel, setQuickModel } =
|
||||
useDefaultModel()
|
||||
|
||||
const onOk = () => {
|
||||
setOpen(false)
|
||||
@ -42,22 +40,46 @@ const PopupContainer: React.FC<Props> = ({ provider: _provider, model, resolve }
|
||||
|
||||
updateProvider({ models: updatedModels })
|
||||
|
||||
assistants.forEach((assistant) => {
|
||||
if (assistant?.model?.id === updatedModel.id && assistant.model.provider === provider.id) {
|
||||
dispatch(
|
||||
setModel({
|
||||
assistantId: assistant.id,
|
||||
model: updatedModel
|
||||
})
|
||||
)
|
||||
}
|
||||
})
|
||||
updateAssistants(
|
||||
assistants.map((a) => {
|
||||
let model = a.model
|
||||
let defaultModel = a.defaultModel
|
||||
if (a.model?.id === updatedModel.id && a.model.provider === provider.id) {
|
||||
model = updatedModel
|
||||
}
|
||||
if (a.defaultModel?.id === updatedModel.id && a.defaultModel?.provider === provider.id) {
|
||||
defaultModel = updatedModel
|
||||
}
|
||||
return { ...a, model, defaultModel }
|
||||
})
|
||||
)
|
||||
|
||||
if (defaultModel?.id === updatedModel.id && defaultModel?.provider === provider.id) {
|
||||
setDefaultModel(updatedModel)
|
||||
}
|
||||
if (translateModel?.id === updatedModel.id && translateModel?.provider === provider.id) {
|
||||
setTranslateModel(updatedModel)
|
||||
}
|
||||
if (quickModel?.id === updatedModel.id && quickModel?.provider === provider.id) {
|
||||
setQuickModel(updatedModel)
|
||||
}
|
||||
},
|
||||
[models, updateProvider, provider.id, assistants, defaultModel, dispatch, setDefaultModel]
|
||||
[
|
||||
models,
|
||||
updateProvider,
|
||||
updateAssistants,
|
||||
assistants,
|
||||
defaultModel?.id,
|
||||
defaultModel?.provider,
|
||||
provider.id,
|
||||
translateModel?.id,
|
||||
translateModel?.provider,
|
||||
quickModel?.id,
|
||||
quickModel?.provider,
|
||||
setDefaultModel,
|
||||
setTranslateModel,
|
||||
setQuickModel
|
||||
]
|
||||
)
|
||||
|
||||
return (
|
||||
|
||||
@ -40,6 +40,7 @@ import {
|
||||
detectLanguage,
|
||||
determineTargetLanguage
|
||||
} from '@renderer/utils/translate'
|
||||
import { documentExts } from '@shared/config/constant'
|
||||
import { imageExts, MB, textExts } from '@shared/config/constant'
|
||||
import { FloatButton, Popover, Typography } from 'antd'
|
||||
import type { TextAreaRef } from 'antd/es/input/TextArea'
|
||||
@ -67,7 +68,7 @@ const TranslatePage: FC = () => {
|
||||
const { prompt, getLanguageByLangcode, settings } = useTranslate()
|
||||
const { autoCopy } = settings
|
||||
const { shikiMarkdownIt } = useCodeStyle()
|
||||
const { onSelectFile, selecting, clearFiles } = useFiles({ extensions: [...imageExts, ...textExts] })
|
||||
const { onSelectFile, selecting, clearFiles } = useFiles({ extensions: [...imageExts, ...textExts, ...documentExts] })
|
||||
const { ocr } = useOcr()
|
||||
const { setTimeoutTimer } = useTimer()
|
||||
|
||||
@ -486,33 +487,56 @@ const TranslatePage: FC = () => {
|
||||
const readFile = useCallback(
|
||||
async (file: FileMetadata) => {
|
||||
const _readFile = async () => {
|
||||
let isText: boolean
|
||||
try {
|
||||
// Check whether the file is a text file
|
||||
isText = await isTextFile(file.path)
|
||||
} catch (e) {
|
||||
logger.error('Failed to check if file is text.', e as Error)
|
||||
window.toast.error(t('translate.files.error.check_type') + ': ' + formatErrorMessage(e))
|
||||
return
|
||||
}
|
||||
const fileExtension = getFileExtension(file.path)
|
||||
|
||||
if (!isText) {
|
||||
window.toast.error(t('common.file.not_supported', { type: getFileExtension(file.path) }))
|
||||
logger.error('Unsupported file type.')
|
||||
return
|
||||
}
|
||||
// Check if file is supported format (text file or document file)
|
||||
let isText: boolean
|
||||
const isDocument: boolean = documentExts.includes(fileExtension)
|
||||
|
||||
// the threshold may be too large
|
||||
if (file.size > 5 * MB) {
|
||||
window.toast.error(t('translate.files.error.too_large') + ' (0 ~ 5 MB)')
|
||||
} else {
|
||||
if (!isDocument) {
|
||||
try {
|
||||
// For non-document files, check if it's a text file
|
||||
isText = await isTextFile(file.path)
|
||||
} catch (e) {
|
||||
logger.error('Failed to check file type.', e as Error)
|
||||
window.toast.error(t('translate.files.error.check_type') + ': ' + formatErrorMessage(e))
|
||||
return
|
||||
}
|
||||
} else {
|
||||
isText = false
|
||||
}
|
||||
|
||||
if (!isText && !isDocument) {
|
||||
window.toast.error(t('common.file.not_supported', { type: fileExtension }))
|
||||
logger.error('Unsupported file type.')
|
||||
return
|
||||
}
|
||||
|
||||
// File size check - document files allowed to be larger
|
||||
const maxSize = isDocument ? 20 * MB : 5 * MB
|
||||
if (file.size > maxSize) {
|
||||
window.toast.error(t('translate.files.error.too_large') + ` (0 ~ ${maxSize / MB} MB)`)
|
||||
return
|
||||
}
|
||||
|
||||
let result: string
|
||||
try {
|
||||
const result = await window.api.fs.readText(file.path)
|
||||
if (isDocument) {
|
||||
// Use the new document reading API
|
||||
result = await window.api.file.readExternal(file.path, true)
|
||||
} else {
|
||||
// Read text file
|
||||
result = await window.api.fs.readText(file.path)
|
||||
}
|
||||
setText(text + result)
|
||||
} catch (e) {
|
||||
logger.error('Failed to read text file.', e as Error)
|
||||
logger.error('Failed to read file.', e as Error)
|
||||
window.toast.error(t('translate.files.error.unknown') + ': ' + formatErrorMessage(e))
|
||||
}
|
||||
} catch (e) {
|
||||
logger.error('Failed to read file.', e as Error)
|
||||
window.toast.error(t('translate.files.error.unknown') + ': ' + formatErrorMessage(e))
|
||||
}
|
||||
}
|
||||
const promise = _readFile()
|
||||
|
||||
@ -8,8 +8,8 @@ import { buildStreamTextParams } from '@renderer/aiCore/prepareParams'
|
||||
import { isDedicatedImageGenerationModel, isEmbeddingModel, isFunctionCallingModel } from '@renderer/config/models'
|
||||
import i18n from '@renderer/i18n'
|
||||
import store from '@renderer/store'
|
||||
import type { FetchChatCompletionParams } from '@renderer/types'
|
||||
import type { Assistant, MCPServer, MCPTool, Model, Provider } from '@renderer/types'
|
||||
import { type FetchChatCompletionParams, isSystemProvider } from '@renderer/types'
|
||||
import type { StreamTextParams } from '@renderer/types/aiCoreTypes'
|
||||
import { type Chunk, ChunkType } from '@renderer/types/chunk'
|
||||
import type { Message, ResponseError } from '@renderer/types/newMessage'
|
||||
@ -21,7 +21,8 @@ import { purifyMarkdownImages } from '@renderer/utils/markdown'
|
||||
import { isPromptToolUse, isSupportedToolUse } from '@renderer/utils/mcp-tools'
|
||||
import { findFileBlocks, getMainTextContent } from '@renderer/utils/messageUtils/find'
|
||||
import { containsSupportedVariables, replacePromptVariables } from '@renderer/utils/prompt'
|
||||
import { isEmpty, takeRight } from 'lodash'
|
||||
import { NOT_SUPPORT_API_KEY_PROVIDER_TYPES, NOT_SUPPORT_API_KEY_PROVIDERS } from '@renderer/utils/provider'
|
||||
import { cloneDeep, isEmpty, takeRight } from 'lodash'
|
||||
|
||||
import type { ModernAiProviderConfig } from '../aiCore/index_new'
|
||||
import AiProviderNew from '../aiCore/index_new'
|
||||
@ -42,6 +43,8 @@ import {
|
||||
// } from './MessagesService'
|
||||
// import WebSearchService from './WebSearchService'
|
||||
|
||||
// FIXME: too much duplicated logic here, needs refactoring
|
||||
|
||||
const logger = loggerService.withContext('ApiService')
|
||||
|
||||
export async function fetchMcpTools(assistant: Assistant) {
|
||||
@ -94,7 +97,15 @@ export async function fetchChatCompletion({
|
||||
modelId: assistant.model?.id,
|
||||
modelName: assistant.model?.name
|
||||
})
|
||||
const AI = new AiProviderNew(assistant.model || getDefaultModel())
|
||||
|
||||
// Get base provider and apply API key rotation
|
||||
const baseProvider = getProviderByModel(assistant.model || getDefaultModel())
|
||||
const providerWithRotatedKey = {
|
||||
...cloneDeep(baseProvider),
|
||||
apiKey: getRotatedApiKey(baseProvider)
|
||||
}
|
||||
|
||||
const AI = new AiProviderNew(assistant.model || getDefaultModel(), providerWithRotatedKey)
|
||||
const provider = AI.getActualProvider()
|
||||
|
||||
const mcpTools: MCPTool[] = []
|
||||
@ -171,7 +182,13 @@ export async function fetchMessagesSummary({ messages, assistant }: { messages:
|
||||
return null
|
||||
}
|
||||
|
||||
const AI = new AiProviderNew(model)
|
||||
// Apply API key rotation
|
||||
const providerWithRotatedKey = {
|
||||
...cloneDeep(provider),
|
||||
apiKey: getRotatedApiKey(provider)
|
||||
}
|
||||
|
||||
const AI = new AiProviderNew(model, providerWithRotatedKey)
|
||||
|
||||
const topicId = messages?.find((message) => message.topicId)?.topicId || ''
|
||||
|
||||
@ -270,7 +287,13 @@ export async function fetchNoteSummary({ content, assistant }: { content: string
|
||||
return null
|
||||
}
|
||||
|
||||
const AI = new AiProviderNew(model)
|
||||
// Apply API key rotation
|
||||
const providerWithRotatedKey = {
|
||||
...cloneDeep(provider),
|
||||
apiKey: getRotatedApiKey(provider)
|
||||
}
|
||||
|
||||
const AI = new AiProviderNew(model, providerWithRotatedKey)
|
||||
|
||||
// Only the first 2000 characters, no images
|
||||
const truncatedContent = content.substring(0, 2000)
|
||||
@ -358,7 +381,13 @@ export async function fetchGenerate({
|
||||
return ''
|
||||
}
|
||||
|
||||
const AI = new AiProviderNew(model)
|
||||
// Apply API key rotation
|
||||
const providerWithRotatedKey = {
|
||||
...cloneDeep(provider),
|
||||
apiKey: getRotatedApiKey(provider)
|
||||
}
|
||||
|
||||
const AI = new AiProviderNew(model, providerWithRotatedKey)
|
||||
|
||||
const assistant = getDefaultAssistant()
|
||||
assistant.model = model
|
||||
@ -403,43 +432,91 @@ export async function fetchGenerate({
|
||||
|
||||
export function hasApiKey(provider: Provider) {
|
||||
if (!provider) return false
|
||||
if (['ollama', 'lmstudio', 'vertexai', 'cherryai'].includes(provider.id)) return true
|
||||
if (provider.id === 'cherryai') return true
|
||||
if (
|
||||
(isSystemProvider(provider) && NOT_SUPPORT_API_KEY_PROVIDERS.includes(provider.id)) ||
|
||||
NOT_SUPPORT_API_KEY_PROVIDER_TYPES.includes(provider.type)
|
||||
)
|
||||
return true
|
||||
return !isEmpty(provider.apiKey)
|
||||
}
|
||||
|
||||
/**
|
||||
* Get the first available embedding model from enabled providers
|
||||
* Get rotated API key for providers that support multiple keys
|
||||
* Returns empty string for providers that don't require API keys
|
||||
*/
|
||||
// function getFirstEmbeddingModel() {
|
||||
// const providers = store.getState().llm.providers.filter((p) => p.enabled)
|
||||
function getRotatedApiKey(provider: Provider): string {
|
||||
// Handle providers that don't require API keys
|
||||
if (!provider.apiKey || provider.apiKey.trim() === '') {
|
||||
return ''
|
||||
}
|
||||
|
||||
// for (const provider of providers) {
|
||||
// const embeddingModel = provider.models.find((model) => isEmbeddingModel(model))
|
||||
// if (embeddingModel) {
|
||||
// return embeddingModel
|
||||
// }
|
||||
// }
|
||||
const keys = provider.apiKey
|
||||
.split(',')
|
||||
.map((key) => key.trim())
|
||||
.filter(Boolean)
|
||||
|
||||
// return undefined
|
||||
// }
|
||||
if (keys.length === 0) {
|
||||
return ''
|
||||
}
|
||||
|
||||
const keyName = `provider:${provider.id}:last_used_key`
|
||||
|
||||
// If only one key, return it directly
|
||||
if (keys.length === 1) {
|
||||
return keys[0]
|
||||
}
|
||||
|
||||
const lastUsedKey = window.keyv.get(keyName)
|
||||
if (!lastUsedKey) {
|
||||
window.keyv.set(keyName, keys[0])
|
||||
return keys[0]
|
||||
}
|
||||
|
||||
const currentIndex = keys.indexOf(lastUsedKey)
|
||||
|
||||
// Log when the last used key is no longer in the list
|
||||
if (currentIndex === -1) {
|
||||
logger.debug('Last used API key no longer found in provider keys, falling back to first key', {
|
||||
providerId: provider.id,
|
||||
lastUsedKey: lastUsedKey.substring(0, 8) + '...' // Only log first 8 chars for security
|
||||
})
|
||||
}
|
||||
|
||||
const nextIndex = (currentIndex + 1) % keys.length
|
||||
const nextKey = keys[nextIndex]
|
||||
window.keyv.set(keyName, nextKey)
|
||||
|
||||
return nextKey
|
||||
}
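The rotation above walks comma-separated keys round-robin, persisting the last-used key per provider via `window.keyv`. A hedged simulation with a plain `Map` standing in for that store (names hypothetical), which makes the wrap-around behaviour easy to check outside the renderer process:

// Hedged simulation of getRotatedApiKey; a Map replaces window.keyv here.
function makeRotator(store = new Map<string, string>()) {
  return (providerId: string, apiKey: string): string => {
    const keys = apiKey
      .split(',')
      .map((key) => key.trim())
      .filter(Boolean)
    if (keys.length === 0) return ''
    if (keys.length === 1) return keys[0]

    const slot = `provider:${providerId}:last_used_key`
    const last = store.get(slot)
    // An unknown or missing last key falls back to the first key, as above.
    const nextIndex = last ? (keys.indexOf(last) + 1) % keys.length : 0
    const next = keys[nextIndex]
    store.set(slot, next)
    return next
  }
}

// const rotate = makeRotator()
// rotate('openai', 'key-A,key-B,key-C') // 'key-A'
// rotate('openai', 'key-A,key-B,key-C') // 'key-B'
// rotate('openai', 'key-A,key-B,key-C') // 'key-C', then wraps back to 'key-A'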
|
||||
|
||||
export async function fetchModels(provider: Provider): Promise<Model[]> {
|
||||
const AI = new AiProviderNew(provider)
|
||||
// Apply API key rotation
|
||||
const providerWithRotatedKey = {
|
||||
...cloneDeep(provider),
|
||||
apiKey: getRotatedApiKey(provider)
|
||||
}
|
||||
|
||||
const AI = new AiProviderNew(providerWithRotatedKey)
|
||||
|
||||
try {
|
||||
return await AI.models()
|
||||
} catch (error) {
|
||||
logger.error('Failed to fetch models from provider', {
|
||||
providerId: provider.id,
|
||||
providerName: provider.name,
|
||||
error: error as Error
|
||||
})
|
||||
return []
|
||||
}
|
||||
}
|
||||
|
||||
export function checkApiProvider(provider: Provider): void {
  if (
    provider.id !== 'ollama' &&
    provider.id !== 'lmstudio' &&
    provider.type !== 'vertexai' &&
    provider.id !== 'copilot'
  ) {
  const isExcludedProvider =
    (isSystemProvider(provider) && NOT_SUPPORT_API_KEY_PROVIDERS.includes(provider.id)) ||
    NOT_SUPPORT_API_KEY_PROVIDER_TYPES.includes(provider.type)

  if (!isExcludedProvider) {
    if (!provider.apiKey) {
      window.toast.error(i18n.t('message.error.enter.api.label'))
      throw new Error(i18n.t('message.error.enter.api.label'))

@@ -460,8 +537,7 @@ export function checkApiProvider(provider: Provider): void {
export async function checkApi(provider: Provider, model: Model, timeout = 15000): Promise<void> {
  checkApiProvider(provider)

  // Don't pass in provider parameter. We need auto-format URL
  const ai = new AiProviderNew(model)
  const ai = new AiProviderNew(model, provider)

  const assistant = getDefaultAssistant()
  assistant.model = model
@@ -8,7 +8,6 @@ import {
  UNLIMITED_CONTEXT_COUNT
} from '@renderer/config/constant'
import { isQwenMTModel } from '@renderer/config/models/qwen'
import { CHERRYAI_PROVIDER } from '@renderer/config/providers'
import { UNKNOWN } from '@renderer/config/translate'
import { getStoreProviders } from '@renderer/hooks/useStore'
import i18n from '@renderer/i18n'

@@ -28,7 +27,7 @@ import { uuid } from '@renderer/utils'

const logger = loggerService.withContext('AssistantService')

export const DEFAULT_ASSISTANT_SETTINGS: AssistantSettings = {
export const DEFAULT_ASSISTANT_SETTINGS = {
  temperature: DEFAULT_TEMPERATURE,
  enableTemperature: true,
  contextCount: DEFAULT_CONTEXTCOUNT,

@@ -40,7 +39,7 @@ export const DEFAULT_ASSISTANT_SETTINGS: AssistantSettings = {
  // It would gracefully fallback to prompt if not supported by model.
  toolUseMode: 'function',
  customParameters: []
} as const
} as const satisfies AssistantSettings

export function getDefaultAssistant(): Assistant {
  return {

@@ -148,7 +147,7 @@ export function getProviderByModel(model?: Model): Provider {

  if (!provider) {
    const defaultProvider = providers.find((p) => p.id === getDefaultModel()?.provider)
    return defaultProvider || CHERRYAI_PROVIDER || providers[0]
    return defaultProvider || providers[0]
  }

  return provider
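The switch from a plain `: AssistantSettings` annotation to `as const satisfies AssistantSettings` keeps the literal types of each field while still type-checking the object against the interface. A small illustration of the difference, using a simplified stand-in type rather than the real AssistantSettings:

// Illustrative only (requires TypeScript 4.9+ for `satisfies`)
type Settings = { toolUseMode: 'function' | 'prompt'; temperature: number }

const annotated: Settings = { toolUseMode: 'function', temperature: 1 }
// annotated.toolUseMode is widened to 'function' | 'prompt'

const checked = { toolUseMode: 'function', temperature: 1 } as const satisfies Settings
// checked.toolUseMode keeps the literal type 'function', and misspelled keys or values are still compile errors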
@@ -83,6 +83,68 @@ export async function renameNode(node: NotesTreeNode, newName: string): Promise<
}

export async function uploadNotes(files: File[], targetPath: string): Promise<UploadResult> {
  const basePath = normalizePath(targetPath)
  const totalFiles = files.length

  if (files.length === 0) {
    return {
      uploadedNodes: [],
      totalFiles: 0,
      skippedFiles: 0,
      fileCount: 0,
      folderCount: 0
    }
  }

  try {
    // Get file paths from File objects
    // For browser File objects from drag-and-drop, we need to use FileReader to save temporarily
    // However, for directory uploads, the files already have paths
    const filePaths: string[] = []

    for (const file of files) {
      // @ts-ignore - webkitRelativePath exists on File objects from directory uploads
      if (file.path) {
        // @ts-ignore - Electron File objects have .path property
        filePaths.push(file.path)
      } else {
        // For browser File API, we'd need to use FileReader and create temp files
        // For now, fall back to the old method for these cases
        logger.warn('File without path detected, using fallback method')
        return uploadNotesLegacy(files, targetPath)
      }
    }

    // Pause file watcher to prevent N refresh events
    await window.api.file.pauseFileWatcher()

    try {
      // Use the new optimized batch upload API that runs in Main process
      const result = await window.api.file.batchUploadMarkdown(filePaths, basePath)

      return {
        uploadedNodes: [],
        totalFiles,
        skippedFiles: result.skippedFiles,
        fileCount: result.fileCount,
        folderCount: result.folderCount
      }
    } finally {
      // Resume watcher and trigger single refresh
      await window.api.file.resumeFileWatcher()
    }
  } catch (error) {
    logger.error('Batch upload failed, falling back to legacy method:', error as Error)
    // Fall back to old method if new method fails
    return uploadNotesLegacy(files, targetPath)
  }
}
/**
 * Legacy upload method using Renderer process
 * Kept as fallback for browser File API files without paths
 */
async function uploadNotesLegacy(files: File[], targetPath: string): Promise<UploadResult> {
  const basePath = normalizePath(targetPath)
  const markdownFiles = filterMarkdown(files)
  const skippedFiles = files.length - markdownFiles.length

@@ -101,18 +163,37 @@ export async function uploadNotes(files: File[], targetPath: string): Promise<Up
  await createFolders(folders)

  let fileCount = 0
  const BATCH_SIZE = 5 // Process 5 files concurrently to balance performance and responsiveness

  for (const file of markdownFiles) {
    const { dir, name } = resolveFileTarget(file, basePath)
    const { safeName } = await window.api.file.checkFileName(dir, name, true)
    const finalPath = `${dir}/${safeName}${MARKDOWN_EXT}`
  // Process files in batches to avoid blocking the UI thread
  for (let i = 0; i < markdownFiles.length; i += BATCH_SIZE) {
    const batch = markdownFiles.slice(i, i + BATCH_SIZE)

    try {
      const content = await file.text()
      await window.api.file.write(finalPath, content)
      fileCount += 1
    } catch (error) {
      logger.error('Failed to write uploaded file:', error as Error)
    // Process current batch in parallel
    const results = await Promise.allSettled(
      batch.map(async (file) => {
        const { dir, name } = resolveFileTarget(file, basePath)
        const { safeName } = await window.api.file.checkFileName(dir, name, true)
        const finalPath = `${dir}/${safeName}${MARKDOWN_EXT}`

        const content = await file.text()
        await window.api.file.write(finalPath, content)
        return true
      })
    )

    // Count successful uploads
    results.forEach((result) => {
      if (result.status === 'fulfilled') {
        fileCount += 1
      } else {
        logger.error('Failed to write uploaded file:', result.reason)
      }
    })

    // Yield to the event loop between batches to keep UI responsive
    if (i + BATCH_SIZE < markdownFiles.length) {
      await new Promise((resolve) => setTimeout(resolve, 0))
    }
  }
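The batching pattern above (fixed-size chunks, `Promise.allSettled`, and a `setTimeout(0)` yield between chunks) generalizes beyond markdown uploads. A minimal sketch of the same idea as a standalone helper; the helper name and signature are hypothetical, not part of the codebase:

// Illustrative helper mirroring the batch loop above; returns the number of tasks that succeeded
async function processInBatches<T>(items: T[], batchSize: number, task: (item: T) => Promise<void>): Promise<number> {
  let succeeded = 0
  for (let i = 0; i < items.length; i += batchSize) {
    // Run one chunk in parallel; allSettled keeps one failure from aborting the rest
    const results = await Promise.allSettled(items.slice(i, i + batchSize).map(task))
    succeeded += results.filter((r) => r.status === 'fulfilled').length
    // Yield between chunks so the renderer can repaint and handle input
    if (i + batchSize < items.length) await new Promise((resolve) => setTimeout(resolve, 0))
  }
  return succeeded
}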
@@ -71,7 +71,7 @@ const persistedReducer = persistReducer(
  {
    key: 'cherry-studio',
    storage,
    version: 181,
    version: 182,
    blacklist: ['runtime', 'messages', 'messageBlocks', 'tabs', 'toolPermissions'],
    migrate
  },
@@ -3,7 +3,12 @@
 */
import { loggerService } from '@logger'
import { nanoid } from '@reduxjs/toolkit'
import { DEFAULT_CONTEXTCOUNT, DEFAULT_TEMPERATURE, isMac } from '@renderer/config/constant'
import {
  DEFAULT_CONTEXTCOUNT,
  DEFAULT_STREAM_OPTIONS_INCLUDE_USAGE,
  DEFAULT_TEMPERATURE,
  isMac
} from '@renderer/config/constant'
import { DEFAULT_MIN_APPS } from '@renderer/config/minapps'
import {
  glm45FlashModel,

@@ -2950,6 +2955,10 @@ const migrateConfig = {
          model.provider = SystemProviderIds.gateway
        }
      })
      // @ts-ignore
      if (provider.type === 'ai-gateway') {
        provider.type = 'gateway'
      }
    })
    logger.info('migrate 181 success')
    return state

@@ -2957,6 +2966,21 @@ const migrateConfig = {
      logger.error('migrate 181 error', error as Error)
      return state
    }
  },
  '182': (state: RootState) => {
    try {
      // Initialize streamOptions in settings.openAI if not exists
      if (!state.settings.openAI.streamOptions) {
        state.settings.openAI.streamOptions = {
          includeUsage: DEFAULT_STREAM_OPTIONS_INCLUDE_USAGE
        }
      }
      logger.info('migrate 182 success')
      return state
    } catch (error) {
      logger.error('migrate 182 error', error as Error)
      return state
    }
  }
}
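For orientation: the `version: 182` bump in the persist config above is what makes redux-persist run the new `'182'` migration on stores that were persisted at version 181 or earlier. A minimal sketch of how such a migrations map is typically wired in, assuming the map is the `migrateConfig` object named in the hunk headers and that it is exposed as the `migrate` option consumed by the persist config (the exact wiring is not shown in this diff):

// Sketch only; the real glue code lives outside this diff
import { createMigrate } from 'redux-persist'

// createMigrate walks every migration with a key greater than the persisted version,
// in ascending order, before rehydration completes
const migrate = createMigrate(migrateConfig as any, { debug: false })
// persistReducer({ key: 'cherry-studio', storage, version: 182, migrate, ... }, rootReducer)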
@@ -38,6 +38,7 @@ export interface UpdateState {
  downloaded: boolean
  downloadProgress: number
  available: boolean
  ignore: boolean
}

export interface RuntimeState {

@@ -83,7 +84,8 @@ const initialState: RuntimeState = {
  //   downloading: false,
  //   downloaded: false,
  //   downloadProgress: 0,
  //   available: false
  //   available: false,
  //   ignore: false
  // },
  // export: {
  //   isExporting: false
@@ -3,7 +3,7 @@
 */
import type { PayloadAction } from '@reduxjs/toolkit'
import { createSlice } from '@reduxjs/toolkit'
import { isMac } from '@renderer/config/constant'
import { DEFAULT_STREAM_OPTIONS_INCLUDE_USAGE, isMac } from '@renderer/config/constant'
import type {
  ApiServerConfig,
  CodeStyleVarious,

@@ -13,7 +13,11 @@ import type {
  S3Config,
  TranslateLanguageCode
} from '@renderer/types'
import type { OpenAISummaryText, OpenAIVerbosity } from '@renderer/types/aiCoreTypes'
import type {
  OpenAICompletionsStreamOptions,
  OpenAIReasoningSummary,
  OpenAIVerbosity
} from '@renderer/types/aiCoreTypes'
import { uuid } from '@renderer/utils'
import { API_SERVER_DEFAULTS } from '@shared/config/constant'
import { TRANSLATE_PROMPT } from '@shared/config/prompts'

@@ -201,10 +205,14 @@ export interface SettingsState {
  }
  // OpenAI
  openAI: {
    summaryText: OpenAISummaryText
    // TODO: it's a bad naming. rename it to reasoningSummary in v2.
    summaryText: OpenAIReasoningSummary
    /** @deprecated This setting has been migrated into the Provider object */
    serviceTier: OpenAIServiceTier
    verbosity: OpenAIVerbosity
    streamOptions: {
      includeUsage: OpenAICompletionsStreamOptions['include_usage']
    }
  }
  // Notification
  notification: {

@@ -384,7 +392,10 @@ export const initialState: SettingsState = {
  openAI: {
    summaryText: 'auto',
    serviceTier: 'auto',
    verbosity: undefined
    verbosity: undefined,
    streamOptions: {
      includeUsage: DEFAULT_STREAM_OPTIONS_INCLUDE_USAGE
    }
  },
  notification: {
    assistant: false,

@@ -799,12 +810,18 @@ const settingsSlice = createSlice({
    // // setDisableHardwareAcceleration: (state, action: PayloadAction<boolean>) => {
    // //   state.disableHardwareAcceleration = action.payload
    // // },
    setOpenAISummaryText: (state, action: PayloadAction<OpenAISummaryText>) => {
    setOpenAISummaryText: (state, action: PayloadAction<OpenAIReasoningSummary>) => {
      state.openAI.summaryText = action.payload
    },
    setOpenAIVerbosity: (state, action: PayloadAction<OpenAIVerbosity>) => {
      state.openAI.verbosity = action.payload
    },
    setOpenAIStreamOptionsIncludeUsage: (
      state,
      action: PayloadAction<OpenAICompletionsStreamOptions['include_usage']>
    ) => {
      state.openAI.streamOptions.includeUsage = action.payload
    },
    // setNotificationSettings: (state, action: PayloadAction<SettingsState['notification']>) => {
    //   state.notification = action.payload
    // },

@@ -975,6 +992,7 @@ export const {
  // setDisableHardwareAcceleration,
  setOpenAISummaryText,
  setOpenAIVerbosity,
  setOpenAIStreamOptionsIncludeUsage,
  // setNotificationSettings,
  // Local backup settings
  // setLocalBackupDir,
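A short sketch of how the new reducer might be used from a settings component; the component, import path, and markup below are illustrative and not taken from the diff:

// Illustrative usage of the new action creator; the import path is assumed
import { useDispatch } from 'react-redux'

import { setOpenAIStreamOptionsIncludeUsage } from '@renderer/store/settings'

const StreamUsageToggle = () => {
  const dispatch = useDispatch()
  // Toggling this controls whether `stream_options.include_usage` is sent on streamed completions requests
  return <input type="checkbox" onChange={(e) => dispatch(setOpenAIStreamOptionsIncludeUsage(e.target.checked))} />
}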
@@ -50,7 +50,12 @@ export type OpenAIReasoningEffort = OpenAI.ReasoningEffort
 * When undefined, the parameter is omitted from the request.
 * When null, verbosity is explicitly disabled.
 */
export type OpenAISummaryText = OpenAI.Reasoning['summary']
export type OpenAIReasoningSummary = OpenAI.Reasoning['summary']

/**
 * Options for streaming response. Only set this when you set `stream: true`.
 */
export type OpenAICompletionsStreamOptions = OpenAI.ChatCompletionStreamOptions

const AiSdkParamsSchema = z.enum([
  'maxOutputTokens',
@@ -439,6 +439,7 @@ export type MinAppType = {
  name: string
  logo?: string
  url: string
  // FIXME: It should be `bordered`
  bodered?: boolean
  background?: string
  style?: CSSProperties
@@ -13,7 +13,7 @@ export type NotesSortType =
export interface NotesTreeNode {
  id: string
  name: string // without the file extension
  type: 'folder' | 'file'
  type: 'folder' | 'file' | 'hint'
  treePath: string // relative path
  externalPath: string // absolute path
  children?: NotesTreeNode[]
src/renderer/src/utils/__tests__/select.test.ts (new file, 163 additions)
@@ -0,0 +1,163 @@
import { describe, expect, it } from 'vitest'

import { toOptionValue, toRealValue } from '../select'

describe('toOptionValue', () => {
  describe('primitive values', () => {
    it('should convert undefined to string "undefined"', () => {
      expect(toOptionValue(undefined)).toBe('undefined')
    })

    it('should convert null to string "null"', () => {
      expect(toOptionValue(null)).toBe('null')
    })

    it('should convert true to string "true"', () => {
      expect(toOptionValue(true)).toBe('true')
    })

    it('should convert false to string "false"', () => {
      expect(toOptionValue(false)).toBe('false')
    })
  })

  describe('string values', () => {
    it('should return string as-is', () => {
      expect(toOptionValue('hello')).toBe('hello')
    })

    it('should return empty string as-is', () => {
      expect(toOptionValue('')).toBe('')
    })

    it('should return string with special characters as-is', () => {
      expect(toOptionValue('hello-world_123')).toBe('hello-world_123')
    })

    it('should return string that looks like a boolean as-is', () => {
      expect(toOptionValue('True')).toBe('True')
      expect(toOptionValue('FALSE')).toBe('FALSE')
    })
  })

  describe('mixed type scenarios', () => {
    it('should handle union types correctly', () => {
      const values: Array<string | boolean | null | undefined> = ['test', true, false, null, undefined, '']

      expect(toOptionValue(values[0])).toBe('test')
      expect(toOptionValue(values[1])).toBe('true')
      expect(toOptionValue(values[2])).toBe('false')
      expect(toOptionValue(values[3])).toBe('null')
      expect(toOptionValue(values[4])).toBe('undefined')
      expect(toOptionValue(values[5])).toBe('')
    })
  })
})

describe('toRealValue', () => {
  describe('special string values', () => {
    it('should convert string "undefined" to undefined', () => {
      expect(toRealValue('undefined')).toBeUndefined()
    })

    it('should convert string "null" to null', () => {
      expect(toRealValue('null')).toBeNull()
    })

    it('should convert string "true" to boolean true', () => {
      expect(toRealValue('true')).toBe(true)
    })

    it('should convert string "false" to boolean false', () => {
      expect(toRealValue('false')).toBe(false)
    })
  })

  describe('regular string values', () => {
    it('should return regular string as-is', () => {
      expect(toRealValue('hello')).toBe('hello')
    })

    it('should return empty string as-is', () => {
      expect(toRealValue('')).toBe('')
    })

    it('should return string with special characters as-is', () => {
      expect(toRealValue('hello-world_123')).toBe('hello-world_123')
    })

    it('should return string that looks like special value but with different casing', () => {
      expect(toRealValue('Undefined')).toBe('Undefined')
      expect(toRealValue('NULL')).toBe('NULL')
      expect(toRealValue('True')).toBe('True')
      expect(toRealValue('False')).toBe('False')
    })
  })

  describe('edge cases', () => {
    it('should handle strings containing special values as substring', () => {
      expect(toRealValue('undefined_value')).toBe('undefined_value')
      expect(toRealValue('null_check')).toBe('null_check')
      expect(toRealValue('true_condition')).toBe('true_condition')
      expect(toRealValue('false_flag')).toBe('false_flag')
    })

    it('should handle strings with whitespace', () => {
      expect(toRealValue(' undefined')).toBe(' undefined')
      expect(toRealValue('null ')).toBe('null ')
      expect(toRealValue(' true ')).toBe(' true ')
    })
  })
})

describe('toOptionValue and toRealValue roundtrip', () => {
  it('should correctly convert and restore undefined', () => {
    const original = undefined
    const option = toOptionValue(original)
    const restored = toRealValue(option)
    expect(restored).toBeUndefined()
  })

  it('should correctly convert and restore null', () => {
    const original = null
    const option = toOptionValue(original)
    const restored = toRealValue(option)
    expect(restored).toBeNull()
  })

  it('should correctly convert and restore true', () => {
    const original = true
    const option = toOptionValue(original)
    const restored = toRealValue(option)
    expect(restored).toBe(true)
  })

  it('should correctly convert and restore false', () => {
    const original = false
    const option = toOptionValue(original)
    const restored = toRealValue(option)
    expect(restored).toBe(false)
  })

  it('should correctly convert and restore string values', () => {
    const strings = ['hello', '', 'test-123', 'some_value']
    strings.forEach((str) => {
      const option = toOptionValue(str)
      const restored = toRealValue(option)
      expect(restored).toBe(str)
    })
  })

  it('should handle array of mixed values', () => {
    const values: Array<string | boolean | null | undefined> = ['test', true, false, null, undefined]

    const options = values.map(toOptionValue)
    const restored = options.map(toRealValue)

    expect(restored[0]).toBe('test')
    expect(restored[1]).toBe(true)
    expect(restored[2]).toBe(false)
    expect(restored[3]).toBeNull()
    expect(restored[4]).toBeUndefined()
  })
})
@@ -201,7 +201,7 @@ export const providerCharge = async (provider: string) => {
    height: 700
  },
  aihubmix: {
    url: `https://aihubmix.com/topup?client_id=cherry_studio_oauth&lang=${getLanguageCode()}&aff=SJyh`,
    url: `https://console.aihubmix.com/topup?client_id=cherry_studio_oauth&lang=${getLanguageCode()}&aff=SJyh`,
    width: 720,
    height: 900
  },

@@ -244,7 +244,7 @@ export const providerBills = async (provider: string) => {
    height: 700
  },
  aihubmix: {
    url: `https://aihubmix.com/statistics?client_id=cherry_studio_oauth&lang=${getLanguageCode()}&aff=SJyh`,
    url: `https://console.aihubmix.com/statistics?client_id=cherry_studio_oauth&lang=${getLanguageCode()}&aff=SJyh`,
    width: 900,
    height: 700
  },
@@ -187,3 +187,13 @@ export const isSupportAPIVersionProvider = (provider: Provider) => {
  }
  return provider.apiOptions?.isNotSupportAPIVersion !== false
}

export const NOT_SUPPORT_API_KEY_PROVIDERS: readonly SystemProviderId[] = [
  'ollama',
  'lmstudio',
  'vertexai',
  'aws-bedrock',
  'copilot'
]

export const NOT_SUPPORT_API_KEY_PROVIDER_TYPES: readonly ProviderType[] = ['vertexai', 'aws-bedrock']
@@ -1,36 +1,63 @@
/**
 * Convert a value (string | undefined | null) into an option-compatible string.
 * Convert a value (string | undefined | null | boolean) into an option-compatible string.
 * - `undefined` becomes the literal string `'undefined'`
 * - `null` becomes the literal string `'null'`
 * - `true` becomes the literal string `'true'`
 * - `false` becomes the literal string `'false'`
 * - Any other string is returned as-is
 *
 * @param v - The value to convert
 * @returns The string representation safe for option usage
 */
export function toOptionValue<T extends undefined | Exclude<string, null>>(v: T): NonNullable<T> | 'undefined'
export function toOptionValue<T extends null | Exclude<string, undefined>>(v: T): NonNullable<T> | 'null'
export function toOptionValue<T extends string | undefined | null>(v: T): NonNullable<T> | 'undefined' | 'null'
export function toOptionValue<T extends Exclude<string, null | undefined>>(v: T): T
export function toOptionValue(v: string | undefined | null) {
  if (v === undefined) return 'undefined'
  if (v === null) return 'null'
  return v
export function toOptionValue(v: undefined): 'undefined'
export function toOptionValue(v: null): 'null'
export function toOptionValue(v: boolean): 'true' | 'false'
export function toOptionValue(v: boolean | undefined): 'true' | 'false' | 'undefined'
export function toOptionValue(v: boolean | null): 'true' | 'false' | 'null'
export function toOptionValue(v: boolean | undefined | null): 'true' | 'false' | 'undefined' | 'null'
export function toOptionValue<T extends string>(v: T): T
export function toOptionValue<T extends Exclude<string, 'undefined'> | undefined>(v: T): NonNullable<T> | 'undefined'
export function toOptionValue<T extends Exclude<string, 'null'> | null>(v: T): NonNullable<T> | 'null'
export function toOptionValue<T extends Exclude<string, 'boolean'> | boolean>(v: T): T | 'true' | 'false'
export function toOptionValue<T extends Exclude<string, 'null' | 'undefined'> | null | undefined>(
  v: T
): NonNullable<T> | 'null' | 'undefined'
export function toOptionValue<T extends Exclude<string, 'null' | 'true' | 'false'> | null | boolean>(
  v: T
): NonNullable<T> | 'null' | 'true' | 'false'
export function toOptionValue<T extends Exclude<string, 'undefined' | 'true' | 'false'> | undefined | boolean>(
  v: T
): NonNullable<T> | 'undefined' | 'true' | 'false'
export function toOptionValue<
  T extends Exclude<string, 'null' | 'undefined' | 'true' | 'false'> | null | undefined | boolean
>(v: T): NonNullable<T> | 'null' | 'undefined' | 'true' | 'false'
export function toOptionValue(v: string | undefined | null | boolean) {
  return String(v)
}

/**
 * Convert an option string back to its original value.
 * - The literal string `'undefined'` becomes `undefined`
 * - The literal string `'null'` becomes `null`
 * - The literal string `'true'` becomes `true`
 * - The literal string `'false'` becomes `false`
 * - Any other string is returned as-is
 *
 * @param v - The option string to convert
 * @returns The real value (`undefined`, `null`, or the original string)
 * @returns The real value (`undefined`, `null`, `boolean`, or the original string)
 */
export function toRealValue<T extends 'undefined'>(v: T): undefined
export function toRealValue<T extends 'null'>(v: T): null
export function toRealValue<T extends string>(v: T): Exclude<T, 'undefined' | 'null'>
export function toRealValue(v: 'undefined'): undefined
export function toRealValue(v: 'null'): null
export function toRealValue(v: 'true' | 'false'): boolean
export function toRealValue(v: 'undefined' | 'null'): undefined | null
export function toRealValue(v: 'undefined' | 'true' | 'false'): undefined | boolean
export function toRealValue(v: 'null' | 'true' | 'false'): null | boolean
export function toRealValue(v: 'undefined' | 'null' | 'true' | 'false'): undefined | null | boolean
export function toRealValue<T extends string>(v: T): Exclude<T, 'undefined' | 'null' | 'true' | 'false'>
export function toRealValue(v: string) {
  if (v === 'undefined') return undefined
  if (v === 'null') return null
  if (v === 'true') return true
  if (v === 'false') return false
  return v
}
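The reason for the string round-trip is that option-style UI controls generally want string values, while the underlying setting may be a boolean, null, or undefined. A minimal usage sketch; the commented Select markup and the `save` callback are illustrative and not taken from the diff:

// Illustrative: feed a boolean | undefined setting through a string-valued select control
const current: boolean | undefined = undefined

const optionValue = toOptionValue(current) // 'undefined'
// <Select value={optionValue} onChange={(v) => save(toRealValue(v))} options={[
//   { value: toOptionValue(true), label: 'On' },
//   { value: toOptionValue(false), label: 'Off' },
//   { value: toOptionValue(undefined), label: 'Default' }
// ]} />
const restored = toRealValue(optionValue) // back to undefined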
@@ -1,6 +1,6 @@
import { Button, Tooltip } from '@cherrystudio/ui'
import { usePreference } from '@data/hooks/usePreference'
import { isMac } from '@renderer/config/constant'
import { isMac, isWin } from '@renderer/config/constant'
import i18n from '@renderer/i18n'
import { defaultLanguage } from '@shared/config/constant'
import type { SelectionActionItem } from '@shared/data/preference/preferenceTypes'

@@ -8,11 +8,14 @@ import { IpcChannel } from '@shared/IpcChannel'
import { Slider } from 'antd'
import { Droplet, Minus, Pin, X } from 'lucide-react'
import { DynamicIcon } from 'lucide-react/dynamic'
import type { FC } from 'react'
import type { FC, MouseEvent as ReactMouseEvent } from 'react'
import { useCallback, useEffect, useRef, useState } from 'react'
import { useTranslation } from 'react-i18next'
import styled from 'styled-components'

// [Windows only] Electron bug workaround type - can be removed once https://github.com/electron/electron/issues/48554 is fixed
type ResizeDirection = 'n' | 's' | 'e' | 'w' | 'ne' | 'nw' | 'se' | 'sw'

import ActionGeneral from './components/ActionGeneral'
import ActionTranslate from './components/ActionTranslate'
@@ -188,11 +191,62 @@ const SelectionActionApp: FC = () => {
    }
  }

  /**
   * [Windows only] Manual window resize handler
   *
   * ELECTRON BUG WORKAROUND:
   * In Electron, when using `frame: false` + `transparent: true`, the native window
   * resize functionality is broken on Windows. This is a known Electron bug.
   * See: https://github.com/electron/electron/issues/48554
   *
   * This custom resize implementation can be removed once the Electron bug is fixed.
   */
  const handleResizeStart = useCallback((e: ReactMouseEvent, direction: ResizeDirection) => {
    e.preventDefault()
    e.stopPropagation()

    let lastX = e.screenX
    let lastY = e.screenY

    const handleMouseMove = (moveEvent: MouseEvent) => {
      const deltaX = moveEvent.screenX - lastX
      const deltaY = moveEvent.screenY - lastY

      if (deltaX !== 0 || deltaY !== 0) {
        window.api.selection.resizeActionWindow(deltaX, deltaY, direction)
        lastX = moveEvent.screenX
        lastY = moveEvent.screenY
      }
    }

    const handleMouseUp = () => {
      window.removeEventListener('mousemove', handleMouseMove)
      window.removeEventListener('mouseup', handleMouseUp)
    }

    window.addEventListener('mousemove', handleMouseMove)
    window.addEventListener('mouseup', handleMouseUp)
  }, [])
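The main-process side of `window.api.selection.resizeActionWindow` is not part of this diff. As an assumption-laden sketch, the handler presumably applies the drag deltas to the window bounds according to the direction, roughly like the following; the IPC channel name and wiring here are hypothetical, only the Electron APIs (ipcMain.handle, BrowserWindow.fromWebContents, getBounds, setBounds) are real:

// Hypothetical main-process handler; the real implementation is not shown in this diff
import { BrowserWindow, ipcMain } from 'electron'

ipcMain.handle('selection:resize-action-window', (event, deltaX: number, deltaY: number, direction: string) => {
  const win = BrowserWindow.fromWebContents(event.sender)
  if (!win) return
  const bounds = win.getBounds()
  // East/west edges change width (west also shifts x); north/south edges change height (north also shifts y)
  if (direction.includes('e')) bounds.width += deltaX
  if (direction.includes('w')) {
    bounds.x += deltaX
    bounds.width -= deltaX
  }
  if (direction.includes('s')) bounds.height += deltaY
  if (direction.includes('n')) {
    bounds.y += deltaY
    bounds.height -= deltaY
  }
  win.setBounds(bounds)
})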
  // we don't need to render the component if action is not set
  if (!action) return null

  return (
    <WindowFrame $opacity={opacity / 100}>
      {/* [Windows only] Custom resize handles - Electron bug workaround, can be removed once fixed */}
      {isWin && (
        <>
          <ResizeHandle $direction="n" onMouseDown={(e) => handleResizeStart(e, 'n')} />
          <ResizeHandle $direction="s" onMouseDown={(e) => handleResizeStart(e, 's')} />
          <ResizeHandle $direction="e" onMouseDown={(e) => handleResizeStart(e, 'e')} />
          <ResizeHandle $direction="w" onMouseDown={(e) => handleResizeStart(e, 'w')} />
          <ResizeHandle $direction="ne" onMouseDown={(e) => handleResizeStart(e, 'ne')} />
          <ResizeHandle $direction="nw" onMouseDown={(e) => handleResizeStart(e, 'nw')} />
          <ResizeHandle $direction="se" onMouseDown={(e) => handleResizeStart(e, 'se')} />
          <ResizeHandle $direction="sw" onMouseDown={(e) => handleResizeStart(e, 'sw')} />
        </>
      )}

      <TitleBar $isWindowFocus={isWindowFocus} style={isMac ? { paddingLeft: '70px' } : {}}>
        {action.icon && (
          <TitleBarIcon>

@@ -435,4 +489,90 @@ const OpacitySlider = styled.div`
  }
`
|
||||
* [Windows only] Custom resize handle styled component
|
||||
*
|
||||
* ELECTRON BUG WORKAROUND:
|
||||
* This component can be removed once https://github.com/electron/electron/issues/48554 is fixed.
|
||||
*/
|
||||
const ResizeHandle = styled.div<{ $direction: ResizeDirection }>`
|
||||
position: absolute;
|
||||
-webkit-app-region: no-drag;
|
||||
z-index: 10;
|
||||
|
||||
${({ $direction }) => {
|
||||
const edgeSize = '6px'
|
||||
const cornerSize = '12px'
|
||||
|
||||
switch ($direction) {
|
||||
case 'n':
|
||||
return `
|
||||
top: 0;
|
||||
left: ${cornerSize};
|
||||
right: ${cornerSize};
|
||||
height: ${edgeSize};
|
||||
cursor: ns-resize;
|
||||
`
|
||||
case 's':
|
||||
return `
|
||||
bottom: 0;
|
||||
left: ${cornerSize};
|
||||
right: ${cornerSize};
|
||||
height: ${edgeSize};
|
||||
cursor: ns-resize;
|
||||
`
|
||||
case 'e':
|
||||
return `
|
||||
right: 0;
|
||||
top: ${cornerSize};
|
||||
bottom: ${cornerSize};
|
||||
width: ${edgeSize};
|
||||
cursor: ew-resize;
|
||||
`
|
||||
case 'w':
|
||||
return `
|
||||
left: 0;
|
||||
top: ${cornerSize};
|
||||
bottom: ${cornerSize};
|
||||
width: ${edgeSize};
|
||||
cursor: ew-resize;
|
||||
`
|
||||
case 'ne':
|
||||
return `
|
||||
top: 0;
|
||||
right: 0;
|
||||
width: ${cornerSize};
|
||||
height: ${cornerSize};
|
||||
cursor: nesw-resize;
|
||||
`
|
||||
case 'nw':
|
||||
return `
|
||||
top: 0;
|
||||
left: 0;
|
||||
width: ${cornerSize};
|
||||
height: ${cornerSize};
|
||||
cursor: nwse-resize;
|
||||
`
|
||||
case 'se':
|
||||
return `
|
||||
bottom: 0;
|
||||
right: 0;
|
||||
width: ${cornerSize};
|
||||
height: ${cornerSize};
|
||||
cursor: nwse-resize;
|
||||
`
|
||||
case 'sw':
|
||||
return `
|
||||
bottom: 0;
|
||||
left: 0;
|
||||
width: ${cornerSize};
|
||||
height: ${cornerSize};
|
||||
cursor: nesw-resize;
|
||||
`
|
||||
default:
|
||||
return ''
|
||||
}
|
||||
}}
|
||||
`
|
||||
|
||||
export default SelectionActionApp
|
||||
|
||||