mirror of
https://github.com/CherryHQ/cherry-studio.git
synced 2025-12-19 06:30:10 +08:00
Merge branch 'main' into feat/bonjour
This commit is contained in:
commit
f95b040b07
@ -134,54 +134,38 @@ artifactBuildCompleted: scripts/artifact-build-completed.js
|
|||||||
releaseInfo:
|
releaseInfo:
|
||||||
releaseNotes: |
|
releaseNotes: |
|
||||||
<!--LANG:en-->
|
<!--LANG:en-->
|
||||||
Cherry Studio 1.7.4 - New Browser MCP & Model Updates
|
Cherry Studio 1.7.5 - Filesystem MCP Overhaul & Topic Management
|
||||||
|
|
||||||
This release adds a powerful browser automation MCP server, new web search provider, and model support updates.
|
This release features a completely rewritten filesystem MCP server, new batch topic management, and improved assistant management.
|
||||||
|
|
||||||
✨ New Features
|
✨ New Features
|
||||||
- [MCP] Add @cherry/browser CDP MCP server with session management for browser automation
|
- [MCP] Rewrite filesystem MCP server with improved tool set (glob, ls, grep, read, write, edit, delete)
|
||||||
- [Web Search] Add ExaMCP free web search provider (no API key required)
|
- [Topics] Add topic manage mode for batch delete and move operations with search functionality
|
||||||
- [Model] Support GPT 5.2 series models
|
- [Assistants] Merge import/subscribe popups and add export to assistant management
|
||||||
- [Model] Add capabilities support for Doubao Seed Code models (tool calling, reasoning, vision)
|
- [Knowledge] Use prompt injection for forced knowledge base search (faster response times)
|
||||||
|
- [Settings] Add tool use mode setting (prompt/function) to default assistant settings
|
||||||
🔧 Improvements
|
|
||||||
- [Translate] Add reasoning effort option to translate service
|
|
||||||
- [i18n] Improve zh-TW Traditional Chinese locale
|
|
||||||
- [Settings] Update MCP Settings layout and styling
|
|
||||||
|
|
||||||
🐛 Bug Fixes
|
🐛 Bug Fixes
|
||||||
- [Chat] Fix line numbers being wrongly copied from code blocks
|
- [Model] Correct typo in Gemini 3 Pro Image Preview model name
|
||||||
- [Translate] Fix default to first supported reasoning effort when translating
|
- [Installer] Auto-install VC++ Redistributable without user prompt
|
||||||
- [Chat] Fix preserve thinking block in assistant messages
|
- [Notes] Fix notes directory validation and default path reset for cross-platform restore
|
||||||
- [Web Search] Fix max search result limit
|
- [OAuth] Bind OAuth callback server to localhost (127.0.0.1) for security
|
||||||
- [Embedding] Fix embedding dimensions retrieval for ModernAiProvider
|
|
||||||
- [Chat] Fix token calculation in prompt tool use plugin
|
|
||||||
- [Model] Fix Ollama provider options for Qwen model support
|
|
||||||
- [UI] Fix Chat component marginRight calculation for improved layout
|
|
||||||
|
|
||||||
<!--LANG:zh-CN-->
|
<!--LANG:zh-CN-->
|
||||||
Cherry Studio 1.7.4 - 新增浏览器 MCP 与模型更新
|
Cherry Studio 1.7.5 - 文件系统 MCP 重构与话题管理
|
||||||
|
|
||||||
本次更新新增强大的浏览器自动化 MCP 服务器、新的网页搜索提供商以及模型支持更新。
|
本次更新完全重写了文件系统 MCP 服务器,新增批量话题管理功能,并改进了助手管理。
|
||||||
|
|
||||||
✨ 新功能
|
✨ 新功能
|
||||||
- [MCP] 新增 @cherry/browser CDP MCP 服务器,支持会话管理的浏览器自动化
|
- [MCP] 重写文件系统 MCP 服务器,提供改进的工具集(glob、ls、grep、read、write、edit、delete)
|
||||||
- [网页搜索] 新增 ExaMCP 免费网页搜索提供商(无需 API 密钥)
|
- [话题] 新增话题管理模式,支持批量删除和移动操作,带搜索功能
|
||||||
- [模型] 支持 GPT 5.2 系列模型
|
- [助手] 合并导入/订阅弹窗,并在助手管理中添加导出功能
|
||||||
- [模型] 为豆包 Seed Code 模型添加能力支持(工具调用、推理、视觉)
|
- [知识库] 使用提示词注入进行强制知识库搜索(响应更快)
|
||||||
|
- [设置] 在默认助手设置中添加工具使用模式设置(prompt/function)
|
||||||
🔧 功能改进
|
|
||||||
- [翻译] 为翻译服务添加推理强度选项
|
|
||||||
- [国际化] 改进繁体中文(zh-TW)本地化
|
|
||||||
- [设置] 优化 MCP 设置布局和样式
|
|
||||||
|
|
||||||
🐛 问题修复
|
🐛 问题修复
|
||||||
- [聊天] 修复代码块中行号被错误复制的问题
|
- [模型] 修正 Gemini 3 Pro Image Preview 模型名称的拼写错误
|
||||||
- [翻译] 修复翻译时默认使用第一个支持的推理强度
|
- [安装程序] 自动安装 VC++ 运行库,无需用户确认
|
||||||
- [聊天] 修复助手消息中思考块的保留问题
|
- [笔记] 修复跨平台恢复场景下的笔记目录验证和默认路径重置逻辑
|
||||||
- [网页搜索] 修复最大搜索结果数限制
|
- [OAuth] 将 OAuth 回调服务器绑定到 localhost (127.0.0.1) 以提高安全性
|
||||||
- [嵌入] 修复 ModernAiProvider 嵌入维度获取问题
|
|
||||||
- [聊天] 修复提示词工具使用插件的 token 计算问题
|
|
||||||
- [模型] 修复 Ollama 提供商对 Qwen 模型的支持选项
|
|
||||||
- [界面] 修复聊天组件右边距计算以改善布局
|
|
||||||
<!--LANG:END-->
|
<!--LANG:END-->
|
||||||
|
|||||||
@ -1,6 +1,6 @@
|
|||||||
{
|
{
|
||||||
"name": "CherryStudio",
|
"name": "CherryStudio",
|
||||||
"version": "1.7.4",
|
"version": "1.7.5",
|
||||||
"private": true,
|
"private": true,
|
||||||
"description": "A powerful AI assistant for producer.",
|
"description": "A powerful AI assistant for producer.",
|
||||||
"main": "./out/main/index.js",
|
"main": "./out/main/index.js",
|
||||||
|
|||||||
@ -246,6 +246,7 @@ export enum IpcChannel {
|
|||||||
System_GetCpuName = 'system:getCpuName',
|
System_GetCpuName = 'system:getCpuName',
|
||||||
System_CheckGitBash = 'system:checkGitBash',
|
System_CheckGitBash = 'system:checkGitBash',
|
||||||
System_GetGitBashPath = 'system:getGitBashPath',
|
System_GetGitBashPath = 'system:getGitBashPath',
|
||||||
|
System_GetGitBashPathInfo = 'system:getGitBashPathInfo',
|
||||||
System_SetGitBashPath = 'system:setGitBashPath',
|
System_SetGitBashPath = 'system:setGitBashPath',
|
||||||
|
|
||||||
// DevTools
|
// DevTools
|
||||||
|
|||||||
@ -488,3 +488,11 @@ export const MACOS_TERMINALS_WITH_COMMANDS: TerminalConfigWithCommand[] = [
|
|||||||
|
|
||||||
// resources/scripts should be maintained manually
|
// resources/scripts should be maintained manually
|
||||||
export const HOME_CHERRY_DIR = '.cherrystudio'
|
export const HOME_CHERRY_DIR = '.cherrystudio'
|
||||||
|
|
||||||
|
// Git Bash path configuration types
|
||||||
|
export type GitBashPathSource = 'manual' | 'auto'
|
||||||
|
|
||||||
|
export interface GitBashPathInfo {
|
||||||
|
path: string | null
|
||||||
|
source: GitBashPathSource | null
|
||||||
|
}
|
||||||
|
|||||||
@ -6,7 +6,14 @@ import { loggerService } from '@logger'
|
|||||||
import { isLinux, isMac, isPortable, isWin } from '@main/constant'
|
import { isLinux, isMac, isPortable, isWin } from '@main/constant'
|
||||||
import { generateSignature } from '@main/integration/cherryai'
|
import { generateSignature } from '@main/integration/cherryai'
|
||||||
import anthropicService from '@main/services/AnthropicService'
|
import anthropicService from '@main/services/AnthropicService'
|
||||||
import { findGitBash, getBinaryPath, isBinaryExists, runInstallScript, validateGitBashPath } from '@main/utils/process'
|
import {
|
||||||
|
autoDiscoverGitBash,
|
||||||
|
getBinaryPath,
|
||||||
|
getGitBashPathInfo,
|
||||||
|
isBinaryExists,
|
||||||
|
runInstallScript,
|
||||||
|
validateGitBashPath
|
||||||
|
} from '@main/utils/process'
|
||||||
import { handleZoomFactor } from '@main/utils/zoom'
|
import { handleZoomFactor } from '@main/utils/zoom'
|
||||||
import type { SpanEntity, TokenUsage } from '@mcp-trace/trace-core'
|
import type { SpanEntity, TokenUsage } from '@mcp-trace/trace-core'
|
||||||
import type { UpgradeChannel } from '@shared/config/constant'
|
import type { UpgradeChannel } from '@shared/config/constant'
|
||||||
@ -501,9 +508,8 @@ export function registerIpc(mainWindow: BrowserWindow, app: Electron.App) {
|
|||||||
}
|
}
|
||||||
|
|
||||||
try {
|
try {
|
||||||
const customPath = configManager.get(ConfigKeys.GitBashPath) as string | undefined
|
// Use autoDiscoverGitBash to handle auto-discovery and persistence
|
||||||
const bashPath = findGitBash(customPath)
|
const bashPath = autoDiscoverGitBash()
|
||||||
|
|
||||||
if (bashPath) {
|
if (bashPath) {
|
||||||
logger.info('Git Bash is available', { path: bashPath })
|
logger.info('Git Bash is available', { path: bashPath })
|
||||||
return true
|
return true
|
||||||
@ -526,13 +532,22 @@ export function registerIpc(mainWindow: BrowserWindow, app: Electron.App) {
|
|||||||
return customPath ?? null
|
return customPath ?? null
|
||||||
})
|
})
|
||||||
|
|
||||||
|
// Returns { path, source } where source is 'manual' | 'auto' | null
|
||||||
|
ipcMain.handle(IpcChannel.System_GetGitBashPathInfo, () => {
|
||||||
|
return getGitBashPathInfo()
|
||||||
|
})
|
||||||
|
|
||||||
ipcMain.handle(IpcChannel.System_SetGitBashPath, (_, newPath: string | null) => {
|
ipcMain.handle(IpcChannel.System_SetGitBashPath, (_, newPath: string | null) => {
|
||||||
if (!isWin) {
|
if (!isWin) {
|
||||||
return false
|
return false
|
||||||
}
|
}
|
||||||
|
|
||||||
if (!newPath) {
|
if (!newPath) {
|
||||||
|
// Clear manual setting and re-run auto-discovery
|
||||||
configManager.set(ConfigKeys.GitBashPath, null)
|
configManager.set(ConfigKeys.GitBashPath, null)
|
||||||
|
configManager.set(ConfigKeys.GitBashPathSource, null)
|
||||||
|
// Re-run auto-discovery to restore auto-discovered path if available
|
||||||
|
autoDiscoverGitBash()
|
||||||
return true
|
return true
|
||||||
}
|
}
|
||||||
|
|
||||||
@ -541,7 +556,9 @@ export function registerIpc(mainWindow: BrowserWindow, app: Electron.App) {
|
|||||||
return false
|
return false
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// Set path with 'manual' source
|
||||||
configManager.set(ConfigKeys.GitBashPath, validated)
|
configManager.set(ConfigKeys.GitBashPath, validated)
|
||||||
|
configManager.set(ConfigKeys.GitBashPathSource, 'manual')
|
||||||
return true
|
return true
|
||||||
})
|
})
|
||||||
|
|
||||||
|
|||||||
@ -36,7 +36,7 @@ export function createInMemoryMCPServer(
|
|||||||
return new FetchServer().server
|
return new FetchServer().server
|
||||||
}
|
}
|
||||||
case BuiltinMCPServerNames.filesystem: {
|
case BuiltinMCPServerNames.filesystem: {
|
||||||
return new FileSystemServer(args).server
|
return new FileSystemServer(envs.WORKSPACE_ROOT).server
|
||||||
}
|
}
|
||||||
case BuiltinMCPServerNames.difyKnowledge: {
|
case BuiltinMCPServerNames.difyKnowledge: {
|
||||||
const difyKey = envs.DIFY_KEY
|
const difyKey = envs.DIFY_KEY
|
||||||
|
|||||||
@ -1,652 +0,0 @@
|
|||||||
// port https://github.com/modelcontextprotocol/servers/blob/main/src/filesystem/index.ts
|
|
||||||
|
|
||||||
import { loggerService } from '@logger'
|
|
||||||
import { Server } from '@modelcontextprotocol/sdk/server/index.js'
|
|
||||||
import { CallToolRequestSchema, ListToolsRequestSchema } from '@modelcontextprotocol/sdk/types.js'
|
|
||||||
import { createTwoFilesPatch } from 'diff'
|
|
||||||
import fs from 'fs/promises'
|
|
||||||
import { minimatch } from 'minimatch'
|
|
||||||
import os from 'os'
|
|
||||||
import path from 'path'
|
|
||||||
import * as z from 'zod'
|
|
||||||
|
|
||||||
const logger = loggerService.withContext('MCP:FileSystemServer')
|
|
||||||
|
|
||||||
// Normalize all paths consistently
|
|
||||||
function normalizePath(p: string): string {
|
|
||||||
return path.normalize(p)
|
|
||||||
}
|
|
||||||
|
|
||||||
function expandHome(filepath: string): string {
|
|
||||||
if (filepath.startsWith('~/') || filepath === '~') {
|
|
||||||
return path.join(os.homedir(), filepath.slice(1))
|
|
||||||
}
|
|
||||||
return filepath
|
|
||||||
}
|
|
||||||
|
|
||||||
// Security utilities
|
|
||||||
async function validatePath(allowedDirectories: string[], requestedPath: string): Promise<string> {
|
|
||||||
const expandedPath = expandHome(requestedPath)
|
|
||||||
const absolute = path.isAbsolute(expandedPath)
|
|
||||||
? path.resolve(expandedPath)
|
|
||||||
: path.resolve(process.cwd(), expandedPath)
|
|
||||||
|
|
||||||
const normalizedRequested = normalizePath(absolute)
|
|
||||||
|
|
||||||
// Check if path is within allowed directories
|
|
||||||
const isAllowed = allowedDirectories.some((dir) => normalizedRequested.startsWith(dir))
|
|
||||||
if (!isAllowed) {
|
|
||||||
throw new Error(
|
|
||||||
`Access denied - path outside allowed directories: ${absolute} not in ${allowedDirectories.join(', ')}`
|
|
||||||
)
|
|
||||||
}
|
|
||||||
|
|
||||||
// Handle symlinks by checking their real path
|
|
||||||
try {
|
|
||||||
const realPath = await fs.realpath(absolute)
|
|
||||||
const normalizedReal = normalizePath(realPath)
|
|
||||||
const isRealPathAllowed = allowedDirectories.some((dir) => normalizedReal.startsWith(dir))
|
|
||||||
if (!isRealPathAllowed) {
|
|
||||||
throw new Error('Access denied - symlink target outside allowed directories')
|
|
||||||
}
|
|
||||||
return realPath
|
|
||||||
} catch (error) {
|
|
||||||
// For new files that don't exist yet, verify parent directory
|
|
||||||
const parentDir = path.dirname(absolute)
|
|
||||||
try {
|
|
||||||
const realParentPath = await fs.realpath(parentDir)
|
|
||||||
const normalizedParent = normalizePath(realParentPath)
|
|
||||||
const isParentAllowed = allowedDirectories.some((dir) => normalizedParent.startsWith(dir))
|
|
||||||
if (!isParentAllowed) {
|
|
||||||
throw new Error('Access denied - parent directory outside allowed directories')
|
|
||||||
}
|
|
||||||
return absolute
|
|
||||||
} catch {
|
|
||||||
throw new Error(`Parent directory does not exist: ${parentDir}`)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// Schema definitions
|
|
||||||
const ReadFileArgsSchema = z.object({
|
|
||||||
path: z.string()
|
|
||||||
})
|
|
||||||
|
|
||||||
const ReadMultipleFilesArgsSchema = z.object({
|
|
||||||
paths: z.array(z.string())
|
|
||||||
})
|
|
||||||
|
|
||||||
const WriteFileArgsSchema = z.object({
|
|
||||||
path: z.string(),
|
|
||||||
content: z.string()
|
|
||||||
})
|
|
||||||
|
|
||||||
const EditOperation = z.object({
|
|
||||||
oldText: z.string().describe('Text to search for - must match exactly'),
|
|
||||||
newText: z.string().describe('Text to replace with')
|
|
||||||
})
|
|
||||||
|
|
||||||
const EditFileArgsSchema = z.object({
|
|
||||||
path: z.string(),
|
|
||||||
edits: z.array(EditOperation),
|
|
||||||
dryRun: z.boolean().default(false).describe('Preview changes using git-style diff format')
|
|
||||||
})
|
|
||||||
|
|
||||||
const CreateDirectoryArgsSchema = z.object({
|
|
||||||
path: z.string()
|
|
||||||
})
|
|
||||||
|
|
||||||
const ListDirectoryArgsSchema = z.object({
|
|
||||||
path: z.string()
|
|
||||||
})
|
|
||||||
|
|
||||||
const DirectoryTreeArgsSchema = z.object({
|
|
||||||
path: z.string()
|
|
||||||
})
|
|
||||||
|
|
||||||
const MoveFileArgsSchema = z.object({
|
|
||||||
source: z.string(),
|
|
||||||
destination: z.string()
|
|
||||||
})
|
|
||||||
|
|
||||||
const SearchFilesArgsSchema = z.object({
|
|
||||||
path: z.string(),
|
|
||||||
pattern: z.string(),
|
|
||||||
excludePatterns: z.array(z.string()).optional().default([])
|
|
||||||
})
|
|
||||||
|
|
||||||
const GetFileInfoArgsSchema = z.object({
|
|
||||||
path: z.string()
|
|
||||||
})
|
|
||||||
|
|
||||||
interface FileInfo {
|
|
||||||
size: number
|
|
||||||
created: Date
|
|
||||||
modified: Date
|
|
||||||
accessed: Date
|
|
||||||
isDirectory: boolean
|
|
||||||
isFile: boolean
|
|
||||||
permissions: string
|
|
||||||
}
|
|
||||||
|
|
||||||
// Tool implementations
|
|
||||||
async function getFileStats(filePath: string): Promise<FileInfo> {
|
|
||||||
const stats = await fs.stat(filePath)
|
|
||||||
return {
|
|
||||||
size: stats.size,
|
|
||||||
created: stats.birthtime,
|
|
||||||
modified: stats.mtime,
|
|
||||||
accessed: stats.atime,
|
|
||||||
isDirectory: stats.isDirectory(),
|
|
||||||
isFile: stats.isFile(),
|
|
||||||
permissions: stats.mode.toString(8).slice(-3)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
async function searchFiles(
|
|
||||||
allowedDirectories: string[],
|
|
||||||
rootPath: string,
|
|
||||||
pattern: string,
|
|
||||||
excludePatterns: string[] = []
|
|
||||||
): Promise<string[]> {
|
|
||||||
const results: string[] = []
|
|
||||||
|
|
||||||
async function search(currentPath: string) {
|
|
||||||
const entries = await fs.readdir(currentPath, { withFileTypes: true })
|
|
||||||
|
|
||||||
for (const entry of entries) {
|
|
||||||
const fullPath = path.join(currentPath, entry.name)
|
|
||||||
|
|
||||||
try {
|
|
||||||
// Validate each path before processing
|
|
||||||
await validatePath(allowedDirectories, fullPath)
|
|
||||||
|
|
||||||
// Check if path matches any exclude pattern
|
|
||||||
const relativePath = path.relative(rootPath, fullPath)
|
|
||||||
const shouldExclude = excludePatterns.some((pattern) => {
|
|
||||||
const globPattern = pattern.includes('*') ? pattern : `**/${pattern}/**`
|
|
||||||
return minimatch(relativePath, globPattern, { dot: true })
|
|
||||||
})
|
|
||||||
|
|
||||||
if (shouldExclude) {
|
|
||||||
continue
|
|
||||||
}
|
|
||||||
|
|
||||||
if (entry.name.toLowerCase().includes(pattern.toLowerCase())) {
|
|
||||||
results.push(fullPath)
|
|
||||||
}
|
|
||||||
|
|
||||||
if (entry.isDirectory()) {
|
|
||||||
await search(fullPath)
|
|
||||||
}
|
|
||||||
} catch (error) {
|
|
||||||
// Skip invalid paths during search
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
await search(rootPath)
|
|
||||||
return results
|
|
||||||
}
|
|
||||||
|
|
||||||
// file editing and diffing utilities
|
|
||||||
function normalizeLineEndings(text: string): string {
|
|
||||||
return text.replace(/\r\n/g, '\n')
|
|
||||||
}
|
|
||||||
|
|
||||||
function createUnifiedDiff(originalContent: string, newContent: string, filepath: string = 'file'): string {
|
|
||||||
// Ensure consistent line endings for diff
|
|
||||||
const normalizedOriginal = normalizeLineEndings(originalContent)
|
|
||||||
const normalizedNew = normalizeLineEndings(newContent)
|
|
||||||
|
|
||||||
return createTwoFilesPatch(filepath, filepath, normalizedOriginal, normalizedNew, 'original', 'modified')
|
|
||||||
}
|
|
||||||
|
|
||||||
async function applyFileEdits(
|
|
||||||
filePath: string,
|
|
||||||
edits: Array<{ oldText: string; newText: string }>,
|
|
||||||
dryRun = false
|
|
||||||
): Promise<string> {
|
|
||||||
// Read file content and normalize line endings
|
|
||||||
const content = normalizeLineEndings(await fs.readFile(filePath, 'utf-8'))
|
|
||||||
|
|
||||||
// Apply edits sequentially
|
|
||||||
let modifiedContent = content
|
|
||||||
for (const edit of edits) {
|
|
||||||
const normalizedOld = normalizeLineEndings(edit.oldText)
|
|
||||||
const normalizedNew = normalizeLineEndings(edit.newText)
|
|
||||||
|
|
||||||
// If exact match exists, use it
|
|
||||||
if (modifiedContent.includes(normalizedOld)) {
|
|
||||||
modifiedContent = modifiedContent.replace(normalizedOld, normalizedNew)
|
|
||||||
continue
|
|
||||||
}
|
|
||||||
|
|
||||||
// Otherwise, try line-by-line matching with flexibility for whitespace
|
|
||||||
const oldLines = normalizedOld.split('\n')
|
|
||||||
const contentLines = modifiedContent.split('\n')
|
|
||||||
let matchFound = false
|
|
||||||
|
|
||||||
for (let i = 0; i <= contentLines.length - oldLines.length; i++) {
|
|
||||||
const potentialMatch = contentLines.slice(i, i + oldLines.length)
|
|
||||||
|
|
||||||
// Compare lines with normalized whitespace
|
|
||||||
const isMatch = oldLines.every((oldLine, j) => {
|
|
||||||
const contentLine = potentialMatch[j]
|
|
||||||
return oldLine.trim() === contentLine.trim()
|
|
||||||
})
|
|
||||||
|
|
||||||
if (isMatch) {
|
|
||||||
// Preserve original indentation of first line
|
|
||||||
const originalIndent = contentLines[i].match(/^\s*/)?.[0] || ''
|
|
||||||
const newLines = normalizedNew.split('\n').map((line, j) => {
|
|
||||||
if (j === 0) return originalIndent + line.trimStart()
|
|
||||||
// For subsequent lines, try to preserve relative indentation
|
|
||||||
const oldIndent = oldLines[j]?.match(/^\s*/)?.[0] || ''
|
|
||||||
const newIndent = line.match(/^\s*/)?.[0] || ''
|
|
||||||
if (oldIndent && newIndent) {
|
|
||||||
const relativeIndent = newIndent.length - oldIndent.length
|
|
||||||
return originalIndent + ' '.repeat(Math.max(0, relativeIndent)) + line.trimStart()
|
|
||||||
}
|
|
||||||
return line
|
|
||||||
})
|
|
||||||
|
|
||||||
contentLines.splice(i, oldLines.length, ...newLines)
|
|
||||||
modifiedContent = contentLines.join('\n')
|
|
||||||
matchFound = true
|
|
||||||
break
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
if (!matchFound) {
|
|
||||||
throw new Error(`Could not find exact match for edit:\n${edit.oldText}`)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// Create unified diff
|
|
||||||
const diff = createUnifiedDiff(content, modifiedContent, filePath)
|
|
||||||
|
|
||||||
// Format diff with appropriate number of backticks
|
|
||||||
let numBackticks = 3
|
|
||||||
while (diff.includes('`'.repeat(numBackticks))) {
|
|
||||||
numBackticks++
|
|
||||||
}
|
|
||||||
const formattedDiff = `${'`'.repeat(numBackticks)}diff\n${diff}${'`'.repeat(numBackticks)}\n\n`
|
|
||||||
|
|
||||||
if (!dryRun) {
|
|
||||||
await fs.writeFile(filePath, modifiedContent, 'utf-8')
|
|
||||||
}
|
|
||||||
|
|
||||||
return formattedDiff
|
|
||||||
}
|
|
||||||
|
|
||||||
class FileSystemServer {
|
|
||||||
public server: Server
|
|
||||||
private allowedDirectories: string[]
|
|
||||||
constructor(allowedDirs: string[]) {
|
|
||||||
if (!Array.isArray(allowedDirs) || allowedDirs.length === 0) {
|
|
||||||
throw new Error('No allowed directories provided, please specify at least one directory in args')
|
|
||||||
}
|
|
||||||
|
|
||||||
this.allowedDirectories = allowedDirs.map((dir) => normalizePath(path.resolve(expandHome(dir))))
|
|
||||||
|
|
||||||
// Validate that all directories exist and are accessible
|
|
||||||
this.validateDirs().catch((error) => {
|
|
||||||
logger.error('Error validating allowed directories:', error)
|
|
||||||
throw new Error(`Error validating allowed directories: ${error}`)
|
|
||||||
})
|
|
||||||
|
|
||||||
this.server = new Server(
|
|
||||||
{
|
|
||||||
name: 'secure-filesystem-server',
|
|
||||||
version: '0.2.0'
|
|
||||||
},
|
|
||||||
{
|
|
||||||
capabilities: {
|
|
||||||
tools: {}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
)
|
|
||||||
this.initialize()
|
|
||||||
}
|
|
||||||
|
|
||||||
async validateDirs() {
|
|
||||||
// Validate that all directories exist and are accessible
|
|
||||||
await Promise.all(
|
|
||||||
this.allowedDirectories.map(async (dir) => {
|
|
||||||
try {
|
|
||||||
const stats = await fs.stat(expandHome(dir))
|
|
||||||
if (!stats.isDirectory()) {
|
|
||||||
logger.error(`Error: ${dir} is not a directory`)
|
|
||||||
throw new Error(`Error: ${dir} is not a directory`)
|
|
||||||
}
|
|
||||||
} catch (error: any) {
|
|
||||||
logger.error(`Error accessing directory ${dir}:`, error)
|
|
||||||
throw new Error(`Error accessing directory ${dir}:`, error)
|
|
||||||
}
|
|
||||||
})
|
|
||||||
)
|
|
||||||
}
|
|
||||||
|
|
||||||
initialize() {
|
|
||||||
// Tool handlers
|
|
||||||
this.server.setRequestHandler(ListToolsRequestSchema, async () => {
|
|
||||||
return {
|
|
||||||
tools: [
|
|
||||||
{
|
|
||||||
name: 'read_file',
|
|
||||||
description:
|
|
||||||
'Read the complete contents of a file from the file system. ' +
|
|
||||||
'Handles various text encodings and provides detailed error messages ' +
|
|
||||||
'if the file cannot be read. Use this tool when you need to examine ' +
|
|
||||||
'the contents of a single file. Only works within allowed directories.',
|
|
||||||
inputSchema: z.toJSONSchema(ReadFileArgsSchema)
|
|
||||||
},
|
|
||||||
{
|
|
||||||
name: 'read_multiple_files',
|
|
||||||
description:
|
|
||||||
'Read the contents of multiple files simultaneously. This is more ' +
|
|
||||||
'efficient than reading files one by one when you need to analyze ' +
|
|
||||||
"or compare multiple files. Each file's content is returned with its " +
|
|
||||||
"path as a reference. Failed reads for individual files won't stop " +
|
|
||||||
'the entire operation. Only works within allowed directories.',
|
|
||||||
inputSchema: z.toJSONSchema(ReadMultipleFilesArgsSchema)
|
|
||||||
},
|
|
||||||
{
|
|
||||||
name: 'write_file',
|
|
||||||
description:
|
|
||||||
'Create a new file or completely overwrite an existing file with new content. ' +
|
|
||||||
'Use with caution as it will overwrite existing files without warning. ' +
|
|
||||||
'Handles text content with proper encoding. Only works within allowed directories.',
|
|
||||||
inputSchema: z.toJSONSchema(WriteFileArgsSchema)
|
|
||||||
},
|
|
||||||
{
|
|
||||||
name: 'edit_file',
|
|
||||||
description:
|
|
||||||
'Make line-based edits to a text file. Each edit replaces exact line sequences ' +
|
|
||||||
'with new content. Returns a git-style diff showing the changes made. ' +
|
|
||||||
'Only works within allowed directories.',
|
|
||||||
inputSchema: z.toJSONSchema(EditFileArgsSchema)
|
|
||||||
},
|
|
||||||
{
|
|
||||||
name: 'create_directory',
|
|
||||||
description:
|
|
||||||
'Create a new directory or ensure a directory exists. Can create multiple ' +
|
|
||||||
'nested directories in one operation. If the directory already exists, ' +
|
|
||||||
'this operation will succeed silently. Perfect for setting up directory ' +
|
|
||||||
'structures for projects or ensuring required paths exist. Only works within allowed directories.',
|
|
||||||
inputSchema: z.toJSONSchema(CreateDirectoryArgsSchema)
|
|
||||||
},
|
|
||||||
{
|
|
||||||
name: 'list_directory',
|
|
||||||
description:
|
|
||||||
'Get a detailed listing of all files and directories in a specified path. ' +
|
|
||||||
'Results clearly distinguish between files and directories with [FILE] and [DIR] ' +
|
|
||||||
'prefixes. This tool is essential for understanding directory structure and ' +
|
|
||||||
'finding specific files within a directory. Only works within allowed directories.',
|
|
||||||
inputSchema: z.toJSONSchema(ListDirectoryArgsSchema)
|
|
||||||
},
|
|
||||||
{
|
|
||||||
name: 'directory_tree',
|
|
||||||
description:
|
|
||||||
'Get a recursive tree view of files and directories as a JSON structure. ' +
|
|
||||||
"Each entry includes 'name', 'type' (file/directory), and 'children' for directories. " +
|
|
||||||
'Files have no children array, while directories always have a children array (which may be empty). ' +
|
|
||||||
'The output is formatted with 2-space indentation for readability. Only works within allowed directories.',
|
|
||||||
inputSchema: z.toJSONSchema(DirectoryTreeArgsSchema)
|
|
||||||
},
|
|
||||||
{
|
|
||||||
name: 'move_file',
|
|
||||||
description:
|
|
||||||
'Move or rename files and directories. Can move files between directories ' +
|
|
||||||
'and rename them in a single operation. If the destination exists, the ' +
|
|
||||||
'operation will fail. Works across different directories and can be used ' +
|
|
||||||
'for simple renaming within the same directory. Both source and destination must be within allowed directories.',
|
|
||||||
inputSchema: z.toJSONSchema(MoveFileArgsSchema)
|
|
||||||
},
|
|
||||||
{
|
|
||||||
name: 'search_files',
|
|
||||||
description:
|
|
||||||
'Recursively search for files and directories matching a pattern. ' +
|
|
||||||
'Searches through all subdirectories from the starting path. The search ' +
|
|
||||||
'is case-insensitive and matches partial names. Returns full paths to all ' +
|
|
||||||
"matching items. Great for finding files when you don't know their exact location. " +
|
|
||||||
'Only searches within allowed directories.',
|
|
||||||
inputSchema: z.toJSONSchema(SearchFilesArgsSchema)
|
|
||||||
},
|
|
||||||
{
|
|
||||||
name: 'get_file_info',
|
|
||||||
description:
|
|
||||||
'Retrieve detailed metadata about a file or directory. Returns comprehensive ' +
|
|
||||||
'information including size, creation time, last modified time, permissions, ' +
|
|
||||||
'and type. This tool is perfect for understanding file characteristics ' +
|
|
||||||
'without reading the actual content. Only works within allowed directories.',
|
|
||||||
inputSchema: z.toJSONSchema(GetFileInfoArgsSchema)
|
|
||||||
},
|
|
||||||
{
|
|
||||||
name: 'list_allowed_directories',
|
|
||||||
description:
|
|
||||||
'Returns the list of directories that this server is allowed to access. ' +
|
|
||||||
'Use this to understand which directories are available before trying to access files.',
|
|
||||||
inputSchema: {
|
|
||||||
type: 'object',
|
|
||||||
properties: {},
|
|
||||||
required: []
|
|
||||||
}
|
|
||||||
}
|
|
||||||
]
|
|
||||||
}
|
|
||||||
})
|
|
||||||
|
|
||||||
this.server.setRequestHandler(CallToolRequestSchema, async (request) => {
|
|
||||||
try {
|
|
||||||
const { name, arguments: args } = request.params
|
|
||||||
|
|
||||||
switch (name) {
|
|
||||||
case 'read_file': {
|
|
||||||
const parsed = ReadFileArgsSchema.safeParse(args)
|
|
||||||
if (!parsed.success) {
|
|
||||||
throw new Error(`Invalid arguments for read_file: ${parsed.error}`)
|
|
||||||
}
|
|
||||||
const validPath = await validatePath(this.allowedDirectories, parsed.data.path)
|
|
||||||
const content = await fs.readFile(validPath, 'utf-8')
|
|
||||||
return {
|
|
||||||
content: [{ type: 'text', text: content }]
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
case 'read_multiple_files': {
|
|
||||||
const parsed = ReadMultipleFilesArgsSchema.safeParse(args)
|
|
||||||
if (!parsed.success) {
|
|
||||||
throw new Error(`Invalid arguments for read_multiple_files: ${parsed.error}`)
|
|
||||||
}
|
|
||||||
const results = await Promise.all(
|
|
||||||
parsed.data.paths.map(async (filePath: string) => {
|
|
||||||
try {
|
|
||||||
const validPath = await validatePath(this.allowedDirectories, filePath)
|
|
||||||
const content = await fs.readFile(validPath, 'utf-8')
|
|
||||||
return `${filePath}:\n${content}\n`
|
|
||||||
} catch (error) {
|
|
||||||
const errorMessage = error instanceof Error ? error.message : String(error)
|
|
||||||
return `${filePath}: Error - ${errorMessage}`
|
|
||||||
}
|
|
||||||
})
|
|
||||||
)
|
|
||||||
return {
|
|
||||||
content: [{ type: 'text', text: results.join('\n---\n') }]
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
case 'write_file': {
|
|
||||||
const parsed = WriteFileArgsSchema.safeParse(args)
|
|
||||||
if (!parsed.success) {
|
|
||||||
throw new Error(`Invalid arguments for write_file: ${parsed.error}`)
|
|
||||||
}
|
|
||||||
const validPath = await validatePath(this.allowedDirectories, parsed.data.path)
|
|
||||||
await fs.writeFile(validPath, parsed.data.content, 'utf-8')
|
|
||||||
return {
|
|
||||||
content: [{ type: 'text', text: `Successfully wrote to ${parsed.data.path}` }]
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
case 'edit_file': {
|
|
||||||
const parsed = EditFileArgsSchema.safeParse(args)
|
|
||||||
if (!parsed.success) {
|
|
||||||
throw new Error(`Invalid arguments for edit_file: ${parsed.error}`)
|
|
||||||
}
|
|
||||||
const validPath = await validatePath(this.allowedDirectories, parsed.data.path)
|
|
||||||
const result = await applyFileEdits(validPath, parsed.data.edits, parsed.data.dryRun)
|
|
||||||
return {
|
|
||||||
content: [{ type: 'text', text: result }]
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
case 'create_directory': {
|
|
||||||
const parsed = CreateDirectoryArgsSchema.safeParse(args)
|
|
||||||
if (!parsed.success) {
|
|
||||||
throw new Error(`Invalid arguments for create_directory: ${parsed.error}`)
|
|
||||||
}
|
|
||||||
const validPath = await validatePath(this.allowedDirectories, parsed.data.path)
|
|
||||||
await fs.mkdir(validPath, { recursive: true })
|
|
||||||
return {
|
|
||||||
content: [{ type: 'text', text: `Successfully created directory ${parsed.data.path}` }]
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
case 'list_directory': {
|
|
||||||
const parsed = ListDirectoryArgsSchema.safeParse(args)
|
|
||||||
if (!parsed.success) {
|
|
||||||
throw new Error(`Invalid arguments for list_directory: ${parsed.error}`)
|
|
||||||
}
|
|
||||||
const validPath = await validatePath(this.allowedDirectories, parsed.data.path)
|
|
||||||
const entries = await fs.readdir(validPath, { withFileTypes: true })
|
|
||||||
const formatted = entries
|
|
||||||
.map((entry) => `${entry.isDirectory() ? '[DIR]' : '[FILE]'} ${entry.name}`)
|
|
||||||
.join('\n')
|
|
||||||
return {
|
|
||||||
content: [{ type: 'text', text: formatted }]
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
case 'directory_tree': {
|
|
||||||
const parsed = DirectoryTreeArgsSchema.safeParse(args)
|
|
||||||
if (!parsed.success) {
|
|
||||||
throw new Error(`Invalid arguments for directory_tree: ${parsed.error}`)
|
|
||||||
}
|
|
||||||
|
|
||||||
interface TreeEntry {
|
|
||||||
name: string
|
|
||||||
type: 'file' | 'directory'
|
|
||||||
children?: TreeEntry[]
|
|
||||||
}
|
|
||||||
|
|
||||||
async function buildTree(allowedDirectories: string[], currentPath: string): Promise<TreeEntry[]> {
|
|
||||||
const validPath = await validatePath(allowedDirectories, currentPath)
|
|
||||||
const entries = await fs.readdir(validPath, { withFileTypes: true })
|
|
||||||
const result: TreeEntry[] = []
|
|
||||||
|
|
||||||
for (const entry of entries) {
|
|
||||||
const entryData: TreeEntry = {
|
|
||||||
name: entry.name,
|
|
||||||
type: entry.isDirectory() ? 'directory' : 'file'
|
|
||||||
}
|
|
||||||
|
|
||||||
if (entry.isDirectory()) {
|
|
||||||
const subPath = path.join(currentPath, entry.name)
|
|
||||||
entryData.children = await buildTree(allowedDirectories, subPath)
|
|
||||||
}
|
|
||||||
|
|
||||||
result.push(entryData)
|
|
||||||
}
|
|
||||||
|
|
||||||
return result
|
|
||||||
}
|
|
||||||
|
|
||||||
const treeData = await buildTree(this.allowedDirectories, parsed.data.path)
|
|
||||||
return {
|
|
||||||
content: [
|
|
||||||
{
|
|
||||||
type: 'text',
|
|
||||||
text: JSON.stringify(treeData, null, 2)
|
|
||||||
}
|
|
||||||
]
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
case 'move_file': {
|
|
||||||
const parsed = MoveFileArgsSchema.safeParse(args)
|
|
||||||
if (!parsed.success) {
|
|
||||||
throw new Error(`Invalid arguments for move_file: ${parsed.error}`)
|
|
||||||
}
|
|
||||||
const validSourcePath = await validatePath(this.allowedDirectories, parsed.data.source)
|
|
||||||
const validDestPath = await validatePath(this.allowedDirectories, parsed.data.destination)
|
|
||||||
await fs.rename(validSourcePath, validDestPath)
|
|
||||||
return {
|
|
||||||
content: [
|
|
||||||
{ type: 'text', text: `Successfully moved ${parsed.data.source} to ${parsed.data.destination}` }
|
|
||||||
]
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
case 'search_files': {
|
|
||||||
const parsed = SearchFilesArgsSchema.safeParse(args)
|
|
||||||
if (!parsed.success) {
|
|
||||||
throw new Error(`Invalid arguments for search_files: ${parsed.error}`)
|
|
||||||
}
|
|
||||||
const validPath = await validatePath(this.allowedDirectories, parsed.data.path)
|
|
||||||
const results = await searchFiles(
|
|
||||||
this.allowedDirectories,
|
|
||||||
validPath,
|
|
||||||
parsed.data.pattern,
|
|
||||||
parsed.data.excludePatterns
|
|
||||||
)
|
|
||||||
return {
|
|
||||||
content: [{ type: 'text', text: results.length > 0 ? results.join('\n') : 'No matches found' }]
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
case 'get_file_info': {
|
|
||||||
const parsed = GetFileInfoArgsSchema.safeParse(args)
|
|
||||||
if (!parsed.success) {
|
|
||||||
throw new Error(`Invalid arguments for get_file_info: ${parsed.error}`)
|
|
||||||
}
|
|
||||||
const validPath = await validatePath(this.allowedDirectories, parsed.data.path)
|
|
||||||
const info = await getFileStats(validPath)
|
|
||||||
return {
|
|
||||||
content: [
|
|
||||||
{
|
|
||||||
type: 'text',
|
|
||||||
text: Object.entries(info)
|
|
||||||
.map(([key, value]) => `${key}: ${value}`)
|
|
||||||
.join('\n')
|
|
||||||
}
|
|
||||||
]
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
case 'list_allowed_directories': {
|
|
||||||
return {
|
|
||||||
content: [
|
|
||||||
{
|
|
||||||
type: 'text',
|
|
||||||
text: `Allowed directories:\n${this.allowedDirectories.join('\n')}`
|
|
||||||
}
|
|
||||||
]
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
default:
|
|
||||||
throw new Error(`Unknown tool: ${name}`)
|
|
||||||
}
|
|
||||||
} catch (error) {
|
|
||||||
const errorMessage = error instanceof Error ? error.message : String(error)
|
|
||||||
return {
|
|
||||||
content: [{ type: 'text', text: `Error: ${errorMessage}` }],
|
|
||||||
isError: true
|
|
||||||
}
|
|
||||||
}
|
|
||||||
})
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
export default FileSystemServer
|
|
||||||
2
src/main/mcpServers/filesystem/index.ts
Normal file
2
src/main/mcpServers/filesystem/index.ts
Normal file
@ -0,0 +1,2 @@
|
|||||||
|
// Re-export FileSystemServer to maintain existing import pattern
|
||||||
|
export { default, FileSystemServer } from './server'
|
||||||
118
src/main/mcpServers/filesystem/server.ts
Normal file
118
src/main/mcpServers/filesystem/server.ts
Normal file
@ -0,0 +1,118 @@
|
|||||||
|
import { Server } from '@modelcontextprotocol/sdk/server/index.js'
|
||||||
|
import { CallToolRequestSchema, ListToolsRequestSchema } from '@modelcontextprotocol/sdk/types.js'
|
||||||
|
import { app } from 'electron'
|
||||||
|
import fs from 'fs/promises'
|
||||||
|
import path from 'path'
|
||||||
|
|
||||||
|
import {
|
||||||
|
deleteToolDefinition,
|
||||||
|
editToolDefinition,
|
||||||
|
globToolDefinition,
|
||||||
|
grepToolDefinition,
|
||||||
|
handleDeleteTool,
|
||||||
|
handleEditTool,
|
||||||
|
handleGlobTool,
|
||||||
|
handleGrepTool,
|
||||||
|
handleLsTool,
|
||||||
|
handleReadTool,
|
||||||
|
handleWriteTool,
|
||||||
|
lsToolDefinition,
|
||||||
|
readToolDefinition,
|
||||||
|
writeToolDefinition
|
||||||
|
} from './tools'
|
||||||
|
import { logger } from './types'
|
||||||
|
|
||||||
|
export class FileSystemServer {
|
||||||
|
public server: Server
|
||||||
|
private baseDir: string
|
||||||
|
|
||||||
|
constructor(baseDir?: string) {
|
||||||
|
if (baseDir && path.isAbsolute(baseDir)) {
|
||||||
|
this.baseDir = baseDir
|
||||||
|
logger.info(`Using provided baseDir for filesystem MCP: ${baseDir}`)
|
||||||
|
} else {
|
||||||
|
const userData = app.getPath('userData')
|
||||||
|
this.baseDir = path.join(userData, 'Data', 'Workspace')
|
||||||
|
logger.info(`Using default workspace for filesystem MCP baseDir: ${this.baseDir}`)
|
||||||
|
}
|
||||||
|
|
||||||
|
this.server = new Server(
|
||||||
|
{
|
||||||
|
name: 'filesystem-server',
|
||||||
|
version: '2.0.0'
|
||||||
|
},
|
||||||
|
{
|
||||||
|
capabilities: {
|
||||||
|
tools: {}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
)
|
||||||
|
|
||||||
|
this.initialize()
|
||||||
|
}
|
||||||
|
|
||||||
|
async initialize() {
|
||||||
|
try {
|
||||||
|
await fs.mkdir(this.baseDir, { recursive: true })
|
||||||
|
} catch (error) {
|
||||||
|
logger.error('Failed to create filesystem MCP baseDir', { error, baseDir: this.baseDir })
|
||||||
|
}
|
||||||
|
|
||||||
|
// Register tool list handler
|
||||||
|
this.server.setRequestHandler(ListToolsRequestSchema, async () => {
|
||||||
|
return {
|
||||||
|
tools: [
|
||||||
|
globToolDefinition,
|
||||||
|
lsToolDefinition,
|
||||||
|
grepToolDefinition,
|
||||||
|
readToolDefinition,
|
||||||
|
editToolDefinition,
|
||||||
|
writeToolDefinition,
|
||||||
|
deleteToolDefinition
|
||||||
|
]
|
||||||
|
}
|
||||||
|
})
|
||||||
|
|
||||||
|
// Register tool call handler
|
||||||
|
this.server.setRequestHandler(CallToolRequestSchema, async (request) => {
|
||||||
|
try {
|
||||||
|
const { name, arguments: args } = request.params
|
||||||
|
|
||||||
|
switch (name) {
|
||||||
|
case 'glob':
|
||||||
|
return await handleGlobTool(args, this.baseDir)
|
||||||
|
|
||||||
|
case 'ls':
|
||||||
|
return await handleLsTool(args, this.baseDir)
|
||||||
|
|
||||||
|
case 'grep':
|
||||||
|
return await handleGrepTool(args, this.baseDir)
|
||||||
|
|
||||||
|
case 'read':
|
||||||
|
return await handleReadTool(args, this.baseDir)
|
||||||
|
|
||||||
|
case 'edit':
|
||||||
|
return await handleEditTool(args, this.baseDir)
|
||||||
|
|
||||||
|
case 'write':
|
||||||
|
return await handleWriteTool(args, this.baseDir)
|
||||||
|
|
||||||
|
case 'delete':
|
||||||
|
return await handleDeleteTool(args, this.baseDir)
|
||||||
|
|
||||||
|
default:
|
||||||
|
throw new Error(`Unknown tool: ${name}`)
|
||||||
|
}
|
||||||
|
} catch (error) {
|
||||||
|
const errorMessage = error instanceof Error ? error.message : String(error)
|
||||||
|
logger.error(`Tool execution error for ${request.params.name}:`, { error })
|
||||||
|
return {
|
||||||
|
content: [{ type: 'text', text: `Error: ${errorMessage}` }],
|
||||||
|
isError: true
|
||||||
|
}
|
||||||
|
}
|
||||||
|
})
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
export default FileSystemServer
|
||||||
93
src/main/mcpServers/filesystem/tools/delete.ts
Normal file
93
src/main/mcpServers/filesystem/tools/delete.ts
Normal file
@ -0,0 +1,93 @@
|
|||||||
|
import fs from 'fs/promises'
|
||||||
|
import path from 'path'
|
||||||
|
import * as z from 'zod'
|
||||||
|
|
||||||
|
import { logger, validatePath } from '../types'
|
||||||
|
|
||||||
|
// Schema definition
|
||||||
|
export const DeleteToolSchema = z.object({
|
||||||
|
path: z.string().describe('The path to the file or directory to delete'),
|
||||||
|
recursive: z.boolean().optional().describe('For directories, whether to delete recursively (default: false)')
|
||||||
|
})
|
||||||
|
|
||||||
|
// Tool definition with detailed description
|
||||||
|
export const deleteToolDefinition = {
|
||||||
|
name: 'delete',
|
||||||
|
description: `Deletes a file or directory from the filesystem.
|
||||||
|
|
||||||
|
CAUTION: This operation cannot be undone!
|
||||||
|
|
||||||
|
- For files: simply provide the path
|
||||||
|
- For empty directories: provide the path
|
||||||
|
- For non-empty directories: set recursive=true
|
||||||
|
- The path must be an absolute path, not a relative path
|
||||||
|
- Always verify the path before deleting to avoid data loss`,
|
||||||
|
inputSchema: z.toJSONSchema(DeleteToolSchema)
|
||||||
|
}
|
||||||
|
|
||||||
|
// Handler implementation
|
||||||
|
export async function handleDeleteTool(args: unknown, baseDir: string) {
|
||||||
|
const parsed = DeleteToolSchema.safeParse(args)
|
||||||
|
if (!parsed.success) {
|
||||||
|
throw new Error(`Invalid arguments for delete: ${parsed.error}`)
|
||||||
|
}
|
||||||
|
|
||||||
|
const targetPath = parsed.data.path
|
||||||
|
const validPath = await validatePath(targetPath, baseDir)
|
||||||
|
const recursive = parsed.data.recursive || false
|
||||||
|
|
||||||
|
// Check if path exists and get stats
|
||||||
|
let stats
|
||||||
|
try {
|
||||||
|
stats = await fs.stat(validPath)
|
||||||
|
} catch (error: any) {
|
||||||
|
if (error.code === 'ENOENT') {
|
||||||
|
throw new Error(`Path not found: ${targetPath}`)
|
||||||
|
}
|
||||||
|
throw error
|
||||||
|
}
|
||||||
|
|
||||||
|
const isDirectory = stats.isDirectory()
|
||||||
|
const relativePath = path.relative(baseDir, validPath)
|
||||||
|
|
||||||
|
// Perform deletion
|
||||||
|
try {
|
||||||
|
if (isDirectory) {
|
||||||
|
if (recursive) {
|
||||||
|
// Delete directory recursively
|
||||||
|
await fs.rm(validPath, { recursive: true, force: true })
|
||||||
|
} else {
|
||||||
|
// Try to delete empty directory
|
||||||
|
await fs.rmdir(validPath)
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
// Delete file
|
||||||
|
await fs.unlink(validPath)
|
||||||
|
}
|
||||||
|
} catch (error: any) {
|
||||||
|
if (error.code === 'ENOTEMPTY') {
|
||||||
|
throw new Error(`Directory not empty: ${targetPath}. Use recursive=true to delete non-empty directories.`)
|
||||||
|
}
|
||||||
|
throw new Error(`Failed to delete: ${error.message}`)
|
||||||
|
}
|
||||||
|
|
||||||
|
// Log the operation
|
||||||
|
logger.info('Path deleted', {
|
||||||
|
path: validPath,
|
||||||
|
type: isDirectory ? 'directory' : 'file',
|
||||||
|
recursive: isDirectory ? recursive : undefined
|
||||||
|
})
|
||||||
|
|
||||||
|
// Format output
|
||||||
|
const itemType = isDirectory ? 'Directory' : 'File'
|
||||||
|
const recursiveNote = isDirectory && recursive ? ' (recursive)' : ''
|
||||||
|
|
||||||
|
return {
|
||||||
|
content: [
|
||||||
|
{
|
||||||
|
type: 'text',
|
||||||
|
text: `${itemType} deleted${recursiveNote}: ${relativePath}`
|
||||||
|
}
|
||||||
|
]
|
||||||
|
}
|
||||||
|
}
|
||||||
130
src/main/mcpServers/filesystem/tools/edit.ts
Normal file
130
src/main/mcpServers/filesystem/tools/edit.ts
Normal file
@ -0,0 +1,130 @@
|
|||||||
|
import fs from 'fs/promises'
|
||||||
|
import path from 'path'
|
||||||
|
import * as z from 'zod'
|
||||||
|
|
||||||
|
import { logger, replaceWithFuzzyMatch, validatePath } from '../types'
|
||||||
|
|
||||||
|
// Schema definition
|
||||||
|
export const EditToolSchema = z.object({
|
||||||
|
file_path: z.string().describe('The path to the file to modify'),
|
||||||
|
old_string: z.string().describe('The text to replace'),
|
||||||
|
new_string: z.string().describe('The text to replace it with'),
|
||||||
|
replace_all: z.boolean().optional().default(false).describe('Replace all occurrences of old_string (default false)')
|
||||||
|
})
|
||||||
|
|
||||||
|
// Tool definition with detailed description
|
||||||
|
export const editToolDefinition = {
|
||||||
|
name: 'edit',
|
||||||
|
description: `Performs exact string replacements in files.
|
||||||
|
|
||||||
|
- You must use the 'read' tool at least once before editing
|
||||||
|
- The file_path must be an absolute path, not a relative path
|
||||||
|
- Preserve exact indentation from read output (after the line number prefix)
|
||||||
|
- Never include line number prefixes in old_string or new_string
|
||||||
|
- ALWAYS prefer editing existing files over creating new ones
|
||||||
|
- The edit will FAIL if old_string is not found in the file
|
||||||
|
- The edit will FAIL if old_string appears multiple times (provide more context or use replace_all)
|
||||||
|
- The edit will FAIL if old_string equals new_string
|
||||||
|
- Use replace_all to rename variables or replace all occurrences`,
|
||||||
|
inputSchema: z.toJSONSchema(EditToolSchema)
|
||||||
|
}
|
||||||
|
|
||||||
|
// Handler implementation
|
||||||
|
export async function handleEditTool(args: unknown, baseDir: string) {
|
||||||
|
const parsed = EditToolSchema.safeParse(args)
|
||||||
|
if (!parsed.success) {
|
||||||
|
throw new Error(`Invalid arguments for edit: ${parsed.error}`)
|
||||||
|
}
|
||||||
|
|
||||||
|
const { file_path: filePath, old_string: oldString, new_string: newString, replace_all: replaceAll } = parsed.data
|
||||||
|
|
||||||
|
// Validate path
|
||||||
|
const validPath = await validatePath(filePath, baseDir)
|
||||||
|
|
||||||
|
// Check if file exists
|
||||||
|
try {
|
||||||
|
const stats = await fs.stat(validPath)
|
||||||
|
if (!stats.isFile()) {
|
||||||
|
throw new Error(`Path is not a file: ${filePath}`)
|
||||||
|
}
|
||||||
|
} catch (error: any) {
|
||||||
|
if (error.code === 'ENOENT') {
|
||||||
|
// If old_string is empty, this is a create new file operation
|
||||||
|
if (oldString === '') {
|
||||||
|
// Create parent directory if needed
|
||||||
|
const parentDir = path.dirname(validPath)
|
||||||
|
await fs.mkdir(parentDir, { recursive: true })
|
||||||
|
|
||||||
|
// Write the new content
|
||||||
|
await fs.writeFile(validPath, newString, 'utf-8')
|
||||||
|
|
||||||
|
logger.info('File created', { path: validPath })
|
||||||
|
|
||||||
|
const relativePath = path.relative(baseDir, validPath)
|
||||||
|
return {
|
||||||
|
content: [
|
||||||
|
{
|
||||||
|
type: 'text',
|
||||||
|
text: `Created new file: ${relativePath}\nLines: ${newString.split('\n').length}`
|
||||||
|
}
|
||||||
|
]
|
||||||
|
}
|
||||||
|
}
|
||||||
|
throw new Error(`File not found: ${filePath}`)
|
||||||
|
}
|
||||||
|
throw error
|
||||||
|
}
|
||||||
|
|
||||||
|
// Read current content
|
||||||
|
const content = await fs.readFile(validPath, 'utf-8')
|
||||||
|
|
||||||
|
// Handle special case: old_string is empty (create file with content)
|
||||||
|
if (oldString === '') {
|
||||||
|
await fs.writeFile(validPath, newString, 'utf-8')
|
||||||
|
|
||||||
|
logger.info('File overwritten', { path: validPath })
|
||||||
|
|
||||||
|
const relativePath = path.relative(baseDir, validPath)
|
||||||
|
return {
|
||||||
|
content: [
|
||||||
|
{
|
||||||
|
type: 'text',
|
||||||
|
text: `Overwrote file: ${relativePath}\nLines: ${newString.split('\n').length}`
|
||||||
|
}
|
||||||
|
]
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Perform the replacement with fuzzy matching
|
||||||
|
const newContent = replaceWithFuzzyMatch(content, oldString, newString, replaceAll)
|
||||||
|
|
||||||
|
// Write the modified content
|
||||||
|
await fs.writeFile(validPath, newContent, 'utf-8')
|
||||||
|
|
||||||
|
logger.info('File edited', {
|
||||||
|
path: validPath,
|
||||||
|
replaceAll
|
||||||
|
})
|
||||||
|
|
||||||
|
// Generate a simple diff summary
|
||||||
|
const oldLines = content.split('\n').length
|
||||||
|
const newLines = newContent.split('\n').length
|
||||||
|
const lineDiff = newLines - oldLines
|
||||||
|
|
||||||
|
const relativePath = path.relative(baseDir, validPath)
|
||||||
|
let diffSummary = `Edited: ${relativePath}`
|
||||||
|
if (lineDiff > 0) {
|
||||||
|
diffSummary += `\n+${lineDiff} lines`
|
||||||
|
} else if (lineDiff < 0) {
|
||||||
|
diffSummary += `\n${lineDiff} lines`
|
||||||
|
}
|
||||||
|
|
||||||
|
return {
|
||||||
|
content: [
|
||||||
|
{
|
||||||
|
type: 'text',
|
||||||
|
text: diffSummary
|
||||||
|
}
|
||||||
|
]
|
||||||
|
}
|
||||||
|
}
|
||||||
149
src/main/mcpServers/filesystem/tools/glob.ts
Normal file
149
src/main/mcpServers/filesystem/tools/glob.ts
Normal file
@ -0,0 +1,149 @@
|
|||||||
|
import fs from 'fs/promises'
|
||||||
|
import path from 'path'
|
||||||
|
import * as z from 'zod'
|
||||||
|
|
||||||
|
import type { FileInfo } from '../types'
|
||||||
|
import { logger, MAX_FILES_LIMIT, runRipgrep, validatePath } from '../types'
|
||||||
|
|
||||||
|
// Schema definition
|
||||||
|
export const GlobToolSchema = z.object({
|
||||||
|
pattern: z.string().describe('The glob pattern to match files against'),
|
||||||
|
path: z
|
||||||
|
.string()
|
||||||
|
.optional()
|
||||||
|
.describe('The directory to search in (must be absolute path). Defaults to the base directory')
|
||||||
|
})
|
||||||
|
|
||||||
|
// Tool definition with detailed description
|
||||||
|
export const globToolDefinition = {
|
||||||
|
name: 'glob',
|
||||||
|
description: `Fast file pattern matching tool that works with any codebase size.
|
||||||
|
|
||||||
|
- Supports glob patterns like "**/*.js" or "src/**/*.ts"
|
||||||
|
- Returns matching absolute file paths sorted by modification time (newest first)
|
||||||
|
- Use this when you need to find files by name patterns
|
||||||
|
- Patterns without "/" (e.g., "*.txt") match files at ANY depth in the directory tree
|
||||||
|
- Patterns with "/" (e.g., "src/*.ts") match relative to the search path
|
||||||
|
- Pattern syntax: * (any chars), ** (any path), {a,b} (alternatives), ? (single char)
|
||||||
|
- Results are limited to 100 files
|
||||||
|
- The path parameter must be an absolute path if specified
|
||||||
|
- If path is not specified, defaults to the base directory
|
||||||
|
- IMPORTANT: Omit the path field for the default directory (don't use "undefined" or "null")`,
|
||||||
|
inputSchema: z.toJSONSchema(GlobToolSchema)
|
||||||
|
}
|
||||||
|
|
||||||
|
// Handler implementation
|
||||||
|
export async function handleGlobTool(args: unknown, baseDir: string) {
|
||||||
|
const parsed = GlobToolSchema.safeParse(args)
|
||||||
|
if (!parsed.success) {
|
||||||
|
throw new Error(`Invalid arguments for glob: ${parsed.error}`)
|
||||||
|
}
|
||||||
|
|
||||||
|
const searchPath = parsed.data.path || baseDir
|
||||||
|
const validPath = await validatePath(searchPath, baseDir)
|
||||||
|
|
||||||
|
// Verify the search directory exists
|
||||||
|
try {
|
||||||
|
const stats = await fs.stat(validPath)
|
||||||
|
if (!stats.isDirectory()) {
|
||||||
|
throw new Error(`Path is not a directory: ${validPath}`)
|
||||||
|
}
|
||||||
|
} catch (error: unknown) {
|
||||||
|
if (error && typeof error === 'object' && 'code' in error && error.code === 'ENOENT') {
|
||||||
|
throw new Error(`Directory not found: ${validPath}`)
|
||||||
|
}
|
||||||
|
throw error
|
||||||
|
}
|
||||||
|
|
||||||
|
// Validate pattern
|
||||||
|
const pattern = parsed.data.pattern.trim()
|
||||||
|
if (!pattern) {
|
||||||
|
throw new Error('Pattern cannot be empty')
|
||||||
|
}
|
||||||
|
|
||||||
|
const files: FileInfo[] = []
|
||||||
|
let truncated = false
|
||||||
|
|
||||||
|
// Build ripgrep arguments for file listing using --glob=pattern format
|
||||||
|
const rgArgs: string[] = [
|
||||||
|
'--files',
|
||||||
|
'--follow',
|
||||||
|
'--hidden',
|
||||||
|
`--glob=${pattern}`,
|
||||||
|
'--glob=!.git/*',
|
||||||
|
'--glob=!node_modules/*',
|
||||||
|
'--glob=!dist/*',
|
||||||
|
'--glob=!build/*',
|
||||||
|
'--glob=!__pycache__/*',
|
||||||
|
validPath
|
||||||
|
]
|
||||||
|
|
||||||
|
// Use ripgrep for file listing
|
||||||
|
logger.debug('Running ripgrep with args', { rgArgs })
|
||||||
|
const rgResult = await runRipgrep(rgArgs)
|
||||||
|
logger.debug('Ripgrep result', {
|
||||||
|
ok: rgResult.ok,
|
||||||
|
exitCode: rgResult.exitCode,
|
||||||
|
stdoutLength: rgResult.stdout.length,
|
||||||
|
stdoutPreview: rgResult.stdout.slice(0, 500)
|
||||||
|
})
|
||||||
|
|
||||||
|
// Process results if we have stdout content
|
||||||
|
// Exit code 2 can indicate partial errors (e.g., permission denied on some dirs) but still have valid results
|
||||||
|
if (rgResult.ok && rgResult.stdout.length > 0) {
|
||||||
|
const lines = rgResult.stdout.split('\n').filter(Boolean)
|
||||||
|
logger.debug('Parsed lines from ripgrep', { lineCount: lines.length, lines })
|
||||||
|
|
||||||
|
for (const line of lines) {
|
||||||
|
if (files.length >= MAX_FILES_LIMIT) {
|
||||||
|
truncated = true
|
||||||
|
break
|
||||||
|
}
|
||||||
|
|
||||||
|
const filePath = line.trim()
|
||||||
|
if (!filePath) continue
|
||||||
|
|
||||||
|
const absolutePath = path.isAbsolute(filePath) ? filePath : path.resolve(validPath, filePath)
|
||||||
|
|
||||||
|
try {
|
||||||
|
const stats = await fs.stat(absolutePath)
|
||||||
|
files.push({
|
||||||
|
path: absolutePath,
|
||||||
|
type: 'file', // ripgrep --files only returns files
|
||||||
|
size: stats.size,
|
||||||
|
modified: stats.mtime
|
||||||
|
})
|
||||||
|
} catch (error) {
|
||||||
|
logger.debug('Failed to stat file from ripgrep output, skipping', { file: absolutePath, error })
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Sort by modification time (newest first)
|
||||||
|
files.sort((a, b) => {
|
||||||
|
const aTime = a.modified ? a.modified.getTime() : 0
|
||||||
|
const bTime = b.modified ? b.modified.getTime() : 0
|
||||||
|
return bTime - aTime
|
||||||
|
})
|
||||||
|
|
||||||
|
// Format output - always use absolute paths
|
||||||
|
const output: string[] = []
|
||||||
|
if (files.length === 0) {
|
||||||
|
output.push(`No files found matching pattern "${parsed.data.pattern}" in ${validPath}`)
|
||||||
|
} else {
|
||||||
|
output.push(...files.map((f) => f.path))
|
||||||
|
if (truncated) {
|
||||||
|
output.push('')
|
||||||
|
output.push(`(Results truncated to ${MAX_FILES_LIMIT} files. Consider using a more specific pattern.)`)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return {
|
||||||
|
content: [
|
||||||
|
{
|
||||||
|
type: 'text',
|
||||||
|
text: output.join('\n')
|
||||||
|
}
|
||||||
|
]
|
||||||
|
}
|
||||||
|
}
|
||||||
266
src/main/mcpServers/filesystem/tools/grep.ts
Normal file
266
src/main/mcpServers/filesystem/tools/grep.ts
Normal file
@ -0,0 +1,266 @@
|
|||||||
|
import fs from 'fs/promises'
|
||||||
|
import path from 'path'
|
||||||
|
import * as z from 'zod'
|
||||||
|
|
||||||
|
import type { GrepMatch } from '../types'
|
||||||
|
import { isBinaryFile, MAX_GREP_MATCHES, MAX_LINE_LENGTH, runRipgrep, validatePath } from '../types'
|
||||||
|
|
||||||
|
// Schema definition
|
||||||
|
export const GrepToolSchema = z.object({
|
||||||
|
pattern: z.string().describe('The regex pattern to search for in file contents'),
|
||||||
|
path: z
|
||||||
|
.string()
|
||||||
|
.optional()
|
||||||
|
.describe('The directory to search in (must be absolute path). Defaults to the base directory'),
|
||||||
|
include: z.string().optional().describe('File pattern to include in the search (e.g. "*.js", "*.{ts,tsx}")')
|
||||||
|
})
|
||||||
|
|
||||||
|
// Tool definition with detailed description
|
||||||
|
export const grepToolDefinition = {
|
||||||
|
name: 'grep',
|
||||||
|
description: `Fast content search tool that works with any codebase size.
|
||||||
|
|
||||||
|
- Searches file contents using regular expressions
|
||||||
|
- Supports full regex syntax (e.g., "log.*Error", "function\\s+\\w+")
|
||||||
|
- Filter files by pattern with include (e.g., "*.js", "*.{ts,tsx}")
|
||||||
|
- Returns absolute file paths and line numbers with matching content
|
||||||
|
- Results are limited to 100 matches
|
||||||
|
- Binary files are automatically skipped
|
||||||
|
- Common directories (node_modules, .git, dist) are excluded
|
||||||
|
- The path parameter must be an absolute path if specified
|
||||||
|
- If path is not specified, defaults to the base directory`,
|
||||||
|
inputSchema: z.toJSONSchema(GrepToolSchema)
|
||||||
|
}
|
||||||
|
|
||||||
|
// Handler implementation
|
||||||
|
export async function handleGrepTool(args: unknown, baseDir: string) {
|
||||||
|
const parsed = GrepToolSchema.safeParse(args)
|
||||||
|
if (!parsed.success) {
|
||||||
|
throw new Error(`Invalid arguments for grep: ${parsed.error}`)
|
||||||
|
}
|
||||||
|
|
||||||
|
const data = parsed.data
|
||||||
|
|
||||||
|
if (!data.pattern) {
|
||||||
|
throw new Error('Pattern is required for grep')
|
||||||
|
}
|
||||||
|
|
||||||
|
const searchPath = data.path || baseDir
|
||||||
|
const validPath = await validatePath(searchPath, baseDir)
|
||||||
|
|
||||||
|
const matches: GrepMatch[] = []
|
||||||
|
let truncated = false
|
||||||
|
let regex: RegExp
|
||||||
|
|
||||||
|
// Build ripgrep arguments
|
||||||
|
const rgArgs: string[] = [
|
||||||
|
'--no-heading',
|
||||||
|
'--line-number',
|
||||||
|
'--color',
|
||||||
|
'never',
|
||||||
|
'--ignore-case',
|
||||||
|
'--glob',
|
||||||
|
'!.git/**',
|
||||||
|
'--glob',
|
||||||
|
'!node_modules/**',
|
||||||
|
'--glob',
|
||||||
|
'!dist/**',
|
||||||
|
'--glob',
|
||||||
|
'!build/**',
|
||||||
|
'--glob',
|
||||||
|
'!__pycache__/**'
|
||||||
|
]
|
||||||
|
|
||||||
|
if (data.include) {
|
||||||
|
for (const pat of data.include
|
||||||
|
.split(',')
|
||||||
|
.map((p) => p.trim())
|
||||||
|
.filter(Boolean)) {
|
||||||
|
rgArgs.push('--glob', pat)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
rgArgs.push(data.pattern)
|
||||||
|
rgArgs.push(validPath)
|
||||||
|
|
||||||
|
try {
|
||||||
|
regex = new RegExp(data.pattern, 'gi')
|
||||||
|
} catch (error) {
|
||||||
|
throw new Error(`Invalid regex pattern: ${data.pattern}`)
|
||||||
|
}
|
||||||
|
|
||||||
|
async function searchFile(filePath: string): Promise<void> {
|
||||||
|
if (matches.length >= MAX_GREP_MATCHES) {
|
||||||
|
truncated = true
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
try {
|
||||||
|
// Skip binary files
|
||||||
|
if (await isBinaryFile(filePath)) {
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
const content = await fs.readFile(filePath, 'utf-8')
|
||||||
|
const lines = content.split('\n')
|
||||||
|
|
||||||
|
lines.forEach((line, index) => {
|
||||||
|
if (matches.length >= MAX_GREP_MATCHES) {
|
||||||
|
truncated = true
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
if (regex.test(line)) {
|
||||||
|
// Truncate long lines
|
||||||
|
const truncatedLine = line.length > MAX_LINE_LENGTH ? line.substring(0, MAX_LINE_LENGTH) + '...' : line
|
||||||
|
|
||||||
|
matches.push({
|
||||||
|
file: filePath,
|
||||||
|
line: index + 1,
|
||||||
|
content: truncatedLine.trim()
|
||||||
|
})
|
||||||
|
}
|
||||||
|
})
|
||||||
|
} catch (error) {
|
||||||
|
// Skip files we can't read
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
async function searchDirectory(dir: string): Promise<void> {
|
||||||
|
if (matches.length >= MAX_GREP_MATCHES) {
|
||||||
|
truncated = true
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
try {
|
||||||
|
const entries = await fs.readdir(dir, { withFileTypes: true })
|
||||||
|
|
||||||
|
for (const entry of entries) {
|
||||||
|
if (matches.length >= MAX_GREP_MATCHES) {
|
||||||
|
truncated = true
|
||||||
|
break
|
||||||
|
}
|
||||||
|
|
||||||
|
const fullPath = path.join(dir, entry.name)
|
||||||
|
|
||||||
|
// Skip common ignore patterns
|
||||||
|
if (entry.name.startsWith('.') && entry.name !== '.env.example') {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
if (['node_modules', 'dist', 'build', '__pycache__', '.git'].includes(entry.name)) {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
|
||||||
|
if (entry.isFile()) {
|
||||||
|
// Check if file matches include pattern
|
||||||
|
if (data.include) {
|
||||||
|
const includePatterns = data.include.split(',').map((p) => p.trim())
|
||||||
|
const fileName = path.basename(fullPath)
|
||||||
|
const matchesInclude = includePatterns.some((pattern) => {
|
||||||
|
// Simple glob pattern matching
|
||||||
|
const regexPattern = pattern
|
||||||
|
.replace(/\*/g, '.*')
|
||||||
|
.replace(/\?/g, '.')
|
||||||
|
.replace(/\{([^}]+)\}/g, (_, group) => `(${group.split(',').join('|')})`)
|
||||||
|
return new RegExp(`^${regexPattern}$`).test(fileName)
|
||||||
|
})
|
||||||
|
if (!matchesInclude) {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
await searchFile(fullPath)
|
||||||
|
} else if (entry.isDirectory()) {
|
||||||
|
await searchDirectory(fullPath)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
} catch (error) {
|
||||||
|
// Skip directories we can't read
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Perform the search
|
||||||
|
let usedRipgrep = false
|
||||||
|
try {
|
||||||
|
const rgResult = await runRipgrep(rgArgs)
|
||||||
|
if (rgResult.ok && rgResult.exitCode !== null && rgResult.exitCode !== 2) {
|
||||||
|
usedRipgrep = true
|
||||||
|
const lines = rgResult.stdout.split('\n').filter(Boolean)
|
||||||
|
for (const line of lines) {
|
||||||
|
if (matches.length >= MAX_GREP_MATCHES) {
|
||||||
|
truncated = true
|
||||||
|
break
|
||||||
|
}
|
||||||
|
|
||||||
|
const firstColon = line.indexOf(':')
|
||||||
|
const secondColon = line.indexOf(':', firstColon + 1)
|
||||||
|
if (firstColon === -1 || secondColon === -1) continue
|
||||||
|
|
||||||
|
const filePart = line.slice(0, firstColon)
|
||||||
|
const linePart = line.slice(firstColon + 1, secondColon)
|
||||||
|
const contentPart = line.slice(secondColon + 1)
|
||||||
|
const lineNum = Number.parseInt(linePart, 10)
|
||||||
|
if (!Number.isFinite(lineNum)) continue
|
||||||
|
|
||||||
|
const absoluteFilePath = path.isAbsolute(filePart) ? filePart : path.resolve(baseDir, filePart)
|
||||||
|
const truncatedLine =
|
||||||
|
contentPart.length > MAX_LINE_LENGTH ? contentPart.substring(0, MAX_LINE_LENGTH) + '...' : contentPart
|
||||||
|
|
||||||
|
matches.push({
|
||||||
|
file: absoluteFilePath,
|
||||||
|
line: lineNum,
|
||||||
|
content: truncatedLine.trim()
|
||||||
|
})
|
||||||
|
}
|
||||||
|
}
|
||||||
|
} catch {
|
||||||
|
usedRipgrep = false
|
||||||
|
}
|
||||||
|
|
||||||
|
if (!usedRipgrep) {
|
||||||
|
const stats = await fs.stat(validPath)
|
||||||
|
if (stats.isFile()) {
|
||||||
|
await searchFile(validPath)
|
||||||
|
} else {
|
||||||
|
await searchDirectory(validPath)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Format output
|
||||||
|
const output: string[] = []
|
||||||
|
|
||||||
|
if (matches.length === 0) {
|
||||||
|
output.push('No matches found')
|
||||||
|
} else {
|
||||||
|
// Group matches by file
|
||||||
|
const fileGroups = new Map<string, GrepMatch[]>()
|
||||||
|
matches.forEach((match) => {
|
||||||
|
if (!fileGroups.has(match.file)) {
|
||||||
|
fileGroups.set(match.file, [])
|
||||||
|
}
|
||||||
|
fileGroups.get(match.file)!.push(match)
|
||||||
|
})
|
||||||
|
|
||||||
|
// Format grouped matches - always use absolute paths
|
||||||
|
fileGroups.forEach((fileMatches, filePath) => {
|
||||||
|
output.push(`\n${filePath}:`)
|
||||||
|
fileMatches.forEach((match) => {
|
||||||
|
output.push(` ${match.line}: ${match.content}`)
|
||||||
|
})
|
||||||
|
})
|
||||||
|
|
||||||
|
if (truncated) {
|
||||||
|
output.push('')
|
||||||
|
output.push(`(Results truncated to ${MAX_GREP_MATCHES} matches. Consider using a more specific pattern or path.)`)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return {
|
||||||
|
content: [
|
||||||
|
{
|
||||||
|
type: 'text',
|
||||||
|
text: output.join('\n')
|
||||||
|
}
|
||||||
|
]
|
||||||
|
}
|
||||||
|
}
|
||||||
8
src/main/mcpServers/filesystem/tools/index.ts
Normal file
8
src/main/mcpServers/filesystem/tools/index.ts
Normal file
@ -0,0 +1,8 @@
|
|||||||
|
// Barrel module for the filesystem MCP tools: re-exports each tool's
// definition (name/description/schema, used when listing tools) and its
// handler (used when dispatching a tool call) from its implementation file.
export { deleteToolDefinition, handleDeleteTool } from './delete'
export { editToolDefinition, handleEditTool } from './edit'
export { globToolDefinition, handleGlobTool } from './glob'
export { grepToolDefinition, handleGrepTool } from './grep'
export { handleLsTool, lsToolDefinition } from './ls'
export { handleReadTool, readToolDefinition } from './read'
export { handleWriteTool, writeToolDefinition } from './write'
|
||||||
150
src/main/mcpServers/filesystem/tools/ls.ts
Normal file
150
src/main/mcpServers/filesystem/tools/ls.ts
Normal file
@ -0,0 +1,150 @@
|
|||||||
|
import fs from 'fs/promises'
|
||||||
|
import path from 'path'
|
||||||
|
import * as z from 'zod'
|
||||||
|
|
||||||
|
import { MAX_FILES_LIMIT, validatePath } from '../types'
|
||||||
|
|
||||||
|
// Input schema for the `ls` tool. Both fields are optional: with no
// arguments the tool lists the base directory non-recursively.
export const LsToolSchema = z.object({
  path: z.string().optional().describe('The directory to list (must be absolute path). Defaults to the base directory'),
  recursive: z.boolean().optional().describe('Whether to list directories recursively (default: false)')
})
|
||||||
|
|
||||||
|
// MCP tool definition for `ls`. The description below is the model-facing
// contract (do not edit casually — the handler implements these bullets);
// `inputSchema` is the JSON-Schema rendering of LsToolSchema.
export const lsToolDefinition = {
  name: 'ls',
  description: `Lists files and directories in a specified path.

- Returns a tree-like structure with icons (📁 directories, 📄 files)
- Shows the absolute directory path in the header
- Entries are sorted alphabetically with directories first
- Can list recursively with recursive=true (up to 5 levels deep)
- Common directories (node_modules, dist, .git) are excluded
- Hidden files (starting with .) are excluded except .env.example
- Results are limited to 100 entries
- The path parameter must be an absolute path if specified
- If path is not specified, defaults to the base directory`,
  inputSchema: z.toJSONSchema(LsToolSchema)
}
|
||||||
|
|
||||||
|
// Handler implementation
|
||||||
|
export async function handleLsTool(args: unknown, baseDir: string) {
|
||||||
|
const parsed = LsToolSchema.safeParse(args)
|
||||||
|
if (!parsed.success) {
|
||||||
|
throw new Error(`Invalid arguments for ls: ${parsed.error}`)
|
||||||
|
}
|
||||||
|
|
||||||
|
const targetPath = parsed.data.path || baseDir
|
||||||
|
const validPath = await validatePath(targetPath, baseDir)
|
||||||
|
const recursive = parsed.data.recursive || false
|
||||||
|
|
||||||
|
interface TreeNode {
|
||||||
|
name: string
|
||||||
|
type: 'file' | 'directory'
|
||||||
|
children?: TreeNode[]
|
||||||
|
}
|
||||||
|
|
||||||
|
let fileCount = 0
|
||||||
|
let truncated = false
|
||||||
|
|
||||||
|
async function buildTree(dirPath: string, depth: number = 0): Promise<TreeNode[]> {
|
||||||
|
if (fileCount >= MAX_FILES_LIMIT) {
|
||||||
|
truncated = true
|
||||||
|
return []
|
||||||
|
}
|
||||||
|
|
||||||
|
try {
|
||||||
|
const entries = await fs.readdir(dirPath, { withFileTypes: true })
|
||||||
|
const nodes: TreeNode[] = []
|
||||||
|
|
||||||
|
// Sort entries: directories first, then files, alphabetically
|
||||||
|
entries.sort((a, b) => {
|
||||||
|
if (a.isDirectory() && !b.isDirectory()) return -1
|
||||||
|
if (!a.isDirectory() && b.isDirectory()) return 1
|
||||||
|
return a.name.localeCompare(b.name)
|
||||||
|
})
|
||||||
|
|
||||||
|
for (const entry of entries) {
|
||||||
|
if (fileCount >= MAX_FILES_LIMIT) {
|
||||||
|
truncated = true
|
||||||
|
break
|
||||||
|
}
|
||||||
|
|
||||||
|
// Skip hidden files and common ignore patterns
|
||||||
|
if (entry.name.startsWith('.') && entry.name !== '.env.example') {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
if (['node_modules', 'dist', 'build', '__pycache__'].includes(entry.name)) {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
|
||||||
|
fileCount++
|
||||||
|
const node: TreeNode = {
|
||||||
|
name: entry.name,
|
||||||
|
type: entry.isDirectory() ? 'directory' : 'file'
|
||||||
|
}
|
||||||
|
|
||||||
|
if (entry.isDirectory() && recursive && depth < 5) {
|
||||||
|
// Limit depth to prevent infinite recursion
|
||||||
|
const childPath = path.join(dirPath, entry.name)
|
||||||
|
node.children = await buildTree(childPath, depth + 1)
|
||||||
|
}
|
||||||
|
|
||||||
|
nodes.push(node)
|
||||||
|
}
|
||||||
|
|
||||||
|
return nodes
|
||||||
|
} catch (error) {
|
||||||
|
return []
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Build the tree
|
||||||
|
const tree = await buildTree(validPath)
|
||||||
|
|
||||||
|
// Format as text output
|
||||||
|
function formatTree(nodes: TreeNode[], prefix: string = ''): string[] {
|
||||||
|
const lines: string[] = []
|
||||||
|
|
||||||
|
nodes.forEach((node, index) => {
|
||||||
|
const isLastNode = index === nodes.length - 1
|
||||||
|
const connector = isLastNode ? '└── ' : '├── '
|
||||||
|
const icon = node.type === 'directory' ? '📁 ' : '📄 '
|
||||||
|
|
||||||
|
lines.push(prefix + connector + icon + node.name)
|
||||||
|
|
||||||
|
if (node.children && node.children.length > 0) {
|
||||||
|
const childPrefix = prefix + (isLastNode ? ' ' : '│ ')
|
||||||
|
lines.push(...formatTree(node.children, childPrefix))
|
||||||
|
}
|
||||||
|
})
|
||||||
|
|
||||||
|
return lines
|
||||||
|
}
|
||||||
|
|
||||||
|
// Generate output
|
||||||
|
const output: string[] = []
|
||||||
|
output.push(`Directory: ${validPath}`)
|
||||||
|
output.push('')
|
||||||
|
|
||||||
|
if (tree.length === 0) {
|
||||||
|
output.push('(empty directory)')
|
||||||
|
} else {
|
||||||
|
const treeLines = formatTree(tree, '')
|
||||||
|
output.push(...treeLines)
|
||||||
|
|
||||||
|
if (truncated) {
|
||||||
|
output.push('')
|
||||||
|
output.push(`(Results truncated to ${MAX_FILES_LIMIT} files. Consider listing a more specific directory.)`)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return {
|
||||||
|
content: [
|
||||||
|
{
|
||||||
|
type: 'text',
|
||||||
|
text: output.join('\n')
|
||||||
|
}
|
||||||
|
]
|
||||||
|
}
|
||||||
|
}
|
||||||
101
src/main/mcpServers/filesystem/tools/read.ts
Normal file
101
src/main/mcpServers/filesystem/tools/read.ts
Normal file
@ -0,0 +1,101 @@
|
|||||||
|
import fs from 'fs/promises'
|
||||||
|
import path from 'path'
|
||||||
|
import * as z from 'zod'
|
||||||
|
|
||||||
|
import { DEFAULT_READ_LIMIT, isBinaryFile, MAX_LINE_LENGTH, validatePath } from '../types'
|
||||||
|
|
||||||
|
// Input schema for the `read` tool. Only file_path is required; offset and
// limit select a window of lines for large files.
export const ReadToolSchema = z.object({
  file_path: z.string().describe('The path to the file to read'),
  offset: z.number().optional().describe('The line number to start reading from (1-based)'),
  limit: z.number().optional().describe('The number of lines to read (defaults to 2000)')
})
|
||||||
|
|
||||||
|
// MCP tool definition for `read`. The description is the model-facing
// contract implemented by handleReadTool; `inputSchema` is the JSON-Schema
// rendering of ReadToolSchema.
export const readToolDefinition = {
  name: 'read',
  description: `Reads a file from the local filesystem.

- Assumes this tool can read all files on the machine
- The file_path parameter must be an absolute path, not a relative path
- By default, reads up to 2000 lines starting from the beginning
- You can optionally specify a line offset and limit for long files
- Any lines longer than 2000 characters will be truncated
- Results are returned with line numbers starting at 1
- Binary files are detected and rejected with an error
- Empty files return a warning`,
  inputSchema: z.toJSONSchema(ReadToolSchema)
}
|
||||||
|
|
||||||
|
// Handler implementation
|
||||||
|
export async function handleReadTool(args: unknown, baseDir: string) {
|
||||||
|
const parsed = ReadToolSchema.safeParse(args)
|
||||||
|
if (!parsed.success) {
|
||||||
|
throw new Error(`Invalid arguments for read: ${parsed.error}`)
|
||||||
|
}
|
||||||
|
|
||||||
|
const filePath = parsed.data.file_path
|
||||||
|
const validPath = await validatePath(filePath, baseDir)
|
||||||
|
|
||||||
|
// Check if file exists
|
||||||
|
try {
|
||||||
|
const stats = await fs.stat(validPath)
|
||||||
|
if (!stats.isFile()) {
|
||||||
|
throw new Error(`Path is not a file: ${filePath}`)
|
||||||
|
}
|
||||||
|
} catch (error: any) {
|
||||||
|
if (error.code === 'ENOENT') {
|
||||||
|
throw new Error(`File not found: ${filePath}`)
|
||||||
|
}
|
||||||
|
throw error
|
||||||
|
}
|
||||||
|
|
||||||
|
// Check if file is binary
|
||||||
|
if (await isBinaryFile(validPath)) {
|
||||||
|
throw new Error(`Cannot read binary file: ${filePath}`)
|
||||||
|
}
|
||||||
|
|
||||||
|
// Read file content
|
||||||
|
const content = await fs.readFile(validPath, 'utf-8')
|
||||||
|
const lines = content.split('\n')
|
||||||
|
|
||||||
|
// Apply offset and limit
|
||||||
|
const offset = (parsed.data.offset || 1) - 1 // Convert to 0-based
|
||||||
|
const limit = parsed.data.limit || DEFAULT_READ_LIMIT
|
||||||
|
|
||||||
|
if (offset < 0 || offset >= lines.length) {
|
||||||
|
throw new Error(`Invalid offset: ${offset + 1}. File has ${lines.length} lines.`)
|
||||||
|
}
|
||||||
|
|
||||||
|
const selectedLines = lines.slice(offset, offset + limit)
|
||||||
|
|
||||||
|
// Format output with line numbers and truncate long lines
|
||||||
|
const output: string[] = []
|
||||||
|
const relativePath = path.relative(baseDir, validPath)
|
||||||
|
|
||||||
|
output.push(`File: ${relativePath}`)
|
||||||
|
if (offset > 0 || limit < lines.length) {
|
||||||
|
output.push(`Lines ${offset + 1} to ${Math.min(offset + limit, lines.length)} of ${lines.length}`)
|
||||||
|
}
|
||||||
|
output.push('')
|
||||||
|
|
||||||
|
selectedLines.forEach((line, index) => {
|
||||||
|
const lineNumber = offset + index + 1
|
||||||
|
const truncatedLine = line.length > MAX_LINE_LENGTH ? line.substring(0, MAX_LINE_LENGTH) + '...' : line
|
||||||
|
output.push(`${lineNumber.toString().padStart(6)}\t${truncatedLine}`)
|
||||||
|
})
|
||||||
|
|
||||||
|
if (offset + limit < lines.length) {
|
||||||
|
output.push('')
|
||||||
|
output.push(`(${lines.length - (offset + limit)} more lines not shown)`)
|
||||||
|
}
|
||||||
|
|
||||||
|
return {
|
||||||
|
content: [
|
||||||
|
{
|
||||||
|
type: 'text',
|
||||||
|
text: output.join('\n')
|
||||||
|
}
|
||||||
|
]
|
||||||
|
}
|
||||||
|
}
|
||||||
83
src/main/mcpServers/filesystem/tools/write.ts
Normal file
83
src/main/mcpServers/filesystem/tools/write.ts
Normal file
@ -0,0 +1,83 @@
|
|||||||
|
import fs from 'fs/promises'
|
||||||
|
import path from 'path'
|
||||||
|
import * as z from 'zod'
|
||||||
|
|
||||||
|
import { logger, validatePath } from '../types'
|
||||||
|
|
||||||
|
// Input schema for the `write` tool: both fields are required.
export const WriteToolSchema = z.object({
  file_path: z.string().describe('The path to the file to write'),
  content: z.string().describe('The content to write to the file')
})
|
||||||
|
|
||||||
|
// MCP tool definition for `write`. The description is the model-facing
// contract implemented by handleWriteTool; `inputSchema` is the JSON-Schema
// rendering of WriteToolSchema.
export const writeToolDefinition = {
  name: 'write',
  description: `Writes a file to the local filesystem.

- This tool will overwrite the existing file if one exists at the path
- You MUST use the read tool first to understand what you're overwriting
- ALWAYS prefer using the 'edit' tool for existing files
- NEVER proactively create documentation files unless explicitly requested
- Parent directories will be created automatically if they don't exist
- The file_path must be an absolute path, not a relative path`,
  inputSchema: z.toJSONSchema(WriteToolSchema)
}
|
||||||
|
|
||||||
|
// Handler implementation
|
||||||
|
export async function handleWriteTool(args: unknown, baseDir: string) {
|
||||||
|
const parsed = WriteToolSchema.safeParse(args)
|
||||||
|
if (!parsed.success) {
|
||||||
|
throw new Error(`Invalid arguments for write: ${parsed.error}`)
|
||||||
|
}
|
||||||
|
|
||||||
|
const filePath = parsed.data.file_path
|
||||||
|
const validPath = await validatePath(filePath, baseDir)
|
||||||
|
|
||||||
|
// Create parent directory if it doesn't exist
|
||||||
|
const parentDir = path.dirname(validPath)
|
||||||
|
try {
|
||||||
|
await fs.mkdir(parentDir, { recursive: true })
|
||||||
|
} catch (error: any) {
|
||||||
|
if (error.code !== 'EEXIST') {
|
||||||
|
throw new Error(`Failed to create parent directory: ${error.message}`)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Check if file exists (for logging)
|
||||||
|
let isOverwrite = false
|
||||||
|
try {
|
||||||
|
await fs.stat(validPath)
|
||||||
|
isOverwrite = true
|
||||||
|
} catch {
|
||||||
|
// File doesn't exist, that's fine
|
||||||
|
}
|
||||||
|
|
||||||
|
// Write the file
|
||||||
|
try {
|
||||||
|
await fs.writeFile(validPath, parsed.data.content, 'utf-8')
|
||||||
|
} catch (error: any) {
|
||||||
|
throw new Error(`Failed to write file: ${error.message}`)
|
||||||
|
}
|
||||||
|
|
||||||
|
// Log the operation
|
||||||
|
logger.info('File written', {
|
||||||
|
path: validPath,
|
||||||
|
overwrite: isOverwrite,
|
||||||
|
size: parsed.data.content.length
|
||||||
|
})
|
||||||
|
|
||||||
|
// Format output
|
||||||
|
const relativePath = path.relative(baseDir, validPath)
|
||||||
|
const action = isOverwrite ? 'Updated' : 'Created'
|
||||||
|
const lines = parsed.data.content.split('\n').length
|
||||||
|
|
||||||
|
return {
|
||||||
|
content: [
|
||||||
|
{
|
||||||
|
type: 'text',
|
||||||
|
text: `${action} file: ${relativePath}\n` + `Size: ${parsed.data.content.length} bytes\n` + `Lines: ${lines}`
|
||||||
|
}
|
||||||
|
]
|
||||||
|
}
|
||||||
|
}
|
||||||
627
src/main/mcpServers/filesystem/types.ts
Normal file
627
src/main/mcpServers/filesystem/types.ts
Normal file
@ -0,0 +1,627 @@
|
|||||||
|
import { loggerService } from '@logger'
|
||||||
|
import { isMac, isWin } from '@main/constant'
|
||||||
|
import { spawn } from 'child_process'
|
||||||
|
import fs from 'fs/promises'
|
||||||
|
import os from 'os'
|
||||||
|
import path from 'path'
|
||||||
|
|
||||||
|
// Shared logger for the filesystem MCP server modules.
export const logger = loggerService.withContext('MCP:FileSystemServer')

// Constants
export const MAX_LINE_LENGTH = 2000 // max characters rendered per line before '...' truncation
export const DEFAULT_READ_LIMIT = 2000 // default number of lines returned by the read tool
export const MAX_FILES_LIMIT = 100 // max entries returned by directory listings
export const MAX_GREP_MATCHES = 100 // max matches returned by the grep tool
|
||||||
|
|
||||||
|
// Common types

// Basic metadata describing a filesystem entry.
export interface FileInfo {
  path: string
  type: 'file' | 'directory'
  size?: number // presumably bytes from a stat call — confirm at call sites
  modified?: Date
}

// A single grep result: one matching line within a file.
export interface GrepMatch {
  file: string // absolute path of the file containing the match
  line: number // line number as reported by the search backend
  content: string // text of the matching line (callers may trim/truncate it)
}
|
||||||
|
|
||||||
|
// Utility functions for path handling
|
||||||
|
export function normalizePath(p: string): string {
|
||||||
|
return path.normalize(p)
|
||||||
|
}
|
||||||
|
|
||||||
|
export function expandHome(filepath: string): string {
|
||||||
|
if (filepath.startsWith('~/') || filepath === '~') {
|
||||||
|
return path.join(os.homedir(), filepath.slice(1))
|
||||||
|
}
|
||||||
|
return filepath
|
||||||
|
}
|
||||||
|
|
||||||
|
// Security validation
/**
 * Resolves a requested path to a normalized absolute path.
 *
 * - Expands a leading '~' to the user's home directory.
 * - Relative paths are resolved against `baseDir` (or the process CWD when
 *   baseDir is not provided).
 * - Existing paths are resolved through fs.realpath so symlinks map to
 *   their real target; for paths that do not exist yet (e.g. a file about
 *   to be written), the parent directory is resolved instead.
 *
 * NOTE(review): despite the name, nothing here rejects paths that escape
 * `baseDir` — the resolved path is returned wherever it points. Confirm
 * whether sandbox confinement is enforced elsewhere before relying on this.
 */
export async function validatePath(requestedPath: string, baseDir?: string): Promise<string> {
  const expandedPath = expandHome(requestedPath)
  const root = baseDir ?? process.cwd()
  const absolute = path.isAbsolute(expandedPath) ? path.resolve(expandedPath) : path.resolve(root, expandedPath)

  // Handle symlinks by checking their real path
  try {
    const realPath = await fs.realpath(absolute)
    return normalizePath(realPath)
  } catch (error) {
    // For new files that don't exist yet, verify parent directory
    const parentDir = path.dirname(absolute)
    try {
      const realParentPath = await fs.realpath(parentDir)
      // Result intentionally unused beyond confirming the parent resolves.
      normalizePath(realParentPath)
      return normalizePath(absolute)
    } catch {
      // Parent doesn't resolve either; fall back to the normalized absolute.
      return normalizePath(absolute)
    }
  }
}
|
||||||
|
|
||||||
|
// ============================================================================
// Edit Tool Utilities - Fuzzy matching replacers from opencode
// ============================================================================

// A Replacer inspects `content` and yields candidate substrings that should
// be treated as equivalent to `find`. Candidates are only usable when they
// literally occur in `content` (the caller re-checks with indexOf).
export type Replacer = (content: string, find: string) => Generator<string, void, unknown>

// Similarity thresholds for block anchor fallback matching
const SINGLE_CANDIDATE_SIMILARITY_THRESHOLD = 0.0 // a lone candidate is accepted regardless of similarity
const MULTIPLE_CANDIDATES_SIMILARITY_THRESHOLD = 0.3 // the best of several candidates must clear this bar
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Levenshtein distance algorithm implementation
|
||||||
|
*/
|
||||||
|
function levenshtein(a: string, b: string): number {
|
||||||
|
if (a === '' || b === '') {
|
||||||
|
return Math.max(a.length, b.length)
|
||||||
|
}
|
||||||
|
const matrix = Array.from({ length: a.length + 1 }, (_, i) =>
|
||||||
|
Array.from({ length: b.length + 1 }, (_, j) => (i === 0 ? j : j === 0 ? i : 0))
|
||||||
|
)
|
||||||
|
|
||||||
|
for (let i = 1; i <= a.length; i++) {
|
||||||
|
for (let j = 1; j <= b.length; j++) {
|
||||||
|
const cost = a[i - 1] === b[j - 1] ? 0 : 1
|
||||||
|
matrix[i][j] = Math.min(matrix[i - 1][j] + 1, matrix[i][j - 1] + 1, matrix[i - 1][j - 1] + cost)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return matrix[a.length][b.length]
|
||||||
|
}
|
||||||
|
|
||||||
|
// Trivial replacer: proposes `find` verbatim as the only candidate.
// Succeeds when the search text occurs in `content` exactly as written.
export const SimpleReplacer: Replacer = function* (_content, find) {
  yield find
}
|
||||||
|
|
||||||
|
export const LineTrimmedReplacer: Replacer = function* (content, find) {
|
||||||
|
const originalLines = content.split('\n')
|
||||||
|
const searchLines = find.split('\n')
|
||||||
|
|
||||||
|
if (searchLines[searchLines.length - 1] === '') {
|
||||||
|
searchLines.pop()
|
||||||
|
}
|
||||||
|
|
||||||
|
for (let i = 0; i <= originalLines.length - searchLines.length; i++) {
|
||||||
|
let matches = true
|
||||||
|
|
||||||
|
for (let j = 0; j < searchLines.length; j++) {
|
||||||
|
const originalTrimmed = originalLines[i + j].trim()
|
||||||
|
const searchTrimmed = searchLines[j].trim()
|
||||||
|
|
||||||
|
if (originalTrimmed !== searchTrimmed) {
|
||||||
|
matches = false
|
||||||
|
break
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
if (matches) {
|
||||||
|
let matchStartIndex = 0
|
||||||
|
for (let k = 0; k < i; k++) {
|
||||||
|
matchStartIndex += originalLines[k].length + 1
|
||||||
|
}
|
||||||
|
|
||||||
|
let matchEndIndex = matchStartIndex
|
||||||
|
for (let k = 0; k < searchLines.length; k++) {
|
||||||
|
matchEndIndex += originalLines[i + k].length
|
||||||
|
if (k < searchLines.length - 1) {
|
||||||
|
matchEndIndex += 1
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
yield content.substring(matchStartIndex, matchEndIndex)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Anchor-based fuzzy matcher for blocks of 3+ lines: the first and last
// lines of `find` act as anchors, and candidate regions of `content` whose
// trimmed first/last lines match those anchors are scored by the average
// per-line similarity (1 - normalized Levenshtein distance) of their
// interior lines. A single candidate is accepted when it clears
// SINGLE_CANDIDATE_SIMILARITY_THRESHOLD; with several candidates the best
// one must clear MULTIPLE_CANDIDATES_SIMILARITY_THRESHOLD.
export const BlockAnchorReplacer: Replacer = function* (content, find) {
  const originalLines = content.split('\n')
  const searchLines = find.split('\n')

  // Anchoring needs at least first line, one interior line, and last line.
  if (searchLines.length < 3) {
    return
  }

  // Drop the empty element produced by a trailing newline in `find`.
  if (searchLines[searchLines.length - 1] === '') {
    searchLines.pop()
  }

  const firstLineSearch = searchLines[0].trim()
  const lastLineSearch = searchLines[searchLines.length - 1].trim()
  const searchBlockSize = searchLines.length

  // Collect every region that starts at a first-anchor line and ends at the
  // nearest subsequent last-anchor line (at least 2 lines later).
  const candidates: Array<{ startLine: number; endLine: number }> = []
  for (let i = 0; i < originalLines.length; i++) {
    if (originalLines[i].trim() !== firstLineSearch) {
      continue
    }

    for (let j = i + 2; j < originalLines.length; j++) {
      if (originalLines[j].trim() === lastLineSearch) {
        candidates.push({ startLine: i, endLine: j })
        break
      }
    }
  }

  if (candidates.length === 0) {
    return
  }

  if (candidates.length === 1) {
    // Single candidate: score interior-line similarity, with early exit as
    // soon as the (currently zero) threshold is reached.
    const { startLine, endLine } = candidates[0]
    const actualBlockSize = endLine - startLine + 1

    let similarity = 0
    const linesToCheck = Math.min(searchBlockSize - 2, actualBlockSize - 2)

    if (linesToCheck > 0) {
      for (let j = 1; j < searchBlockSize - 1 && j < actualBlockSize - 1; j++) {
        const originalLine = originalLines[startLine + j].trim()
        const searchLine = searchLines[j].trim()
        const maxLen = Math.max(originalLine.length, searchLine.length)
        if (maxLen === 0) {
          continue
        }
        const distance = levenshtein(originalLine, searchLine)
        similarity += (1 - distance / maxLen) / linesToCheck

        if (similarity >= SINGLE_CANDIDATE_SIMILARITY_THRESHOLD) {
          break
        }
      }
    } else {
      // No interior lines to compare — treat as a perfect match.
      similarity = 1.0
    }

    if (similarity >= SINGLE_CANDIDATE_SIMILARITY_THRESHOLD) {
      // Convert the line range back to character offsets ('\n'-aware).
      let matchStartIndex = 0
      for (let k = 0; k < startLine; k++) {
        matchStartIndex += originalLines[k].length + 1
      }
      let matchEndIndex = matchStartIndex
      for (let k = startLine; k <= endLine; k++) {
        matchEndIndex += originalLines[k].length
        if (k < endLine) {
          matchEndIndex += 1
        }
      }
      yield content.substring(matchStartIndex, matchEndIndex)
    }
    return
  }

  // Multiple candidates: pick the one with the highest average interior
  // similarity (no early exit here — full score needed for comparison).
  let bestMatch: { startLine: number; endLine: number } | null = null
  let maxSimilarity = -1

  for (const candidate of candidates) {
    const { startLine, endLine } = candidate
    const actualBlockSize = endLine - startLine + 1

    let similarity = 0
    const linesToCheck = Math.min(searchBlockSize - 2, actualBlockSize - 2)

    if (linesToCheck > 0) {
      for (let j = 1; j < searchBlockSize - 1 && j < actualBlockSize - 1; j++) {
        const originalLine = originalLines[startLine + j].trim()
        const searchLine = searchLines[j].trim()
        const maxLen = Math.max(originalLine.length, searchLine.length)
        if (maxLen === 0) {
          continue
        }
        const distance = levenshtein(originalLine, searchLine)
        similarity += 1 - distance / maxLen
      }
      similarity /= linesToCheck
    } else {
      similarity = 1.0
    }

    if (similarity > maxSimilarity) {
      maxSimilarity = similarity
      bestMatch = candidate
    }
  }

  if (maxSimilarity >= MULTIPLE_CANDIDATES_SIMILARITY_THRESHOLD && bestMatch) {
    const { startLine, endLine } = bestMatch
    // Convert the winning line range back to character offsets.
    let matchStartIndex = 0
    for (let k = 0; k < startLine; k++) {
      matchStartIndex += originalLines[k].length + 1
    }
    let matchEndIndex = matchStartIndex
    for (let k = startLine; k <= endLine; k++) {
      matchEndIndex += originalLines[k].length
      if (k < endLine) {
        matchEndIndex += 1
      }
    }
    yield content.substring(matchStartIndex, matchEndIndex)
  }
}
|
||||||
|
|
||||||
|
// Matches `find` ignoring differences in the amount/kind of whitespace
// (runs of whitespace collapse to single spaces, edges trimmed). Yields:
// whole lines whose normalized form equals the normalized needle; regex
// matches of the needle's words joined by flexible whitespace within a
// line; and, for multi-line needles, whole line blocks that normalize to
// the same string.
export const WhitespaceNormalizedReplacer: Replacer = function* (content, find) {
  const normalizeWhitespace = (text: string) => text.replace(/\s+/g, ' ').trim()
  const normalizedFind = normalizeWhitespace(find)

  // Single-line scan.
  const lines = content.split('\n')
  for (let i = 0; i < lines.length; i++) {
    const line = lines[i]
    if (normalizeWhitespace(line) === normalizedFind) {
      yield line
    } else {
      // Needle may be a fragment of the line: rebuild it as a regex whose
      // tokens are the (escaped) words of `find` separated by \s+.
      const normalizedLine = normalizeWhitespace(line)
      if (normalizedLine.includes(normalizedFind)) {
        const words = find.trim().split(/\s+/)
        if (words.length > 0) {
          const pattern = words.map((word) => word.replace(/[.*+?^${}()|[\]\\]/g, '\\$&')).join('\\s+')
          try {
            const regex = new RegExp(pattern)
            const match = line.match(regex)
            if (match) {
              yield match[0]
            }
          } catch {
            // Invalid regex pattern, skip
          }
        }
      }
    }
  }

  // Multi-line scan: sliding window of the same line count as the needle.
  const findLines = find.split('\n')
  if (findLines.length > 1) {
    for (let i = 0; i <= lines.length - findLines.length; i++) {
      const block = lines.slice(i, i + findLines.length)
      if (normalizeWhitespace(block.join('\n')) === normalizedFind) {
        yield block.join('\n')
      }
    }
  }
}
|
||||||
|
|
||||||
|
export const IndentationFlexibleReplacer: Replacer = function* (content, find) {
|
||||||
|
const removeIndentation = (text: string) => {
|
||||||
|
const lines = text.split('\n')
|
||||||
|
const nonEmptyLines = lines.filter((line) => line.trim().length > 0)
|
||||||
|
if (nonEmptyLines.length === 0) return text
|
||||||
|
|
||||||
|
const minIndent = Math.min(
|
||||||
|
...nonEmptyLines.map((line) => {
|
||||||
|
const match = line.match(/^(\s*)/)
|
||||||
|
return match ? match[1].length : 0
|
||||||
|
})
|
||||||
|
)
|
||||||
|
|
||||||
|
return lines.map((line) => (line.trim().length === 0 ? line : line.slice(minIndent))).join('\n')
|
||||||
|
}
|
||||||
|
|
||||||
|
const normalizedFind = removeIndentation(find)
|
||||||
|
const contentLines = content.split('\n')
|
||||||
|
const findLines = find.split('\n')
|
||||||
|
|
||||||
|
for (let i = 0; i <= contentLines.length - findLines.length; i++) {
|
||||||
|
const block = contentLines.slice(i, i + findLines.length).join('\n')
|
||||||
|
if (removeIndentation(block) === normalizedFind) {
|
||||||
|
yield block
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Matches needles that arrive with literal backslash-escapes (e.g. "\\n"
// as two characters) by unescaping common sequences and comparing against
// `content` both directly and per line-block (where the block itself is
// also unescaped before comparison).
export const EscapeNormalizedReplacer: Replacer = function* (content, find) {
  // Translate literal escape sequences (\n, \t, \r, quotes, backtick,
  // backslash, escaped newline, \$) into the characters they denote;
  // unknown escapes are left untouched.
  const unescapeString = (str: string): string => {
    return str.replace(/\\(n|t|r|'|"|`|\\|\n|\$)/g, (match, capturedChar) => {
      switch (capturedChar) {
        case 'n':
          return '\n'
        case 't':
          return '\t'
        case 'r':
          return '\r'
        case "'":
          return "'"
        case '"':
          return '"'
        case '`':
          return '`'
        case '\\':
          return '\\'
        case '\n':
          return '\n'
        case '$':
          return '$'
        default:
          return match
      }
    })
  }

  const unescapedFind = unescapeString(find)

  // Direct hit: the unescaped needle occurs verbatim.
  if (content.includes(unescapedFind)) {
    yield unescapedFind
  }

  // Block scan: also unescape each candidate block, so content that itself
  // contains literal escapes can match.
  const lines = content.split('\n')
  const findLines = unescapedFind.split('\n')

  for (let i = 0; i <= lines.length - findLines.length; i++) {
    const block = lines.slice(i, i + findLines.length).join('\n')
    const unescapedBlock = unescapeString(block)

    if (unescapedBlock === unescapedFind) {
      yield block
    }
  }
}
|
||||||
|
|
||||||
|
export const TrimmedBoundaryReplacer: Replacer = function* (content, find) {
|
||||||
|
const trimmedFind = find.trim()
|
||||||
|
|
||||||
|
if (trimmedFind === find) {
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
if (content.includes(trimmedFind)) {
|
||||||
|
yield trimmedFind
|
||||||
|
}
|
||||||
|
|
||||||
|
const lines = content.split('\n')
|
||||||
|
const findLines = find.split('\n')
|
||||||
|
|
||||||
|
for (let i = 0; i <= lines.length - findLines.length; i++) {
|
||||||
|
const block = lines.slice(i, i + findLines.length).join('\n')
|
||||||
|
|
||||||
|
if (block.trim() === trimmedFind) {
|
||||||
|
yield block
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Matches a 3+ line needle by its first and last lines (trimmed) and
// accepts the enclosed block when it has the same line count and at least
// half of its non-empty interior lines match the needle's. At most one
// block is yielded per first-line occurrence.
export const ContextAwareReplacer: Replacer = function* (content, find) {
  const findLines = find.split('\n')
  // Needs first anchor, at least one interior line, and last anchor.
  if (findLines.length < 3) {
    return
  }

  // Drop the empty element produced by a trailing newline in `find`.
  if (findLines[findLines.length - 1] === '') {
    findLines.pop()
  }

  const contentLines = content.split('\n')

  const firstLine = findLines[0].trim()
  const lastLine = findLines[findLines.length - 1].trim()

  for (let i = 0; i < contentLines.length; i++) {
    if (contentLines[i].trim() !== firstLine) continue

    // Find the nearest closing anchor at least 2 lines below the opener.
    for (let j = i + 2; j < contentLines.length; j++) {
      if (contentLines[j].trim() === lastLine) {
        const blockLines = contentLines.slice(i, j + 1)
        const block = blockLines.join('\n')

        // Only blocks with exactly the needle's line count are scored.
        if (blockLines.length === findLines.length) {
          let matchingLines = 0
          let totalNonEmptyLines = 0

          // Compare interior lines (anchors already matched above).
          for (let k = 1; k < blockLines.length - 1; k++) {
            const blockLine = blockLines[k].trim()
            const findLine = findLines[k].trim()

            if (blockLine.length > 0 || findLine.length > 0) {
              totalNonEmptyLines++
              if (blockLine === findLine) {
                matchingLines++
              }
            }
          }

          // Accept when >= 50% of the non-empty interior lines agree.
          if (totalNonEmptyLines === 0 || matchingLines / totalNonEmptyLines >= 0.5) {
            yield block
            break
          }
        }
        // Stop at the first closing anchor regardless of acceptance.
        break
      }
    }
  }
}
|
||||||
|
|
||||||
|
export const MultiOccurrenceReplacer: Replacer = function* (content, find) {
|
||||||
|
let startIndex = 0
|
||||||
|
|
||||||
|
while (true) {
|
||||||
|
const index = content.indexOf(find, startIndex)
|
||||||
|
if (index === -1) break
|
||||||
|
|
||||||
|
yield find
|
||||||
|
startIndex = index + find.length
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
 * All replacers in order of specificity: exact match first, progressively
 * fuzzier strategies after. replaceWithFuzzyMatch walks this list in order
 * and uses the first unambiguous candidate it finds.
 */
export const ALL_REPLACERS: Replacer[] = [
  SimpleReplacer,
  LineTrimmedReplacer,
  BlockAnchorReplacer,
  WhitespaceNormalizedReplacer,
  IndentationFlexibleReplacer,
  EscapeNormalizedReplacer,
  TrimmedBoundaryReplacer,
  ContextAwareReplacer,
  MultiOccurrenceReplacer
]
|
||||||
|
|
||||||
|
/**
 * Replace oldString with newString in content using fuzzy matching.
 *
 * Tries each replacer in ALL_REPLACERS in order; every candidate a replacer
 * yields is looked up literally in `content`. The first candidate that
 * occurs exactly once is replaced (or, with `replaceAll`, every occurrence
 * of the first candidate found at all).
 *
 * @param content    full text to edit
 * @param oldString  text to locate (possibly inexact; see replacers)
 * @param newString  replacement text; must differ from oldString
 * @param replaceAll replace every occurrence of the matched candidate
 * @returns the edited content
 * @throws Error when oldString === newString, when no replacer matches at
 *         all, or when every matching candidate occurs more than once
 *         (ambiguous without more surrounding context)
 */
export function replaceWithFuzzyMatch(
  content: string,
  oldString: string,
  newString: string,
  replaceAll = false
): string {
  if (oldString === newString) {
    throw new Error('old_string and new_string must be different')
  }

  // Tracks whether ANY candidate occurred in content, so the final error
  // can distinguish "not found" from "found but ambiguous".
  let notFound = true

  for (const replacer of ALL_REPLACERS) {
    for (const search of replacer(content, oldString)) {
      const index = content.indexOf(search)
      if (index === -1) continue
      notFound = false
      if (replaceAll) {
        return content.replaceAll(search, newString)
      }
      // Candidate occurs more than once — ambiguous, try the next one.
      const lastIndex = content.lastIndexOf(search)
      if (index !== lastIndex) continue
      return content.substring(0, index) + newString + content.substring(index + search.length)
    }
  }

  if (notFound) {
    throw new Error('old_string not found in content')
  }
  throw new Error(
    'Found multiple matches for old_string. Provide more surrounding lines in old_string to identify the correct match.'
  )
}
|
||||||
|
|
||||||
|
// ============================================================================
|
||||||
|
// Binary File Detection
|
||||||
|
// ============================================================================
|
||||||
|
|
||||||
|
// Check if a file is likely binary
|
||||||
|
export async function isBinaryFile(filePath: string): Promise<boolean> {
|
||||||
|
try {
|
||||||
|
const buffer = Buffer.alloc(4096)
|
||||||
|
const fd = await fs.open(filePath, 'r')
|
||||||
|
const { bytesRead } = await fd.read(buffer, 0, buffer.length, 0)
|
||||||
|
await fd.close()
|
||||||
|
|
||||||
|
if (bytesRead === 0) return false
|
||||||
|
|
||||||
|
const view = buffer.subarray(0, bytesRead)
|
||||||
|
|
||||||
|
let zeroBytes = 0
|
||||||
|
let evenZeros = 0
|
||||||
|
let oddZeros = 0
|
||||||
|
let nonPrintable = 0
|
||||||
|
|
||||||
|
for (let i = 0; i < view.length; i++) {
|
||||||
|
const b = view[i]
|
||||||
|
|
||||||
|
if (b === 0) {
|
||||||
|
zeroBytes++
|
||||||
|
if (i % 2 === 0) evenZeros++
|
||||||
|
else oddZeros++
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
|
||||||
|
// treat common whitespace as printable
|
||||||
|
if (b === 9 || b === 10 || b === 13) continue
|
||||||
|
|
||||||
|
// basic ASCII printable range
|
||||||
|
if (b >= 32 && b <= 126) continue
|
||||||
|
|
||||||
|
// bytes >= 128 are likely part of UTF-8 sequences; count as printable
|
||||||
|
if (b >= 128) continue
|
||||||
|
|
||||||
|
nonPrintable++
|
||||||
|
}
|
||||||
|
|
||||||
|
// If there are lots of null bytes, it's probably binary unless it looks like UTF-16 text.
|
||||||
|
if (zeroBytes > 0) {
|
||||||
|
const evenSlots = Math.ceil(view.length / 2)
|
||||||
|
const oddSlots = Math.floor(view.length / 2)
|
||||||
|
const evenZeroRatio = evenSlots > 0 ? evenZeros / evenSlots : 0
|
||||||
|
const oddZeroRatio = oddSlots > 0 ? oddZeros / oddSlots : 0
|
||||||
|
|
||||||
|
// UTF-16LE/BE tends to have zeros on every other byte.
|
||||||
|
if (evenZeroRatio > 0.7 || oddZeroRatio > 0.7) return false
|
||||||
|
|
||||||
|
if (zeroBytes / view.length > 0.05) return true
|
||||||
|
}
|
||||||
|
|
||||||
|
// Heuristic: too many non-printable bytes => binary.
|
||||||
|
return nonPrintable / view.length > 0.3
|
||||||
|
} catch {
|
||||||
|
return false
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// ============================================================================
|
||||||
|
// Ripgrep Utilities
|
||||||
|
// ============================================================================
|
||||||
|
|
||||||
|
export interface RipgrepResult {
|
||||||
|
ok: boolean
|
||||||
|
stdout: string
|
||||||
|
exitCode: number | null
|
||||||
|
}
|
||||||
|
|
||||||
|
export function getRipgrepAddonPath(): string {
|
||||||
|
const pkgJsonPath = require.resolve('@anthropic-ai/claude-agent-sdk/package.json')
|
||||||
|
const pkgRoot = path.dirname(pkgJsonPath)
|
||||||
|
const platform = isMac ? 'darwin' : isWin ? 'win32' : 'linux'
|
||||||
|
const arch = process.arch === 'arm64' ? 'arm64' : 'x64'
|
||||||
|
return path.join(pkgRoot, 'vendor', 'ripgrep', `${arch}-${platform}`, 'ripgrep.node')
|
||||||
|
}
|
||||||
|
|
||||||
|
export async function runRipgrep(args: string[]): Promise<RipgrepResult> {
|
||||||
|
const addonPath = getRipgrepAddonPath()
|
||||||
|
const childScript = `const { ripgrepMain } = require(process.env.RIPGREP_ADDON_PATH); process.exit(ripgrepMain(process.argv.slice(1)));`
|
||||||
|
|
||||||
|
return new Promise((resolve) => {
|
||||||
|
const child = spawn(process.execPath, ['--eval', childScript, 'rg', ...args], {
|
||||||
|
cwd: process.cwd(),
|
||||||
|
env: {
|
||||||
|
...process.env,
|
||||||
|
ELECTRON_RUN_AS_NODE: '1',
|
||||||
|
RIPGREP_ADDON_PATH: addonPath
|
||||||
|
},
|
||||||
|
stdio: ['ignore', 'pipe', 'pipe']
|
||||||
|
})
|
||||||
|
|
||||||
|
let stdout = ''
|
||||||
|
|
||||||
|
child.stdout?.on('data', (chunk) => {
|
||||||
|
stdout += chunk.toString('utf-8')
|
||||||
|
})
|
||||||
|
|
||||||
|
child.on('error', () => {
|
||||||
|
resolve({ ok: false, stdout: '', exitCode: null })
|
||||||
|
})
|
||||||
|
|
||||||
|
child.on('close', (code) => {
|
||||||
|
resolve({ ok: true, stdout, exitCode: code })
|
||||||
|
})
|
||||||
|
})
|
||||||
|
}
|
||||||
@ -32,7 +32,8 @@ export enum ConfigKeys {
|
|||||||
Proxy = 'proxy',
|
Proxy = 'proxy',
|
||||||
EnableDeveloperMode = 'enableDeveloperMode',
|
EnableDeveloperMode = 'enableDeveloperMode',
|
||||||
ClientId = 'clientId',
|
ClientId = 'clientId',
|
||||||
GitBashPath = 'gitBashPath'
|
GitBashPath = 'gitBashPath',
|
||||||
|
GitBashPathSource = 'gitBashPathSource' // 'manual' | 'auto' | null
|
||||||
}
|
}
|
||||||
|
|
||||||
export class ConfigManager {
|
export class ConfigManager {
|
||||||
|
|||||||
@ -15,8 +15,8 @@ import { query } from '@anthropic-ai/claude-agent-sdk'
|
|||||||
import { loggerService } from '@logger'
|
import { loggerService } from '@logger'
|
||||||
import { config as apiConfigService } from '@main/apiServer/config'
|
import { config as apiConfigService } from '@main/apiServer/config'
|
||||||
import { validateModelId } from '@main/apiServer/utils'
|
import { validateModelId } from '@main/apiServer/utils'
|
||||||
import { ConfigKeys, configManager } from '@main/services/ConfigManager'
|
import { isWin } from '@main/constant'
|
||||||
import { validateGitBashPath } from '@main/utils/process'
|
import { autoDiscoverGitBash } from '@main/utils/process'
|
||||||
import getLoginShellEnvironment from '@main/utils/shell-env'
|
import getLoginShellEnvironment from '@main/utils/shell-env'
|
||||||
import { app } from 'electron'
|
import { app } from 'electron'
|
||||||
|
|
||||||
@ -109,7 +109,8 @@ class ClaudeCodeService implements AgentServiceInterface {
|
|||||||
Object.entries(loginShellEnv).filter(([key]) => !key.toLowerCase().endsWith('_proxy'))
|
Object.entries(loginShellEnv).filter(([key]) => !key.toLowerCase().endsWith('_proxy'))
|
||||||
) as Record<string, string>
|
) as Record<string, string>
|
||||||
|
|
||||||
const customGitBashPath = validateGitBashPath(configManager.get(ConfigKeys.GitBashPath) as string | undefined)
|
// Auto-discover Git Bash path on Windows (already logs internally)
|
||||||
|
const customGitBashPath = isWin ? autoDiscoverGitBash() : null
|
||||||
|
|
||||||
const env = {
|
const env = {
|
||||||
...loginShellEnvWithoutProxies,
|
...loginShellEnvWithoutProxies,
|
||||||
|
|||||||
@ -1,9 +1,21 @@
|
|||||||
|
import { configManager } from '@main/services/ConfigManager'
|
||||||
import { execFileSync } from 'child_process'
|
import { execFileSync } from 'child_process'
|
||||||
import fs from 'fs'
|
import fs from 'fs'
|
||||||
import path from 'path'
|
import path from 'path'
|
||||||
import { beforeEach, describe, expect, it, vi } from 'vitest'
|
import { beforeEach, describe, expect, it, vi } from 'vitest'
|
||||||
|
|
||||||
import { findExecutable, findGitBash, validateGitBashPath } from '../process'
|
import { autoDiscoverGitBash, findExecutable, findGitBash, validateGitBashPath } from '../process'
|
||||||
|
|
||||||
|
// Mock configManager
|
||||||
|
vi.mock('@main/services/ConfigManager', () => ({
|
||||||
|
ConfigKeys: {
|
||||||
|
GitBashPath: 'gitBashPath'
|
||||||
|
},
|
||||||
|
configManager: {
|
||||||
|
get: vi.fn(),
|
||||||
|
set: vi.fn()
|
||||||
|
}
|
||||||
|
}))
|
||||||
|
|
||||||
// Mock dependencies
|
// Mock dependencies
|
||||||
vi.mock('child_process')
|
vi.mock('child_process')
|
||||||
@ -695,4 +707,284 @@ describe.skipIf(process.platform !== 'win32')('process utilities', () => {
|
|||||||
})
|
})
|
||||||
})
|
})
|
||||||
})
|
})
|
||||||
|
|
||||||
|
describe('autoDiscoverGitBash', () => {
|
||||||
|
const originalEnvVar = process.env.CLAUDE_CODE_GIT_BASH_PATH
|
||||||
|
|
||||||
|
beforeEach(() => {
|
||||||
|
vi.mocked(configManager.get).mockReset()
|
||||||
|
vi.mocked(configManager.set).mockReset()
|
||||||
|
delete process.env.CLAUDE_CODE_GIT_BASH_PATH
|
||||||
|
})
|
||||||
|
|
||||||
|
afterEach(() => {
|
||||||
|
// Restore original environment variable
|
||||||
|
if (originalEnvVar !== undefined) {
|
||||||
|
process.env.CLAUDE_CODE_GIT_BASH_PATH = originalEnvVar
|
||||||
|
} else {
|
||||||
|
delete process.env.CLAUDE_CODE_GIT_BASH_PATH
|
||||||
|
}
|
||||||
|
})
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Helper to mock fs.existsSync with a set of valid paths
|
||||||
|
*/
|
||||||
|
const mockExistingPaths = (...validPaths: string[]) => {
|
||||||
|
vi.mocked(fs.existsSync).mockImplementation((p) => validPaths.includes(p as string))
|
||||||
|
}
|
||||||
|
|
||||||
|
describe('with no existing config path', () => {
|
||||||
|
it('should discover and persist Git Bash path when not configured', () => {
|
||||||
|
const bashPath = 'C:\\Program Files\\Git\\bin\\bash.exe'
|
||||||
|
const gitPath = 'C:\\Program Files\\Git\\cmd\\git.exe'
|
||||||
|
|
||||||
|
vi.mocked(configManager.get).mockReturnValue(undefined)
|
||||||
|
process.env.ProgramFiles = 'C:\\Program Files'
|
||||||
|
mockExistingPaths(gitPath, bashPath)
|
||||||
|
|
||||||
|
const result = autoDiscoverGitBash()
|
||||||
|
|
||||||
|
expect(result).toBe(bashPath)
|
||||||
|
expect(configManager.set).toHaveBeenCalledWith('gitBashPath', bashPath)
|
||||||
|
})
|
||||||
|
|
||||||
|
it('should return null and not persist when Git Bash is not found', () => {
|
||||||
|
vi.mocked(configManager.get).mockReturnValue(undefined)
|
||||||
|
vi.mocked(fs.existsSync).mockReturnValue(false)
|
||||||
|
vi.mocked(execFileSync).mockImplementation(() => {
|
||||||
|
throw new Error('Not found')
|
||||||
|
})
|
||||||
|
|
||||||
|
const result = autoDiscoverGitBash()
|
||||||
|
|
||||||
|
expect(result).toBeNull()
|
||||||
|
expect(configManager.set).not.toHaveBeenCalled()
|
||||||
|
})
|
||||||
|
})
|
||||||
|
|
||||||
|
describe('environment variable precedence', () => {
|
||||||
|
it('should use env var over valid config path', () => {
|
||||||
|
const envPath = 'C:\\EnvGit\\bin\\bash.exe'
|
||||||
|
const configPath = 'C:\\ConfigGit\\bin\\bash.exe'
|
||||||
|
|
||||||
|
process.env.CLAUDE_CODE_GIT_BASH_PATH = envPath
|
||||||
|
vi.mocked(configManager.get).mockReturnValue(configPath)
|
||||||
|
mockExistingPaths(envPath, configPath)
|
||||||
|
|
||||||
|
const result = autoDiscoverGitBash()
|
||||||
|
|
||||||
|
// Env var should take precedence
|
||||||
|
expect(result).toBe(envPath)
|
||||||
|
// Should not persist env var path (it's a runtime override)
|
||||||
|
expect(configManager.set).not.toHaveBeenCalled()
|
||||||
|
})
|
||||||
|
|
||||||
|
it('should fall back to config path when env var is invalid', () => {
|
||||||
|
const envPath = 'C:\\Invalid\\bash.exe'
|
||||||
|
const configPath = 'C:\\ConfigGit\\bin\\bash.exe'
|
||||||
|
|
||||||
|
process.env.CLAUDE_CODE_GIT_BASH_PATH = envPath
|
||||||
|
vi.mocked(configManager.get).mockReturnValue(configPath)
|
||||||
|
// Env path is invalid (doesn't exist), only config path exists
|
||||||
|
mockExistingPaths(configPath)
|
||||||
|
|
||||||
|
const result = autoDiscoverGitBash()
|
||||||
|
|
||||||
|
// Should fall back to config path
|
||||||
|
expect(result).toBe(configPath)
|
||||||
|
expect(configManager.set).not.toHaveBeenCalled()
|
||||||
|
})
|
||||||
|
|
||||||
|
it('should fall back to auto-discovery when both env var and config are invalid', () => {
|
||||||
|
const envPath = 'C:\\InvalidEnv\\bash.exe'
|
||||||
|
const configPath = 'C:\\InvalidConfig\\bash.exe'
|
||||||
|
const discoveredPath = 'C:\\Program Files\\Git\\bin\\bash.exe'
|
||||||
|
const gitPath = 'C:\\Program Files\\Git\\cmd\\git.exe'
|
||||||
|
|
||||||
|
process.env.CLAUDE_CODE_GIT_BASH_PATH = envPath
|
||||||
|
process.env.ProgramFiles = 'C:\\Program Files'
|
||||||
|
vi.mocked(configManager.get).mockReturnValue(configPath)
|
||||||
|
// Both env and config paths are invalid, only standard Git exists
|
||||||
|
mockExistingPaths(gitPath, discoveredPath)
|
||||||
|
|
||||||
|
const result = autoDiscoverGitBash()
|
||||||
|
|
||||||
|
expect(result).toBe(discoveredPath)
|
||||||
|
expect(configManager.set).toHaveBeenCalledWith('gitBashPath', discoveredPath)
|
||||||
|
})
|
||||||
|
})
|
||||||
|
|
||||||
|
describe('with valid existing config path', () => {
|
||||||
|
it('should validate and return existing path without re-discovering', () => {
|
||||||
|
const existingPath = 'C:\\CustomGit\\bin\\bash.exe'
|
||||||
|
|
||||||
|
vi.mocked(configManager.get).mockReturnValue(existingPath)
|
||||||
|
mockExistingPaths(existingPath)
|
||||||
|
|
||||||
|
const result = autoDiscoverGitBash()
|
||||||
|
|
||||||
|
expect(result).toBe(existingPath)
|
||||||
|
// Should not call findGitBash or persist again
|
||||||
|
expect(configManager.set).not.toHaveBeenCalled()
|
||||||
|
// Should not call execFileSync (which findGitBash would use for discovery)
|
||||||
|
expect(execFileSync).not.toHaveBeenCalled()
|
||||||
|
})
|
||||||
|
|
||||||
|
it('should not override existing valid config with auto-discovery', () => {
|
||||||
|
const existingPath = 'C:\\CustomGit\\bin\\bash.exe'
|
||||||
|
const discoveredPath = 'C:\\Program Files\\Git\\bin\\bash.exe'
|
||||||
|
|
||||||
|
vi.mocked(configManager.get).mockReturnValue(existingPath)
|
||||||
|
mockExistingPaths(existingPath, discoveredPath)
|
||||||
|
|
||||||
|
const result = autoDiscoverGitBash()
|
||||||
|
|
||||||
|
expect(result).toBe(existingPath)
|
||||||
|
expect(configManager.set).not.toHaveBeenCalled()
|
||||||
|
})
|
||||||
|
})
|
||||||
|
|
||||||
|
describe('with invalid existing config path', () => {
|
||||||
|
it('should attempt auto-discovery when existing path does not exist', () => {
|
||||||
|
const existingPath = 'C:\\NonExistent\\bin\\bash.exe'
|
||||||
|
const discoveredPath = 'C:\\Program Files\\Git\\bin\\bash.exe'
|
||||||
|
const gitPath = 'C:\\Program Files\\Git\\cmd\\git.exe'
|
||||||
|
|
||||||
|
vi.mocked(configManager.get).mockReturnValue(existingPath)
|
||||||
|
process.env.ProgramFiles = 'C:\\Program Files'
|
||||||
|
// Invalid path doesn't exist, but Git is installed at standard location
|
||||||
|
mockExistingPaths(gitPath, discoveredPath)
|
||||||
|
|
||||||
|
const result = autoDiscoverGitBash()
|
||||||
|
|
||||||
|
// Should discover and return the new path
|
||||||
|
expect(result).toBe(discoveredPath)
|
||||||
|
// Should persist the discovered path (overwrites invalid)
|
||||||
|
expect(configManager.set).toHaveBeenCalledWith('gitBashPath', discoveredPath)
|
||||||
|
})
|
||||||
|
|
||||||
|
it('should attempt auto-discovery when existing path is not bash.exe', () => {
|
||||||
|
const existingPath = 'C:\\CustomGit\\bin\\git.exe'
|
||||||
|
const discoveredPath = 'C:\\Program Files\\Git\\bin\\bash.exe'
|
||||||
|
const gitPath = 'C:\\Program Files\\Git\\cmd\\git.exe'
|
||||||
|
|
||||||
|
vi.mocked(configManager.get).mockReturnValue(existingPath)
|
||||||
|
process.env.ProgramFiles = 'C:\\Program Files'
|
||||||
|
// Invalid path exists but is not bash.exe (validation will fail)
|
||||||
|
// Git is installed at standard location
|
||||||
|
mockExistingPaths(existingPath, gitPath, discoveredPath)
|
||||||
|
|
||||||
|
const result = autoDiscoverGitBash()
|
||||||
|
|
||||||
|
// Should discover and return the new path
|
||||||
|
expect(result).toBe(discoveredPath)
|
||||||
|
// Should persist the discovered path (overwrites invalid)
|
||||||
|
expect(configManager.set).toHaveBeenCalledWith('gitBashPath', discoveredPath)
|
||||||
|
})
|
||||||
|
|
||||||
|
it('should return null when existing path is invalid and discovery fails', () => {
|
||||||
|
const existingPath = 'C:\\NonExistent\\bin\\bash.exe'
|
||||||
|
|
||||||
|
vi.mocked(configManager.get).mockReturnValue(existingPath)
|
||||||
|
vi.mocked(fs.existsSync).mockReturnValue(false)
|
||||||
|
vi.mocked(execFileSync).mockImplementation(() => {
|
||||||
|
throw new Error('Not found')
|
||||||
|
})
|
||||||
|
|
||||||
|
const result = autoDiscoverGitBash()
|
||||||
|
|
||||||
|
// Both validation and discovery failed
|
||||||
|
expect(result).toBeNull()
|
||||||
|
// Should not persist when discovery fails
|
||||||
|
expect(configManager.set).not.toHaveBeenCalled()
|
||||||
|
})
|
||||||
|
})
|
||||||
|
|
||||||
|
describe('config persistence verification', () => {
|
||||||
|
it('should persist discovered path with correct config key', () => {
|
||||||
|
const bashPath = 'C:\\Program Files\\Git\\bin\\bash.exe'
|
||||||
|
const gitPath = 'C:\\Program Files\\Git\\cmd\\git.exe'
|
||||||
|
|
||||||
|
vi.mocked(configManager.get).mockReturnValue(undefined)
|
||||||
|
process.env.ProgramFiles = 'C:\\Program Files'
|
||||||
|
mockExistingPaths(gitPath, bashPath)
|
||||||
|
|
||||||
|
autoDiscoverGitBash()
|
||||||
|
|
||||||
|
// Verify the exact call to configManager.set
|
||||||
|
expect(configManager.set).toHaveBeenCalledTimes(1)
|
||||||
|
expect(configManager.set).toHaveBeenCalledWith('gitBashPath', bashPath)
|
||||||
|
})
|
||||||
|
|
||||||
|
it('should persist on each discovery when config remains undefined', () => {
|
||||||
|
const bashPath = 'C:\\Program Files\\Git\\bin\\bash.exe'
|
||||||
|
const gitPath = 'C:\\Program Files\\Git\\cmd\\git.exe'
|
||||||
|
|
||||||
|
vi.mocked(configManager.get).mockReturnValue(undefined)
|
||||||
|
process.env.ProgramFiles = 'C:\\Program Files'
|
||||||
|
mockExistingPaths(gitPath, bashPath)
|
||||||
|
|
||||||
|
autoDiscoverGitBash()
|
||||||
|
autoDiscoverGitBash()
|
||||||
|
|
||||||
|
// Each call discovers and persists since config remains undefined (mocked)
|
||||||
|
expect(configManager.set).toHaveBeenCalledTimes(2)
|
||||||
|
})
|
||||||
|
})
|
||||||
|
|
||||||
|
describe('real-world scenarios', () => {
|
||||||
|
it('should discover and persist standard Git for Windows installation', () => {
|
||||||
|
const gitPath = 'C:\\Program Files\\Git\\cmd\\git.exe'
|
||||||
|
const bashPath = 'C:\\Program Files\\Git\\bin\\bash.exe'
|
||||||
|
|
||||||
|
vi.mocked(configManager.get).mockReturnValue(undefined)
|
||||||
|
process.env.ProgramFiles = 'C:\\Program Files'
|
||||||
|
mockExistingPaths(gitPath, bashPath)
|
||||||
|
|
||||||
|
const result = autoDiscoverGitBash()
|
||||||
|
|
||||||
|
expect(result).toBe(bashPath)
|
||||||
|
expect(configManager.set).toHaveBeenCalledWith('gitBashPath', bashPath)
|
||||||
|
})
|
||||||
|
|
||||||
|
it('should discover portable Git via where.exe and persist', () => {
|
||||||
|
const gitPath = 'D:\\PortableApps\\Git\\bin\\git.exe'
|
||||||
|
const bashPath = 'D:\\PortableApps\\Git\\bin\\bash.exe'
|
||||||
|
|
||||||
|
vi.mocked(configManager.get).mockReturnValue(undefined)
|
||||||
|
|
||||||
|
vi.mocked(fs.existsSync).mockImplementation((p) => {
|
||||||
|
const pathStr = p?.toString() || ''
|
||||||
|
// Common git paths don't exist
|
||||||
|
if (pathStr.includes('Program Files\\Git\\cmd\\git.exe')) return false
|
||||||
|
if (pathStr.includes('Program Files (x86)\\Git\\cmd\\git.exe')) return false
|
||||||
|
// Portable bash path exists
|
||||||
|
if (pathStr === bashPath) return true
|
||||||
|
return false
|
||||||
|
})
|
||||||
|
|
||||||
|
vi.mocked(execFileSync).mockReturnValue(gitPath)
|
||||||
|
|
||||||
|
const result = autoDiscoverGitBash()
|
||||||
|
|
||||||
|
expect(result).toBe(bashPath)
|
||||||
|
expect(configManager.set).toHaveBeenCalledWith('gitBashPath', bashPath)
|
||||||
|
})
|
||||||
|
|
||||||
|
it('should respect user-configured path over auto-discovery', () => {
|
||||||
|
const userConfiguredPath = 'D:\\MyGit\\bin\\bash.exe'
|
||||||
|
const systemPath = 'C:\\Program Files\\Git\\bin\\bash.exe'
|
||||||
|
|
||||||
|
vi.mocked(configManager.get).mockReturnValue(userConfiguredPath)
|
||||||
|
mockExistingPaths(userConfiguredPath, systemPath)
|
||||||
|
|
||||||
|
const result = autoDiscoverGitBash()
|
||||||
|
|
||||||
|
expect(result).toBe(userConfiguredPath)
|
||||||
|
expect(configManager.set).not.toHaveBeenCalled()
|
||||||
|
// Verify findGitBash was not called for discovery
|
||||||
|
expect(execFileSync).not.toHaveBeenCalled()
|
||||||
|
})
|
||||||
|
})
|
||||||
|
})
|
||||||
})
|
})
|
||||||
|
|||||||
@ -1,4 +1,5 @@
|
|||||||
import { loggerService } from '@logger'
|
import { loggerService } from '@logger'
|
||||||
|
import type { GitBashPathInfo, GitBashPathSource } from '@shared/config/constant'
|
||||||
import { HOME_CHERRY_DIR } from '@shared/config/constant'
|
import { HOME_CHERRY_DIR } from '@shared/config/constant'
|
||||||
import { execFileSync, spawn } from 'child_process'
|
import { execFileSync, spawn } from 'child_process'
|
||||||
import fs from 'fs'
|
import fs from 'fs'
|
||||||
@ -6,6 +7,7 @@ import os from 'os'
|
|||||||
import path from 'path'
|
import path from 'path'
|
||||||
|
|
||||||
import { isWin } from '../constant'
|
import { isWin } from '../constant'
|
||||||
|
import { ConfigKeys, configManager } from '../services/ConfigManager'
|
||||||
import { getResourcePath } from '.'
|
import { getResourcePath } from '.'
|
||||||
|
|
||||||
const logger = loggerService.withContext('Utils:Process')
|
const logger = loggerService.withContext('Utils:Process')
|
||||||
@ -59,7 +61,7 @@ export async function getBinaryPath(name?: string): Promise<string> {
|
|||||||
|
|
||||||
export async function isBinaryExists(name: string): Promise<boolean> {
|
export async function isBinaryExists(name: string): Promise<boolean> {
|
||||||
const cmd = await getBinaryPath(name)
|
const cmd = await getBinaryPath(name)
|
||||||
return await fs.existsSync(cmd)
|
return fs.existsSync(cmd)
|
||||||
}
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
@ -225,3 +227,77 @@ export function validateGitBashPath(customPath?: string | null): string | null {
|
|||||||
logger.debug('Validated custom Git Bash path', { path: resolved })
|
logger.debug('Validated custom Git Bash path', { path: resolved })
|
||||||
return resolved
|
return resolved
|
||||||
}
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Auto-discover and persist Git Bash path if not already configured
|
||||||
|
* Only called when Git Bash is actually needed
|
||||||
|
*
|
||||||
|
* Precedence order:
|
||||||
|
* 1. CLAUDE_CODE_GIT_BASH_PATH environment variable (highest - runtime override)
|
||||||
|
* 2. Configured path from settings (manual or auto)
|
||||||
|
* 3. Auto-discovery via findGitBash (only if no valid config exists)
|
||||||
|
*/
|
||||||
|
export function autoDiscoverGitBash(): string | null {
|
||||||
|
if (!isWin) {
|
||||||
|
return null
|
||||||
|
}
|
||||||
|
|
||||||
|
// 1. Check environment variable override first (highest priority)
|
||||||
|
const envOverride = process.env.CLAUDE_CODE_GIT_BASH_PATH
|
||||||
|
if (envOverride) {
|
||||||
|
const validated = validateGitBashPath(envOverride)
|
||||||
|
if (validated) {
|
||||||
|
logger.debug('Using CLAUDE_CODE_GIT_BASH_PATH override', { path: validated })
|
||||||
|
return validated
|
||||||
|
}
|
||||||
|
logger.warn('CLAUDE_CODE_GIT_BASH_PATH provided but path is invalid', { path: envOverride })
|
||||||
|
}
|
||||||
|
|
||||||
|
// 2. Check if a path is already configured
|
||||||
|
const existingPath = configManager.get<string | undefined>(ConfigKeys.GitBashPath)
|
||||||
|
const existingSource = configManager.get<GitBashPathSource | undefined>(ConfigKeys.GitBashPathSource)
|
||||||
|
|
||||||
|
if (existingPath) {
|
||||||
|
const validated = validateGitBashPath(existingPath)
|
||||||
|
if (validated) {
|
||||||
|
return validated
|
||||||
|
}
|
||||||
|
// Existing path is invalid, try to auto-discover
|
||||||
|
logger.warn('Existing Git Bash path is invalid, attempting auto-discovery', {
|
||||||
|
path: existingPath,
|
||||||
|
source: existingSource
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
// 3. Try to find Git Bash via auto-discovery
|
||||||
|
const discoveredPath = findGitBash()
|
||||||
|
if (discoveredPath) {
|
||||||
|
// Persist the discovered path with 'auto' source
|
||||||
|
configManager.set(ConfigKeys.GitBashPath, discoveredPath)
|
||||||
|
configManager.set(ConfigKeys.GitBashPathSource, 'auto')
|
||||||
|
logger.info('Auto-discovered Git Bash path', { path: discoveredPath })
|
||||||
|
}
|
||||||
|
|
||||||
|
return discoveredPath
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Get Git Bash path info including source
|
||||||
|
* If no path is configured, triggers auto-discovery first
|
||||||
|
*/
|
||||||
|
export function getGitBashPathInfo(): GitBashPathInfo {
|
||||||
|
if (!isWin) {
|
||||||
|
return { path: null, source: null }
|
||||||
|
}
|
||||||
|
|
||||||
|
let path = configManager.get<string | null>(ConfigKeys.GitBashPath) ?? null
|
||||||
|
let source = configManager.get<GitBashPathSource | null>(ConfigKeys.GitBashPathSource) ?? null
|
||||||
|
|
||||||
|
// If no path configured, trigger auto-discovery (handles upgrade from old versions)
|
||||||
|
if (!path) {
|
||||||
|
path = autoDiscoverGitBash()
|
||||||
|
source = path ? 'auto' : null
|
||||||
|
}
|
||||||
|
|
||||||
|
return { path, source }
|
||||||
|
}
|
||||||
|
|||||||
@ -2,7 +2,7 @@ import type { PermissionUpdate } from '@anthropic-ai/claude-agent-sdk'
|
|||||||
import { electronAPI } from '@electron-toolkit/preload'
|
import { electronAPI } from '@electron-toolkit/preload'
|
||||||
import type { SpanEntity, TokenUsage } from '@mcp-trace/trace-core'
|
import type { SpanEntity, TokenUsage } from '@mcp-trace/trace-core'
|
||||||
import type { SpanContext } from '@opentelemetry/api'
|
import type { SpanContext } from '@opentelemetry/api'
|
||||||
import type { TerminalConfig, UpgradeChannel } from '@shared/config/constant'
|
import type { GitBashPathInfo, TerminalConfig, UpgradeChannel } from '@shared/config/constant'
|
||||||
import type { LogLevel, LogSourceWithContext } from '@shared/config/logger'
|
import type { LogLevel, LogSourceWithContext } from '@shared/config/logger'
|
||||||
import type {
|
import type {
|
||||||
FileChangeEvent,
|
FileChangeEvent,
|
||||||
@ -134,6 +134,7 @@ const api = {
|
|||||||
getCpuName: () => ipcRenderer.invoke(IpcChannel.System_GetCpuName),
|
getCpuName: () => ipcRenderer.invoke(IpcChannel.System_GetCpuName),
|
||||||
checkGitBash: (): Promise<boolean> => ipcRenderer.invoke(IpcChannel.System_CheckGitBash),
|
checkGitBash: (): Promise<boolean> => ipcRenderer.invoke(IpcChannel.System_CheckGitBash),
|
||||||
getGitBashPath: (): Promise<string | null> => ipcRenderer.invoke(IpcChannel.System_GetGitBashPath),
|
getGitBashPath: (): Promise<string | null> => ipcRenderer.invoke(IpcChannel.System_GetGitBashPath),
|
||||||
|
getGitBashPathInfo: (): Promise<GitBashPathInfo> => ipcRenderer.invoke(IpcChannel.System_GetGitBashPathInfo),
|
||||||
setGitBashPath: (newPath: string | null): Promise<boolean> =>
|
setGitBashPath: (newPath: string | null): Promise<boolean> =>
|
||||||
ipcRenderer.invoke(IpcChannel.System_SetGitBashPath, newPath)
|
ipcRenderer.invoke(IpcChannel.System_SetGitBashPath, newPath)
|
||||||
},
|
},
|
||||||
|
|||||||
@ -7,7 +7,6 @@ import type { Chunk } from '@renderer/types/chunk'
|
|||||||
import { isOllamaProvider, isSupportEnableThinkingProvider } from '@renderer/utils/provider'
|
import { isOllamaProvider, isSupportEnableThinkingProvider } from '@renderer/utils/provider'
|
||||||
import type { LanguageModelMiddleware } from 'ai'
|
import type { LanguageModelMiddleware } from 'ai'
|
||||||
import { extractReasoningMiddleware, simulateStreamingMiddleware } from 'ai'
|
import { extractReasoningMiddleware, simulateStreamingMiddleware } from 'ai'
|
||||||
import { isEmpty } from 'lodash'
|
|
||||||
|
|
||||||
import { getAiSdkProviderId } from '../provider/factory'
|
import { getAiSdkProviderId } from '../provider/factory'
|
||||||
import { isOpenRouterGeminiGenerateImageModel } from '../utils/image'
|
import { isOpenRouterGeminiGenerateImageModel } from '../utils/image'
|
||||||
@ -16,7 +15,6 @@ import { openrouterGenerateImageMiddleware } from './openrouterGenerateImageMidd
|
|||||||
import { openrouterReasoningMiddleware } from './openrouterReasoningMiddleware'
|
import { openrouterReasoningMiddleware } from './openrouterReasoningMiddleware'
|
||||||
import { qwenThinkingMiddleware } from './qwenThinkingMiddleware'
|
import { qwenThinkingMiddleware } from './qwenThinkingMiddleware'
|
||||||
import { skipGeminiThoughtSignatureMiddleware } from './skipGeminiThoughtSignatureMiddleware'
|
import { skipGeminiThoughtSignatureMiddleware } from './skipGeminiThoughtSignatureMiddleware'
|
||||||
import { toolChoiceMiddleware } from './toolChoiceMiddleware'
|
|
||||||
|
|
||||||
const logger = loggerService.withContext('AiSdkMiddlewareBuilder')
|
const logger = loggerService.withContext('AiSdkMiddlewareBuilder')
|
||||||
|
|
||||||
@ -136,15 +134,6 @@ export class AiSdkMiddlewareBuilder {
|
|||||||
export function buildAiSdkMiddlewares(config: AiSdkMiddlewareConfig): LanguageModelMiddleware[] {
|
export function buildAiSdkMiddlewares(config: AiSdkMiddlewareConfig): LanguageModelMiddleware[] {
|
||||||
const builder = new AiSdkMiddlewareBuilder()
|
const builder = new AiSdkMiddlewareBuilder()
|
||||||
|
|
||||||
// 0. 知识库强制调用中间件(必须在最前面,确保第一轮强制调用知识库)
|
|
||||||
if (!isEmpty(config.assistant?.knowledge_bases?.map((base) => base.id)) && config.knowledgeRecognition !== 'on') {
|
|
||||||
builder.add({
|
|
||||||
name: 'force-knowledge-first',
|
|
||||||
middleware: toolChoiceMiddleware('builtin_knowledge_search')
|
|
||||||
})
|
|
||||||
logger.debug('Added toolChoice middleware to force knowledge base search on first round')
|
|
||||||
}
|
|
||||||
|
|
||||||
// 1. 根据provider添加特定中间件
|
// 1. 根据provider添加特定中间件
|
||||||
if (config.provider) {
|
if (config.provider) {
|
||||||
addProviderSpecificMiddlewares(builder, config)
|
addProviderSpecificMiddlewares(builder, config)
|
||||||
|
|||||||
@ -31,7 +31,7 @@ import { webSearchToolWithPreExtractedKeywords } from '../tools/WebSearchTool'
|
|||||||
|
|
||||||
const logger = loggerService.withContext('SearchOrchestrationPlugin')
|
const logger = loggerService.withContext('SearchOrchestrationPlugin')
|
||||||
|
|
||||||
const getMessageContent = (message: ModelMessage) => {
|
export const getMessageContent = (message: ModelMessage) => {
|
||||||
if (typeof message.content === 'string') return message.content
|
if (typeof message.content === 'string') return message.content
|
||||||
return message.content.reduce((acc, part) => {
|
return message.content.reduce((acc, part) => {
|
||||||
if (part.type === 'text') {
|
if (part.type === 'text') {
|
||||||
@ -266,14 +266,14 @@ export const searchOrchestrationPlugin = (assistant: Assistant, topicId: string)
|
|||||||
// 判断是否需要各种搜索
|
// 判断是否需要各种搜索
|
||||||
const knowledgeBaseIds = assistant.knowledge_bases?.map((base) => base.id)
|
const knowledgeBaseIds = assistant.knowledge_bases?.map((base) => base.id)
|
||||||
const hasKnowledgeBase = !isEmpty(knowledgeBaseIds)
|
const hasKnowledgeBase = !isEmpty(knowledgeBaseIds)
|
||||||
const knowledgeRecognition = assistant.knowledgeRecognition || 'on'
|
const knowledgeRecognition = assistant.knowledgeRecognition || 'off'
|
||||||
const globalMemoryEnabled = selectGlobalMemoryEnabled(store.getState())
|
const globalMemoryEnabled = selectGlobalMemoryEnabled(store.getState())
|
||||||
const shouldWebSearch = !!assistant.webSearchProviderId
|
const shouldWebSearch = !!assistant.webSearchProviderId
|
||||||
const shouldKnowledgeSearch = hasKnowledgeBase && knowledgeRecognition === 'on'
|
const shouldKnowledgeSearch = hasKnowledgeBase && knowledgeRecognition === 'on'
|
||||||
const shouldMemorySearch = globalMemoryEnabled && assistant.enableMemory
|
const shouldMemorySearch = globalMemoryEnabled && assistant.enableMemory
|
||||||
|
|
||||||
// 执行意图分析
|
// 执行意图分析
|
||||||
if (shouldWebSearch || hasKnowledgeBase) {
|
if (shouldWebSearch || shouldKnowledgeSearch) {
|
||||||
const analysisResult = await analyzeSearchIntent(lastUserMessage, assistant, {
|
const analysisResult = await analyzeSearchIntent(lastUserMessage, assistant, {
|
||||||
shouldWebSearch,
|
shouldWebSearch,
|
||||||
shouldKnowledgeSearch,
|
shouldKnowledgeSearch,
|
||||||
@ -330,41 +330,25 @@ export const searchOrchestrationPlugin = (assistant: Assistant, topicId: string)
|
|||||||
// 📚 知识库搜索工具配置
|
// 📚 知识库搜索工具配置
|
||||||
const knowledgeBaseIds = assistant.knowledge_bases?.map((base) => base.id)
|
const knowledgeBaseIds = assistant.knowledge_bases?.map((base) => base.id)
|
||||||
const hasKnowledgeBase = !isEmpty(knowledgeBaseIds)
|
const hasKnowledgeBase = !isEmpty(knowledgeBaseIds)
|
||||||
const knowledgeRecognition = assistant.knowledgeRecognition || 'on'
|
const knowledgeRecognition = assistant.knowledgeRecognition || 'off'
|
||||||
|
const shouldKnowledgeSearch = hasKnowledgeBase && knowledgeRecognition === 'on'
|
||||||
|
|
||||||
if (hasKnowledgeBase) {
|
if (shouldKnowledgeSearch) {
|
||||||
if (knowledgeRecognition === 'off') {
|
// on 模式:根据意图识别结果决定是否添加工具
|
||||||
// off 模式:直接添加知识库搜索工具,使用用户消息作为搜索关键词
|
const needsKnowledgeSearch =
|
||||||
|
analysisResult?.knowledge &&
|
||||||
|
analysisResult.knowledge.question &&
|
||||||
|
analysisResult.knowledge.question[0] !== 'not_needed'
|
||||||
|
|
||||||
|
if (needsKnowledgeSearch && analysisResult.knowledge) {
|
||||||
|
// logger.info('📚 Adding knowledge search tool (intent-based)')
|
||||||
const userMessage = userMessages[context.requestId]
|
const userMessage = userMessages[context.requestId]
|
||||||
const fallbackKeywords = {
|
|
||||||
question: [getMessageContent(userMessage) || 'search'],
|
|
||||||
rewrite: getMessageContent(userMessage) || 'search'
|
|
||||||
}
|
|
||||||
// logger.info('📚 Adding knowledge search tool (force mode)')
|
|
||||||
params.tools['builtin_knowledge_search'] = knowledgeSearchTool(
|
params.tools['builtin_knowledge_search'] = knowledgeSearchTool(
|
||||||
assistant,
|
assistant,
|
||||||
fallbackKeywords,
|
analysisResult.knowledge,
|
||||||
getMessageContent(userMessage),
|
getMessageContent(userMessage),
|
||||||
topicId
|
topicId
|
||||||
)
|
)
|
||||||
// params.toolChoice = { type: 'tool', toolName: 'builtin_knowledge_search' }
|
|
||||||
} else {
|
|
||||||
// on 模式:根据意图识别结果决定是否添加工具
|
|
||||||
const needsKnowledgeSearch =
|
|
||||||
analysisResult?.knowledge &&
|
|
||||||
analysisResult.knowledge.question &&
|
|
||||||
analysisResult.knowledge.question[0] !== 'not_needed'
|
|
||||||
|
|
||||||
if (needsKnowledgeSearch && analysisResult.knowledge) {
|
|
||||||
// logger.info('📚 Adding knowledge search tool (intent-based)')
|
|
||||||
const userMessage = userMessages[context.requestId]
|
|
||||||
params.tools['builtin_knowledge_search'] = knowledgeSearchTool(
|
|
||||||
assistant,
|
|
||||||
analysisResult.knowledge,
|
|
||||||
getMessageContent(userMessage),
|
|
||||||
topicId
|
|
||||||
)
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|||||||
@ -3,6 +3,7 @@ import { ErrorBoundary } from '@renderer/components/ErrorBoundary'
|
|||||||
import { HelpTooltip } from '@renderer/components/TooltipIcons'
|
import { HelpTooltip } from '@renderer/components/TooltipIcons'
|
||||||
import { TopView } from '@renderer/components/TopView'
|
import { TopView } from '@renderer/components/TopView'
|
||||||
import { permissionModeCards } from '@renderer/config/agent'
|
import { permissionModeCards } from '@renderer/config/agent'
|
||||||
|
import { isWin } from '@renderer/config/constant'
|
||||||
import { useAgents } from '@renderer/hooks/agents/useAgents'
|
import { useAgents } from '@renderer/hooks/agents/useAgents'
|
||||||
import { useUpdateAgent } from '@renderer/hooks/agents/useUpdateAgent'
|
import { useUpdateAgent } from '@renderer/hooks/agents/useUpdateAgent'
|
||||||
import SelectAgentBaseModelButton from '@renderer/pages/home/components/SelectAgentBaseModelButton'
|
import SelectAgentBaseModelButton from '@renderer/pages/home/components/SelectAgentBaseModelButton'
|
||||||
@ -16,7 +17,8 @@ import type {
|
|||||||
UpdateAgentForm
|
UpdateAgentForm
|
||||||
} from '@renderer/types'
|
} from '@renderer/types'
|
||||||
import { AgentConfigurationSchema, isAgentType } from '@renderer/types'
|
import { AgentConfigurationSchema, isAgentType } from '@renderer/types'
|
||||||
import { Alert, Button, Input, Modal, Select } from 'antd'
|
import type { GitBashPathInfo } from '@shared/config/constant'
|
||||||
|
import { Button, Input, Modal, Select } from 'antd'
|
||||||
import { AlertTriangleIcon } from 'lucide-react'
|
import { AlertTriangleIcon } from 'lucide-react'
|
||||||
import type { ChangeEvent, FormEvent } from 'react'
|
import type { ChangeEvent, FormEvent } from 'react'
|
||||||
import { useCallback, useEffect, useMemo, useRef, useState } from 'react'
|
import { useCallback, useEffect, useMemo, useRef, useState } from 'react'
|
||||||
@ -59,8 +61,7 @@ const PopupContainer: React.FC<Props> = ({ agent, afterSubmit, resolve }) => {
|
|||||||
const isEditing = (agent?: AgentWithTools) => agent !== undefined
|
const isEditing = (agent?: AgentWithTools) => agent !== undefined
|
||||||
|
|
||||||
const [form, setForm] = useState<BaseAgentForm>(() => buildAgentForm(agent))
|
const [form, setForm] = useState<BaseAgentForm>(() => buildAgentForm(agent))
|
||||||
const [hasGitBash, setHasGitBash] = useState<boolean>(true)
|
const [gitBashPathInfo, setGitBashPathInfo] = useState<GitBashPathInfo>({ path: null, source: null })
|
||||||
const [customGitBashPath, setCustomGitBashPath] = useState<string>('')
|
|
||||||
|
|
||||||
useEffect(() => {
|
useEffect(() => {
|
||||||
if (open) {
|
if (open) {
|
||||||
@ -68,29 +69,15 @@ const PopupContainer: React.FC<Props> = ({ agent, afterSubmit, resolve }) => {
|
|||||||
}
|
}
|
||||||
}, [agent, open])
|
}, [agent, open])
|
||||||
|
|
||||||
const checkGitBash = useCallback(
|
const checkGitBash = useCallback(async () => {
|
||||||
async (showToast = false) => {
|
if (!isWin) return
|
||||||
try {
|
try {
|
||||||
const [gitBashInstalled, savedPath] = await Promise.all([
|
const pathInfo = await window.api.system.getGitBashPathInfo()
|
||||||
window.api.system.checkGitBash(),
|
setGitBashPathInfo(pathInfo)
|
||||||
window.api.system.getGitBashPath().catch(() => null)
|
} catch (error) {
|
||||||
])
|
logger.error('Failed to check Git Bash:', error as Error)
|
||||||
setCustomGitBashPath(savedPath ?? '')
|
}
|
||||||
setHasGitBash(gitBashInstalled)
|
}, [])
|
||||||
if (showToast) {
|
|
||||||
if (gitBashInstalled) {
|
|
||||||
window.toast.success(t('agent.gitBash.success', 'Git Bash detected successfully!'))
|
|
||||||
} else {
|
|
||||||
window.toast.error(t('agent.gitBash.notFound', 'Git Bash not found. Please install it first.'))
|
|
||||||
}
|
|
||||||
}
|
|
||||||
} catch (error) {
|
|
||||||
logger.error('Failed to check Git Bash:', error as Error)
|
|
||||||
setHasGitBash(true) // Default to true on error to avoid false warnings
|
|
||||||
}
|
|
||||||
},
|
|
||||||
[t]
|
|
||||||
)
|
|
||||||
|
|
||||||
useEffect(() => {
|
useEffect(() => {
|
||||||
checkGitBash()
|
checkGitBash()
|
||||||
@ -119,24 +106,22 @@ const PopupContainer: React.FC<Props> = ({ agent, afterSubmit, resolve }) => {
|
|||||||
return
|
return
|
||||||
}
|
}
|
||||||
|
|
||||||
setCustomGitBashPath(pickedPath)
|
await checkGitBash()
|
||||||
await checkGitBash(true)
|
|
||||||
} catch (error) {
|
} catch (error) {
|
||||||
logger.error('Failed to pick Git Bash path', error as Error)
|
logger.error('Failed to pick Git Bash path', error as Error)
|
||||||
window.toast.error(t('agent.gitBash.pick.failed', 'Failed to set Git Bash path'))
|
window.toast.error(t('agent.gitBash.pick.failed', 'Failed to set Git Bash path'))
|
||||||
}
|
}
|
||||||
}, [checkGitBash, t])
|
}, [checkGitBash, t])
|
||||||
|
|
||||||
const handleClearGitBash = useCallback(async () => {
|
const handleResetGitBash = useCallback(async () => {
|
||||||
try {
|
try {
|
||||||
|
// Clear manual setting and re-run auto-discovery
|
||||||
await window.api.system.setGitBashPath(null)
|
await window.api.system.setGitBashPath(null)
|
||||||
setCustomGitBashPath('')
|
await checkGitBash()
|
||||||
await checkGitBash(true)
|
|
||||||
} catch (error) {
|
} catch (error) {
|
||||||
logger.error('Failed to clear Git Bash path', error as Error)
|
logger.error('Failed to reset Git Bash path', error as Error)
|
||||||
window.toast.error(t('agent.gitBash.pick.failed', 'Failed to set Git Bash path'))
|
|
||||||
}
|
}
|
||||||
}, [checkGitBash, t])
|
}, [checkGitBash])
|
||||||
|
|
||||||
const onPermissionModeChange = useCallback((value: PermissionMode) => {
|
const onPermissionModeChange = useCallback((value: PermissionMode) => {
|
||||||
setForm((prev) => {
|
setForm((prev) => {
|
||||||
@ -268,6 +253,12 @@ const PopupContainer: React.FC<Props> = ({ agent, afterSubmit, resolve }) => {
|
|||||||
return
|
return
|
||||||
}
|
}
|
||||||
|
|
||||||
|
if (isWin && !gitBashPathInfo.path) {
|
||||||
|
window.toast.error(t('agent.gitBash.error.required', 'Git Bash path is required on Windows'))
|
||||||
|
loadingRef.current = false
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
if (isEditing(agent)) {
|
if (isEditing(agent)) {
|
||||||
if (!agent) {
|
if (!agent) {
|
||||||
loadingRef.current = false
|
loadingRef.current = false
|
||||||
@ -327,7 +318,8 @@ const PopupContainer: React.FC<Props> = ({ agent, afterSubmit, resolve }) => {
|
|||||||
t,
|
t,
|
||||||
updateAgent,
|
updateAgent,
|
||||||
afterSubmit,
|
afterSubmit,
|
||||||
addAgent
|
addAgent,
|
||||||
|
gitBashPathInfo.path
|
||||||
]
|
]
|
||||||
)
|
)
|
||||||
|
|
||||||
@ -346,66 +338,6 @@ const PopupContainer: React.FC<Props> = ({ agent, afterSubmit, resolve }) => {
|
|||||||
footer={null}>
|
footer={null}>
|
||||||
<StyledForm onSubmit={onSubmit}>
|
<StyledForm onSubmit={onSubmit}>
|
||||||
<FormContent>
|
<FormContent>
|
||||||
{!hasGitBash && (
|
|
||||||
<Alert
|
|
||||||
message={t('agent.gitBash.error.title', 'Git Bash Required')}
|
|
||||||
description={
|
|
||||||
<div>
|
|
||||||
<div style={{ marginBottom: 8 }}>
|
|
||||||
{t(
|
|
||||||
'agent.gitBash.error.description',
|
|
||||||
'Git Bash is required to run agents on Windows. The agent cannot function without it. Please install Git for Windows from'
|
|
||||||
)}{' '}
|
|
||||||
<a
|
|
||||||
href="https://git-scm.com/download/win"
|
|
||||||
onClick={(e) => {
|
|
||||||
e.preventDefault()
|
|
||||||
window.api.openWebsite('https://git-scm.com/download/win')
|
|
||||||
}}
|
|
||||||
style={{ textDecoration: 'underline' }}>
|
|
||||||
git-scm.com
|
|
||||||
</a>
|
|
||||||
</div>
|
|
||||||
<Button size="small" onClick={() => checkGitBash(true)}>
|
|
||||||
{t('agent.gitBash.error.recheck', 'Recheck Git Bash Installation')}
|
|
||||||
</Button>
|
|
||||||
<Button size="small" style={{ marginLeft: 8 }} onClick={handlePickGitBash}>
|
|
||||||
{t('agent.gitBash.pick.button', 'Select Git Bash Path')}
|
|
||||||
</Button>
|
|
||||||
</div>
|
|
||||||
}
|
|
||||||
type="error"
|
|
||||||
showIcon
|
|
||||||
style={{ marginBottom: 16 }}
|
|
||||||
/>
|
|
||||||
)}
|
|
||||||
|
|
||||||
{hasGitBash && customGitBashPath && (
|
|
||||||
<Alert
|
|
||||||
message={t('agent.gitBash.found.title', 'Git Bash configured')}
|
|
||||||
description={
|
|
||||||
<div style={{ display: 'flex', flexDirection: 'column', gap: 8 }}>
|
|
||||||
<div>
|
|
||||||
{t('agent.gitBash.customPath', {
|
|
||||||
defaultValue: 'Using custom path: {{path}}',
|
|
||||||
path: customGitBashPath
|
|
||||||
})}
|
|
||||||
</div>
|
|
||||||
<div style={{ display: 'flex', gap: 8 }}>
|
|
||||||
<Button size="small" onClick={handlePickGitBash}>
|
|
||||||
{t('agent.gitBash.pick.button', 'Select Git Bash Path')}
|
|
||||||
</Button>
|
|
||||||
<Button size="small" onClick={handleClearGitBash}>
|
|
||||||
{t('agent.gitBash.clear.button', 'Clear custom path')}
|
|
||||||
</Button>
|
|
||||||
</div>
|
|
||||||
</div>
|
|
||||||
}
|
|
||||||
type="success"
|
|
||||||
showIcon
|
|
||||||
style={{ marginBottom: 16 }}
|
|
||||||
/>
|
|
||||||
)}
|
|
||||||
<FormRow>
|
<FormRow>
|
||||||
<FormItem style={{ flex: 1 }}>
|
<FormItem style={{ flex: 1 }}>
|
||||||
<Label>
|
<Label>
|
||||||
@ -439,6 +371,40 @@ const PopupContainer: React.FC<Props> = ({ agent, afterSubmit, resolve }) => {
|
|||||||
/>
|
/>
|
||||||
</FormItem>
|
</FormItem>
|
||||||
|
|
||||||
|
{isWin && (
|
||||||
|
<FormItem>
|
||||||
|
<div className="flex items-center gap-2">
|
||||||
|
<Label>
|
||||||
|
Git Bash <RequiredMark>*</RequiredMark>
|
||||||
|
</Label>
|
||||||
|
<HelpTooltip
|
||||||
|
title={t(
|
||||||
|
'agent.gitBash.tooltip',
|
||||||
|
'Git Bash is required to run agents on Windows. Install from git-scm.com if not available.'
|
||||||
|
)}
|
||||||
|
/>
|
||||||
|
</div>
|
||||||
|
<GitBashInputWrapper>
|
||||||
|
<Input
|
||||||
|
value={gitBashPathInfo.path ?? ''}
|
||||||
|
readOnly
|
||||||
|
placeholder={t('agent.gitBash.placeholder', 'Select bash.exe path')}
|
||||||
|
/>
|
||||||
|
<Button size="small" onClick={handlePickGitBash}>
|
||||||
|
{t('common.select', 'Select')}
|
||||||
|
</Button>
|
||||||
|
{gitBashPathInfo.source === 'manual' && (
|
||||||
|
<Button size="small" onClick={handleResetGitBash}>
|
||||||
|
{t('common.reset', 'Reset')}
|
||||||
|
</Button>
|
||||||
|
)}
|
||||||
|
</GitBashInputWrapper>
|
||||||
|
{gitBashPathInfo.path && gitBashPathInfo.source === 'auto' && (
|
||||||
|
<SourceHint>{t('agent.gitBash.autoDiscoveredHint', 'Auto-discovered')}</SourceHint>
|
||||||
|
)}
|
||||||
|
</FormItem>
|
||||||
|
)}
|
||||||
|
|
||||||
<FormItem>
|
<FormItem>
|
||||||
<Label>
|
<Label>
|
||||||
{t('agent.settings.tooling.permissionMode.title', 'Permission mode')} <RequiredMark>*</RequiredMark>
|
{t('agent.settings.tooling.permissionMode.title', 'Permission mode')} <RequiredMark>*</RequiredMark>
|
||||||
@ -511,7 +477,11 @@ const PopupContainer: React.FC<Props> = ({ agent, afterSubmit, resolve }) => {
|
|||||||
|
|
||||||
<FormFooter>
|
<FormFooter>
|
||||||
<Button onClick={onCancel}>{t('common.close')}</Button>
|
<Button onClick={onCancel}>{t('common.close')}</Button>
|
||||||
<Button type="primary" htmlType="submit" loading={loadingRef.current} disabled={!hasGitBash}>
|
<Button
|
||||||
|
type="primary"
|
||||||
|
htmlType="submit"
|
||||||
|
loading={loadingRef.current}
|
||||||
|
disabled={isWin && !gitBashPathInfo.path}>
|
||||||
{isEditing(agent) ? t('common.confirm') : t('common.add')}
|
{isEditing(agent) ? t('common.confirm') : t('common.add')}
|
||||||
</Button>
|
</Button>
|
||||||
</FormFooter>
|
</FormFooter>
|
||||||
@ -582,6 +552,21 @@ const FormItem = styled.div`
|
|||||||
gap: 8px;
|
gap: 8px;
|
||||||
`
|
`
|
||||||
|
|
||||||
|
const GitBashInputWrapper = styled.div`
|
||||||
|
display: flex;
|
||||||
|
gap: 8px;
|
||||||
|
align-items: center;
|
||||||
|
|
||||||
|
input {
|
||||||
|
flex: 1;
|
||||||
|
}
|
||||||
|
`
|
||||||
|
|
||||||
|
const SourceHint = styled.span`
|
||||||
|
font-size: 12px;
|
||||||
|
color: var(--color-text-3);
|
||||||
|
`
|
||||||
|
|
||||||
const Label = styled.label`
|
const Label = styled.label`
|
||||||
font-size: 14px;
|
font-size: 14px;
|
||||||
color: var(--color-text-1);
|
color: var(--color-text-1);
|
||||||
|
|||||||
@ -362,7 +362,7 @@ export const SYSTEM_MODELS: Record<SystemProviderId | 'defaultModel', Model[]> =
|
|||||||
{
|
{
|
||||||
id: 'gemini-3-pro-image-preview',
|
id: 'gemini-3-pro-image-preview',
|
||||||
provider: 'gemini',
|
provider: 'gemini',
|
||||||
name: 'Gemini 3 Pro Image Privew',
|
name: 'Gemini 3 Pro Image Preview',
|
||||||
group: 'Gemini 3'
|
group: 'Gemini 3'
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
|
|||||||
@ -32,6 +32,7 @@
|
|||||||
},
|
},
|
||||||
"gitBash": {
|
"gitBash": {
|
||||||
"autoDetected": "Using auto-detected Git Bash",
|
"autoDetected": "Using auto-detected Git Bash",
|
||||||
|
"autoDiscoveredHint": "Auto-discovered",
|
||||||
"clear": {
|
"clear": {
|
||||||
"button": "Clear custom path"
|
"button": "Clear custom path"
|
||||||
},
|
},
|
||||||
@ -39,6 +40,7 @@
|
|||||||
"error": {
|
"error": {
|
||||||
"description": "Git Bash is required to run agents on Windows. The agent cannot function without it. Please install Git for Windows from",
|
"description": "Git Bash is required to run agents on Windows. The agent cannot function without it. Please install Git for Windows from",
|
||||||
"recheck": "Recheck Git Bash Installation",
|
"recheck": "Recheck Git Bash Installation",
|
||||||
|
"required": "Git Bash path is required on Windows",
|
||||||
"title": "Git Bash Required"
|
"title": "Git Bash Required"
|
||||||
},
|
},
|
||||||
"found": {
|
"found": {
|
||||||
@ -51,7 +53,9 @@
|
|||||||
"invalidPath": "Selected file is not a valid Git Bash executable (bash.exe).",
|
"invalidPath": "Selected file is not a valid Git Bash executable (bash.exe).",
|
||||||
"title": "Select Git Bash executable"
|
"title": "Select Git Bash executable"
|
||||||
},
|
},
|
||||||
"success": "Git Bash detected successfully!"
|
"placeholder": "Select bash.exe path",
|
||||||
|
"success": "Git Bash detected successfully!",
|
||||||
|
"tooltip": "Git Bash is required to run agents on Windows. Install from git-scm.com if not available."
|
||||||
},
|
},
|
||||||
"input": {
|
"input": {
|
||||||
"placeholder": "Enter your message here, send with {{key}} - @ select path, / select command"
|
"placeholder": "Enter your message here, send with {{key}} - @ select path, / select command"
|
||||||
|
|||||||
@ -32,6 +32,7 @@
|
|||||||
},
|
},
|
||||||
"gitBash": {
|
"gitBash": {
|
||||||
"autoDetected": "使用自动检测的 Git Bash",
|
"autoDetected": "使用自动检测的 Git Bash",
|
||||||
|
"autoDiscoveredHint": "自动发现",
|
||||||
"clear": {
|
"clear": {
|
||||||
"button": "清除自定义路径"
|
"button": "清除自定义路径"
|
||||||
},
|
},
|
||||||
@ -39,6 +40,7 @@
|
|||||||
"error": {
|
"error": {
|
||||||
"description": "在 Windows 上运行智能体需要 Git Bash。没有它智能体无法运行。请从以下地址安装 Git for Windows",
|
"description": "在 Windows 上运行智能体需要 Git Bash。没有它智能体无法运行。请从以下地址安装 Git for Windows",
|
||||||
"recheck": "重新检测 Git Bash 安装",
|
"recheck": "重新检测 Git Bash 安装",
|
||||||
|
"required": "在 Windows 上需要配置 Git Bash 路径",
|
||||||
"title": "需要 Git Bash"
|
"title": "需要 Git Bash"
|
||||||
},
|
},
|
||||||
"found": {
|
"found": {
|
||||||
@ -51,7 +53,9 @@
|
|||||||
"invalidPath": "选择的文件不是有效的 Git Bash 可执行文件(bash.exe)。",
|
"invalidPath": "选择的文件不是有效的 Git Bash 可执行文件(bash.exe)。",
|
||||||
"title": "选择 Git Bash 可执行文件"
|
"title": "选择 Git Bash 可执行文件"
|
||||||
},
|
},
|
||||||
"success": "成功检测到 Git Bash!"
|
"placeholder": "选择 bash.exe 路径",
|
||||||
|
"success": "成功检测到 Git Bash!",
|
||||||
|
"tooltip": "在 Windows 上运行智能体需要 Git Bash。如果未安装,请从 git-scm.com 下载安装。"
|
||||||
},
|
},
|
||||||
"input": {
|
"input": {
|
||||||
"placeholder": "在这里输入消息,按 {{key}} 发送 - @ 选择路径, / 选择命令"
|
"placeholder": "在这里输入消息,按 {{key}} 发送 - @ 选择路径, / 选择命令"
|
||||||
|
|||||||
@ -32,6 +32,7 @@
|
|||||||
},
|
},
|
||||||
"gitBash": {
|
"gitBash": {
|
||||||
"autoDetected": "使用自動偵測的 Git Bash",
|
"autoDetected": "使用自動偵測的 Git Bash",
|
||||||
|
"autoDiscoveredHint": "自動發現",
|
||||||
"clear": {
|
"clear": {
|
||||||
"button": "清除自訂路徑"
|
"button": "清除自訂路徑"
|
||||||
},
|
},
|
||||||
@ -39,6 +40,7 @@
|
|||||||
"error": {
|
"error": {
|
||||||
"description": "在 Windows 上執行 Agent 需要 Git Bash。沒有它 Agent 無法運作。請從以下網址安裝 Git for Windows",
|
"description": "在 Windows 上執行 Agent 需要 Git Bash。沒有它 Agent 無法運作。請從以下網址安裝 Git for Windows",
|
||||||
"recheck": "重新偵測 Git Bash 安裝",
|
"recheck": "重新偵測 Git Bash 安裝",
|
||||||
|
"required": "在 Windows 上需要設定 Git Bash 路徑",
|
||||||
"title": "需要 Git Bash"
|
"title": "需要 Git Bash"
|
||||||
},
|
},
|
||||||
"found": {
|
"found": {
|
||||||
@ -51,7 +53,9 @@
|
|||||||
"invalidPath": "選擇的檔案不是有效的 Git Bash 可執行檔(bash.exe)。",
|
"invalidPath": "選擇的檔案不是有效的 Git Bash 可執行檔(bash.exe)。",
|
||||||
"title": "選擇 Git Bash 可執行檔"
|
"title": "選擇 Git Bash 可執行檔"
|
||||||
},
|
},
|
||||||
"success": "成功偵測到 Git Bash!"
|
"placeholder": "選擇 bash.exe 路徑",
|
||||||
|
"success": "成功偵測到 Git Bash!",
|
||||||
|
"tooltip": "在 Windows 上執行 Agent 需要 Git Bash。如未安裝,請從 git-scm.com 下載安裝。"
|
||||||
},
|
},
|
||||||
"input": {
|
"input": {
|
||||||
"placeholder": "在這裡輸入您的訊息,使用 {{key}} 傳送 - @ 選擇路徑,/ 選擇命令"
|
"placeholder": "在這裡輸入您的訊息,使用 {{key}} 傳送 - @ 選擇路徑,/ 選擇命令"
|
||||||
|
|||||||
@ -32,6 +32,7 @@
|
|||||||
},
|
},
|
||||||
"gitBash": {
|
"gitBash": {
|
||||||
"autoDetected": "Automatisch ermitteltes Git Bash wird verwendet",
|
"autoDetected": "Automatisch ermitteltes Git Bash wird verwendet",
|
||||||
|
"autoDiscoveredHint": "[to be translated]:Auto-discovered",
|
||||||
"clear": {
|
"clear": {
|
||||||
"button": "Benutzerdefinierten Pfad löschen"
|
"button": "Benutzerdefinierten Pfad löschen"
|
||||||
},
|
},
|
||||||
@ -39,6 +40,7 @@
|
|||||||
"error": {
|
"error": {
|
||||||
"description": "Git Bash ist erforderlich, um Agents unter Windows auszuführen. Der Agent kann ohne es nicht funktionieren. Bitte installieren Sie Git für Windows von",
|
"description": "Git Bash ist erforderlich, um Agents unter Windows auszuführen. Der Agent kann ohne es nicht funktionieren. Bitte installieren Sie Git für Windows von",
|
||||||
"recheck": "Überprüfe die Git Bash-Installation erneut",
|
"recheck": "Überprüfe die Git Bash-Installation erneut",
|
||||||
|
"required": "[to be translated]:Git Bash path is required on Windows",
|
||||||
"title": "Git Bash erforderlich"
|
"title": "Git Bash erforderlich"
|
||||||
},
|
},
|
||||||
"found": {
|
"found": {
|
||||||
@ -51,7 +53,9 @@
|
|||||||
"invalidPath": "Die ausgewählte Datei ist keine gültige Git Bash ausführbare Datei (bash.exe).",
|
"invalidPath": "Die ausgewählte Datei ist keine gültige Git Bash ausführbare Datei (bash.exe).",
|
||||||
"title": "Git Bash ausführbare Datei auswählen"
|
"title": "Git Bash ausführbare Datei auswählen"
|
||||||
},
|
},
|
||||||
"success": "Git Bash erfolgreich erkannt!"
|
"placeholder": "[to be translated]:Select bash.exe path",
|
||||||
|
"success": "Git Bash erfolgreich erkannt!",
|
||||||
|
"tooltip": "[to be translated]:Git Bash is required to run agents on Windows. Install from git-scm.com if not available."
|
||||||
},
|
},
|
||||||
"input": {
|
"input": {
|
||||||
"placeholder": "Gib hier deine Nachricht ein, senden mit {{key}} – @ Pfad auswählen, / Befehl auswählen"
|
"placeholder": "Gib hier deine Nachricht ein, senden mit {{key}} – @ Pfad auswählen, / Befehl auswählen"
|
||||||
|
|||||||
@ -32,6 +32,7 @@
|
|||||||
},
|
},
|
||||||
"gitBash": {
|
"gitBash": {
|
||||||
"autoDetected": "Χρησιμοποιείται αυτόματα εντοπισμένο Git Bash",
|
"autoDetected": "Χρησιμοποιείται αυτόματα εντοπισμένο Git Bash",
|
||||||
|
"autoDiscoveredHint": "[to be translated]:Auto-discovered",
|
||||||
"clear": {
|
"clear": {
|
||||||
"button": "Διαγραφή προσαρμοσμένης διαδρομής"
|
"button": "Διαγραφή προσαρμοσμένης διαδρομής"
|
||||||
},
|
},
|
||||||
@ -39,6 +40,7 @@
|
|||||||
"error": {
|
"error": {
|
||||||
"description": "Το Git Bash απαιτείται για την εκτέλεση πρακτόρων στα Windows. Ο πράκτορας δεν μπορεί να λειτουργήσει χωρίς αυτό. Παρακαλούμε εγκαταστήστε το Git για Windows από",
|
"description": "Το Git Bash απαιτείται για την εκτέλεση πρακτόρων στα Windows. Ο πράκτορας δεν μπορεί να λειτουργήσει χωρίς αυτό. Παρακαλούμε εγκαταστήστε το Git για Windows από",
|
||||||
"recheck": "Επανέλεγχος Εγκατάστασης του Git Bash",
|
"recheck": "Επανέλεγχος Εγκατάστασης του Git Bash",
|
||||||
|
"required": "[to be translated]:Git Bash path is required on Windows",
|
||||||
"title": "Απαιτείται Git Bash"
|
"title": "Απαιτείται Git Bash"
|
||||||
},
|
},
|
||||||
"found": {
|
"found": {
|
||||||
@ -51,7 +53,9 @@
|
|||||||
"invalidPath": "Το επιλεγμένο αρχείο δεν είναι έγκυρο εκτελέσιμο Git Bash (bash.exe).",
|
"invalidPath": "Το επιλεγμένο αρχείο δεν είναι έγκυρο εκτελέσιμο Git Bash (bash.exe).",
|
||||||
"title": "Επιλογή εκτελέσιμου Git Bash"
|
"title": "Επιλογή εκτελέσιμου Git Bash"
|
||||||
},
|
},
|
||||||
"success": "Το Git Bash εντοπίστηκε με επιτυχία!"
|
"placeholder": "[to be translated]:Select bash.exe path",
|
||||||
|
"success": "Το Git Bash εντοπίστηκε με επιτυχία!",
|
||||||
|
"tooltip": "[to be translated]:Git Bash is required to run agents on Windows. Install from git-scm.com if not available."
|
||||||
},
|
},
|
||||||
"input": {
|
"input": {
|
||||||
"placeholder": "Εισάγετε το μήνυμά σας εδώ, στείλτε με {{key}} - @ επιλέξτε διαδρομή, / επιλέξτε εντολή"
|
"placeholder": "Εισάγετε το μήνυμά σας εδώ, στείλτε με {{key}} - @ επιλέξτε διαδρομή, / επιλέξτε εντολή"
|
||||||
|
|||||||
@ -32,6 +32,7 @@
|
|||||||
},
|
},
|
||||||
"gitBash": {
|
"gitBash": {
|
||||||
"autoDetected": "Usando Git Bash detectado automáticamente",
|
"autoDetected": "Usando Git Bash detectado automáticamente",
|
||||||
|
"autoDiscoveredHint": "[to be translated]:Auto-discovered",
|
||||||
"clear": {
|
"clear": {
|
||||||
"button": "Borrar ruta personalizada"
|
"button": "Borrar ruta personalizada"
|
||||||
},
|
},
|
||||||
@ -39,6 +40,7 @@
|
|||||||
"error": {
|
"error": {
|
||||||
"description": "Se requiere Git Bash para ejecutar agentes en Windows. El agente no puede funcionar sin él. Instale Git para Windows desde",
|
"description": "Se requiere Git Bash para ejecutar agentes en Windows. El agente no puede funcionar sin él. Instale Git para Windows desde",
|
||||||
"recheck": "Volver a verificar la instalación de Git Bash",
|
"recheck": "Volver a verificar la instalación de Git Bash",
|
||||||
|
"required": "[to be translated]:Git Bash path is required on Windows",
|
||||||
"title": "Git Bash Requerido"
|
"title": "Git Bash Requerido"
|
||||||
},
|
},
|
||||||
"found": {
|
"found": {
|
||||||
@ -51,7 +53,9 @@
|
|||||||
"invalidPath": "El archivo seleccionado no es un ejecutable válido de Git Bash (bash.exe).",
|
"invalidPath": "El archivo seleccionado no es un ejecutable válido de Git Bash (bash.exe).",
|
||||||
"title": "Seleccionar ejecutable de Git Bash"
|
"title": "Seleccionar ejecutable de Git Bash"
|
||||||
},
|
},
|
||||||
"success": "¡Git Bash detectado con éxito!"
|
"placeholder": "[to be translated]:Select bash.exe path",
|
||||||
|
"success": "¡Git Bash detectado con éxito!",
|
||||||
|
"tooltip": "[to be translated]:Git Bash is required to run agents on Windows. Install from git-scm.com if not available."
|
||||||
},
|
},
|
||||||
"input": {
|
"input": {
|
||||||
"placeholder": "Introduce tu mensaje aquí, envía con {{key}} - @ seleccionar ruta, / seleccionar comando"
|
"placeholder": "Introduce tu mensaje aquí, envía con {{key}} - @ seleccionar ruta, / seleccionar comando"
|
||||||
|
|||||||
@ -32,6 +32,7 @@
|
|||||||
},
|
},
|
||||||
"gitBash": {
|
"gitBash": {
|
||||||
"autoDetected": "Utilisation de Git Bash détecté automatiquement",
|
"autoDetected": "Utilisation de Git Bash détecté automatiquement",
|
||||||
|
"autoDiscoveredHint": "[to be translated]:Auto-discovered",
|
||||||
"clear": {
|
"clear": {
|
||||||
"button": "Effacer le chemin personnalisé"
|
"button": "Effacer le chemin personnalisé"
|
||||||
},
|
},
|
||||||
@ -39,6 +40,7 @@
|
|||||||
"error": {
|
"error": {
|
||||||
"description": "Git Bash est requis pour exécuter des agents sur Windows. L'agent ne peut pas fonctionner sans. Veuillez installer Git pour Windows depuis",
|
"description": "Git Bash est requis pour exécuter des agents sur Windows. L'agent ne peut pas fonctionner sans. Veuillez installer Git pour Windows depuis",
|
||||||
"recheck": "Revérifier l'installation de Git Bash",
|
"recheck": "Revérifier l'installation de Git Bash",
|
||||||
|
"required": "[to be translated]:Git Bash path is required on Windows",
|
||||||
"title": "Git Bash requis"
|
"title": "Git Bash requis"
|
||||||
},
|
},
|
||||||
"found": {
|
"found": {
|
||||||
@ -51,7 +53,9 @@
|
|||||||
"invalidPath": "Le fichier sélectionné n'est pas un exécutable Git Bash valide (bash.exe).",
|
"invalidPath": "Le fichier sélectionné n'est pas un exécutable Git Bash valide (bash.exe).",
|
||||||
"title": "Sélectionner l'exécutable Git Bash"
|
"title": "Sélectionner l'exécutable Git Bash"
|
||||||
},
|
},
|
||||||
"success": "Git Bash détecté avec succès !"
|
"placeholder": "[to be translated]:Select bash.exe path",
|
||||||
|
"success": "Git Bash détecté avec succès !",
|
||||||
|
"tooltip": "[to be translated]:Git Bash is required to run agents on Windows. Install from git-scm.com if not available."
|
||||||
},
|
},
|
||||||
"input": {
|
"input": {
|
||||||
"placeholder": "Entrez votre message ici, envoyez avec {{key}} - @ sélectionner le chemin, / sélectionner la commande"
|
"placeholder": "Entrez votre message ici, envoyez avec {{key}} - @ sélectionner le chemin, / sélectionner la commande"
|
||||||
|
|||||||
@ -32,6 +32,7 @@
|
|||||||
},
|
},
|
||||||
"gitBash": {
|
"gitBash": {
|
||||||
"autoDetected": "自動検出されたGit Bashを使用中",
|
"autoDetected": "自動検出されたGit Bashを使用中",
|
||||||
|
"autoDiscoveredHint": "[to be translated]:Auto-discovered",
|
||||||
"clear": {
|
"clear": {
|
||||||
"button": "カスタムパスをクリア"
|
"button": "カスタムパスをクリア"
|
||||||
},
|
},
|
||||||
@ -39,6 +40,7 @@
|
|||||||
"error": {
|
"error": {
|
||||||
"description": "Windowsでエージェントを実行するにはGit Bashが必要です。これがないとエージェントは動作しません。以下からGit for Windowsをインストールしてください。",
|
"description": "Windowsでエージェントを実行するにはGit Bashが必要です。これがないとエージェントは動作しません。以下からGit for Windowsをインストールしてください。",
|
||||||
"recheck": "Git Bashのインストールを再確認してください",
|
"recheck": "Git Bashのインストールを再確認してください",
|
||||||
|
"required": "[to be translated]:Git Bash path is required on Windows",
|
||||||
"title": "Git Bashが必要です"
|
"title": "Git Bashが必要です"
|
||||||
},
|
},
|
||||||
"found": {
|
"found": {
|
||||||
@ -51,7 +53,9 @@
|
|||||||
"invalidPath": "選択されたファイルは有効なGit Bash実行ファイル(bash.exe)ではありません。",
|
"invalidPath": "選択されたファイルは有効なGit Bash実行ファイル(bash.exe)ではありません。",
|
||||||
"title": "Git Bash実行ファイルを選択"
|
"title": "Git Bash実行ファイルを選択"
|
||||||
},
|
},
|
||||||
"success": "Git Bashが正常に検出されました!"
|
"placeholder": "[to be translated]:Select bash.exe path",
|
||||||
|
"success": "Git Bashが正常に検出されました!",
|
||||||
|
"tooltip": "[to be translated]:Git Bash is required to run agents on Windows. Install from git-scm.com if not available."
|
||||||
},
|
},
|
||||||
"input": {
|
"input": {
|
||||||
"placeholder": "メッセージをここに入力し、{{key}}で送信 - @でパスを選択、/でコマンドを選択"
|
"placeholder": "メッセージをここに入力し、{{key}}で送信 - @でパスを選択、/でコマンドを選択"
|
||||||
|
|||||||
@ -32,6 +32,7 @@
|
|||||||
},
|
},
|
||||||
"gitBash": {
|
"gitBash": {
|
||||||
"autoDetected": "Usando Git Bash detectado automaticamente",
|
"autoDetected": "Usando Git Bash detectado automaticamente",
|
||||||
|
"autoDiscoveredHint": "[to be translated]:Auto-discovered",
|
||||||
"clear": {
|
"clear": {
|
||||||
"button": "Limpar caminho personalizado"
|
"button": "Limpar caminho personalizado"
|
||||||
},
|
},
|
||||||
@ -39,6 +40,7 @@
|
|||||||
"error": {
|
"error": {
|
||||||
"description": "O Git Bash é necessário para executar agentes no Windows. O agente não pode funcionar sem ele. Por favor, instale o Git para Windows a partir de",
|
"description": "O Git Bash é necessário para executar agentes no Windows. O agente não pode funcionar sem ele. Por favor, instale o Git para Windows a partir de",
|
||||||
"recheck": "Reverificar a Instalação do Git Bash",
|
"recheck": "Reverificar a Instalação do Git Bash",
|
||||||
|
"required": "[to be translated]:Git Bash path is required on Windows",
|
||||||
"title": "Git Bash Necessário"
|
"title": "Git Bash Necessário"
|
||||||
},
|
},
|
||||||
"found": {
|
"found": {
|
||||||
@ -51,7 +53,9 @@
|
|||||||
"invalidPath": "O arquivo selecionado não é um executável válido do Git Bash (bash.exe).",
|
"invalidPath": "O arquivo selecionado não é um executável válido do Git Bash (bash.exe).",
|
||||||
"title": "Selecionar executável do Git Bash"
|
"title": "Selecionar executável do Git Bash"
|
||||||
},
|
},
|
||||||
"success": "Git Bash detectado com sucesso!"
|
"placeholder": "[to be translated]:Select bash.exe path",
|
||||||
|
"success": "Git Bash detectado com sucesso!",
|
||||||
|
"tooltip": "[to be translated]:Git Bash is required to run agents on Windows. Install from git-scm.com if not available."
|
||||||
},
|
},
|
||||||
"input": {
|
"input": {
|
||||||
"placeholder": "Digite sua mensagem aqui, envie com {{key}} - @ selecionar caminho, / selecionar comando"
|
"placeholder": "Digite sua mensagem aqui, envie com {{key}} - @ selecionar caminho, / selecionar comando"
|
||||||
|
|||||||
@ -32,6 +32,7 @@
|
|||||||
},
|
},
|
||||||
"gitBash": {
|
"gitBash": {
|
||||||
"autoDetected": "Используется автоматически обнаруженный Git Bash",
|
"autoDetected": "Используется автоматически обнаруженный Git Bash",
|
||||||
|
"autoDiscoveredHint": "[to be translated]:Auto-discovered",
|
||||||
"clear": {
|
"clear": {
|
||||||
"button": "Очистить пользовательский путь"
|
"button": "Очистить пользовательский путь"
|
||||||
},
|
},
|
||||||
@ -39,6 +40,7 @@
|
|||||||
"error": {
|
"error": {
|
||||||
"description": "Для запуска агентов в Windows требуется Git Bash. Без него агент не может работать. Пожалуйста, установите Git для Windows с",
|
"description": "Для запуска агентов в Windows требуется Git Bash. Без него агент не может работать. Пожалуйста, установите Git для Windows с",
|
||||||
"recheck": "Повторная проверка установки Git Bash",
|
"recheck": "Повторная проверка установки Git Bash",
|
||||||
|
"required": "[to be translated]:Git Bash path is required on Windows",
|
||||||
"title": "Требуется Git Bash"
|
"title": "Требуется Git Bash"
|
||||||
},
|
},
|
||||||
"found": {
|
"found": {
|
||||||
@ -51,7 +53,9 @@
|
|||||||
"invalidPath": "Выбранный файл не является допустимым исполняемым файлом Git Bash (bash.exe).",
|
"invalidPath": "Выбранный файл не является допустимым исполняемым файлом Git Bash (bash.exe).",
|
||||||
"title": "Выберите исполняемый файл Git Bash"
|
"title": "Выберите исполняемый файл Git Bash"
|
||||||
},
|
},
|
||||||
"success": "Git Bash успешно обнаружен!"
|
"placeholder": "[to be translated]:Select bash.exe path",
|
||||||
|
"success": "Git Bash успешно обнаружен!",
|
||||||
|
"tooltip": "[to be translated]:Git Bash is required to run agents on Windows. Install from git-scm.com if not available."
|
||||||
},
|
},
|
||||||
"input": {
|
"input": {
|
||||||
"placeholder": "Введите ваше сообщение здесь, отправьте с помощью {{key}} — @ выбрать путь, / выбрать команду"
|
"placeholder": "Введите ваше сообщение здесь, отправьте с помощью {{key}} — @ выбрать путь, / выбрать команду"
|
||||||
|
|||||||
@ -61,9 +61,14 @@ const BuiltinMCPServerList: FC = () => {
|
|||||||
{getMcpTypeLabel(server.type ?? 'stdio')}
|
{getMcpTypeLabel(server.type ?? 'stdio')}
|
||||||
</Tag>
|
</Tag>
|
||||||
{server?.shouldConfig && (
|
{server?.shouldConfig && (
|
||||||
<Tag color="warning" style={{ borderRadius: 20, margin: 0, fontWeight: 500 }}>
|
<a
|
||||||
{t('settings.mcp.requiresConfig')}
|
href="https://docs.cherry-ai.com/advanced-basic/mcp/buildin"
|
||||||
</Tag>
|
target="_blank"
|
||||||
|
rel="noopener noreferrer">
|
||||||
|
<Tag color="warning" style={{ borderRadius: 20, margin: 0, fontWeight: 500 }}>
|
||||||
|
{t('settings.mcp.requiresConfig')}
|
||||||
|
</Tag>
|
||||||
|
</a>
|
||||||
)}
|
)}
|
||||||
</ServerFooter>
|
</ServerFooter>
|
||||||
</ServerCard>
|
</ServerCard>
|
||||||
|
|||||||
@ -34,6 +34,10 @@ import {
|
|||||||
getProviderByModel,
|
getProviderByModel,
|
||||||
getQuickModel
|
getQuickModel
|
||||||
} from './AssistantService'
|
} from './AssistantService'
|
||||||
|
import { ConversationService } from './ConversationService'
|
||||||
|
import { injectUserMessageWithKnowledgeSearchPrompt } from './KnowledgeService'
|
||||||
|
import type { BlockManager } from './messageStreaming'
|
||||||
|
import type { StreamProcessorCallbacks } from './StreamProcessingService'
|
||||||
// import { processKnowledgeSearch } from './KnowledgeService'
|
// import { processKnowledgeSearch } from './KnowledgeService'
|
||||||
// import {
|
// import {
|
||||||
// filterContextMessages,
|
// filterContextMessages,
|
||||||
@ -79,6 +83,59 @@ export async function fetchMcpTools(assistant: Assistant) {
|
|||||||
return mcpTools
|
return mcpTools
|
||||||
}
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* 将用户消息转换为LLM可以理解的格式并发送请求
|
||||||
|
* @param request - 包含消息内容和助手信息的请求对象
|
||||||
|
* @param onChunkReceived - 接收流式响应数据的回调函数
|
||||||
|
*/
|
||||||
|
// 目前先按照函数来写,后续如果有需要到class的地方就改回来
|
||||||
|
export async function transformMessagesAndFetch(
|
||||||
|
request: {
|
||||||
|
messages: Message[]
|
||||||
|
assistant: Assistant
|
||||||
|
blockManager: BlockManager
|
||||||
|
assistantMsgId: string
|
||||||
|
callbacks: StreamProcessorCallbacks
|
||||||
|
topicId?: string // 添加 topicId 用于 trace
|
||||||
|
options: {
|
||||||
|
signal?: AbortSignal
|
||||||
|
timeout?: number
|
||||||
|
headers?: Record<string, string>
|
||||||
|
}
|
||||||
|
},
|
||||||
|
onChunkReceived: (chunk: Chunk) => void
|
||||||
|
) {
|
||||||
|
const { messages, assistant } = request
|
||||||
|
|
||||||
|
try {
|
||||||
|
const { modelMessages, uiMessages } = await ConversationService.prepareMessagesForModel(messages, assistant)
|
||||||
|
|
||||||
|
// replace prompt variables
|
||||||
|
assistant.prompt = await replacePromptVariables(assistant.prompt, assistant.model?.name)
|
||||||
|
|
||||||
|
// inject knowledge search prompt into model messages
|
||||||
|
await injectUserMessageWithKnowledgeSearchPrompt({
|
||||||
|
modelMessages,
|
||||||
|
assistant,
|
||||||
|
assistantMsgId: request.assistantMsgId,
|
||||||
|
topicId: request.topicId,
|
||||||
|
blockManager: request.blockManager,
|
||||||
|
setCitationBlockId: request.callbacks.setCitationBlockId!
|
||||||
|
})
|
||||||
|
|
||||||
|
await fetchChatCompletion({
|
||||||
|
messages: modelMessages,
|
||||||
|
assistant: assistant,
|
||||||
|
topicId: request.topicId,
|
||||||
|
requestOptions: request.options,
|
||||||
|
uiMessages,
|
||||||
|
onChunkReceived
|
||||||
|
})
|
||||||
|
} catch (error: any) {
|
||||||
|
onChunkReceived({ type: ChunkType.ERROR, error })
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
export async function fetchChatCompletion({
|
export async function fetchChatCompletion({
|
||||||
messages,
|
messages,
|
||||||
prompt,
|
prompt,
|
||||||
|
|||||||
@ -2,10 +2,13 @@ import { loggerService } from '@logger'
|
|||||||
import type { Span } from '@opentelemetry/api'
|
import type { Span } from '@opentelemetry/api'
|
||||||
import { ModernAiProvider } from '@renderer/aiCore'
|
import { ModernAiProvider } from '@renderer/aiCore'
|
||||||
import AiProvider from '@renderer/aiCore/legacy'
|
import AiProvider from '@renderer/aiCore/legacy'
|
||||||
|
import { getMessageContent } from '@renderer/aiCore/plugins/searchOrchestrationPlugin'
|
||||||
import { DEFAULT_KNOWLEDGE_DOCUMENT_COUNT, DEFAULT_KNOWLEDGE_THRESHOLD } from '@renderer/config/constant'
|
import { DEFAULT_KNOWLEDGE_DOCUMENT_COUNT, DEFAULT_KNOWLEDGE_THRESHOLD } from '@renderer/config/constant'
|
||||||
import { getEmbeddingMaxContext } from '@renderer/config/embedings'
|
import { getEmbeddingMaxContext } from '@renderer/config/embedings'
|
||||||
|
import { REFERENCE_PROMPT } from '@renderer/config/prompts'
|
||||||
import { addSpan, endSpan } from '@renderer/services/SpanManagerService'
|
import { addSpan, endSpan } from '@renderer/services/SpanManagerService'
|
||||||
import store from '@renderer/store'
|
import store from '@renderer/store'
|
||||||
|
import type { Assistant } from '@renderer/types'
|
||||||
import {
|
import {
|
||||||
type FileMetadata,
|
type FileMetadata,
|
||||||
type KnowledgeBase,
|
type KnowledgeBase,
|
||||||
@ -16,13 +19,17 @@ import {
|
|||||||
} from '@renderer/types'
|
} from '@renderer/types'
|
||||||
import type { Chunk } from '@renderer/types/chunk'
|
import type { Chunk } from '@renderer/types/chunk'
|
||||||
import { ChunkType } from '@renderer/types/chunk'
|
import { ChunkType } from '@renderer/types/chunk'
|
||||||
|
import { MessageBlockStatus, MessageBlockType } from '@renderer/types/newMessage'
|
||||||
import { routeToEndpoint } from '@renderer/utils'
|
import { routeToEndpoint } from '@renderer/utils'
|
||||||
import type { ExtractResults } from '@renderer/utils/extract'
|
import type { ExtractResults } from '@renderer/utils/extract'
|
||||||
|
import { createCitationBlock } from '@renderer/utils/messageUtils/create'
|
||||||
import { isAzureOpenAIProvider, isGeminiProvider } from '@renderer/utils/provider'
|
import { isAzureOpenAIProvider, isGeminiProvider } from '@renderer/utils/provider'
|
||||||
|
import type { ModelMessage, UserModelMessage } from 'ai'
|
||||||
import { isEmpty } from 'lodash'
|
import { isEmpty } from 'lodash'
|
||||||
|
|
||||||
import { getProviderByModel } from './AssistantService'
|
import { getProviderByModel } from './AssistantService'
|
||||||
import FileManager from './FileManager'
|
import FileManager from './FileManager'
|
||||||
|
import type { BlockManager } from './messageStreaming'
|
||||||
|
|
||||||
const logger = loggerService.withContext('RendererKnowledgeService')
|
const logger = loggerService.withContext('RendererKnowledgeService')
|
||||||
|
|
||||||
@ -338,3 +345,128 @@ export function processKnowledgeReferences(
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
export const injectUserMessageWithKnowledgeSearchPrompt = async ({
|
||||||
|
modelMessages,
|
||||||
|
assistant,
|
||||||
|
assistantMsgId,
|
||||||
|
topicId,
|
||||||
|
blockManager,
|
||||||
|
setCitationBlockId
|
||||||
|
}: {
|
||||||
|
modelMessages: ModelMessage[]
|
||||||
|
assistant: Assistant
|
||||||
|
assistantMsgId: string
|
||||||
|
topicId?: string
|
||||||
|
blockManager: BlockManager
|
||||||
|
setCitationBlockId: (blockId: string) => void
|
||||||
|
}) => {
|
||||||
|
if (assistant.knowledge_bases?.length && modelMessages.length > 0) {
|
||||||
|
const lastUserMessage = modelMessages[modelMessages.length - 1]
|
||||||
|
const isUserMessage = lastUserMessage.role === 'user'
|
||||||
|
|
||||||
|
if (!isUserMessage) {
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
const knowledgeReferences = await getKnowledgeReferences({
|
||||||
|
assistant,
|
||||||
|
lastUserMessage,
|
||||||
|
topicId: topicId
|
||||||
|
})
|
||||||
|
|
||||||
|
if (knowledgeReferences.length === 0) {
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
await createKnowledgeReferencesBlock({
|
||||||
|
assistantMsgId,
|
||||||
|
knowledgeReferences,
|
||||||
|
blockManager,
|
||||||
|
setCitationBlockId
|
||||||
|
})
|
||||||
|
|
||||||
|
const question = getMessageContent(lastUserMessage) || ''
|
||||||
|
const references = JSON.stringify(knowledgeReferences, null, 2)
|
||||||
|
|
||||||
|
const knowledgeSearchPrompt = REFERENCE_PROMPT.replace('{question}', question).replace('{references}', references)
|
||||||
|
|
||||||
|
if (typeof lastUserMessage.content === 'string') {
|
||||||
|
lastUserMessage.content = knowledgeSearchPrompt
|
||||||
|
} else if (Array.isArray(lastUserMessage.content)) {
|
||||||
|
const textPart = lastUserMessage.content.find((part) => part.type === 'text')
|
||||||
|
if (textPart) {
|
||||||
|
textPart.text = knowledgeSearchPrompt
|
||||||
|
} else {
|
||||||
|
lastUserMessage.content.push({
|
||||||
|
type: 'text',
|
||||||
|
text: knowledgeSearchPrompt
|
||||||
|
})
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
export const getKnowledgeReferences = async ({
|
||||||
|
assistant,
|
||||||
|
lastUserMessage,
|
||||||
|
topicId
|
||||||
|
}: {
|
||||||
|
assistant: Assistant
|
||||||
|
lastUserMessage: UserModelMessage
|
||||||
|
topicId?: string
|
||||||
|
}) => {
|
||||||
|
// 如果助手没有知识库,返回空字符串
|
||||||
|
if (!assistant || isEmpty(assistant.knowledge_bases)) {
|
||||||
|
return []
|
||||||
|
}
|
||||||
|
|
||||||
|
// 获取知识库ID
|
||||||
|
const knowledgeBaseIds = assistant.knowledge_bases?.map((base) => base.id)
|
||||||
|
|
||||||
|
// 获取用户消息内容
|
||||||
|
const question = getMessageContent(lastUserMessage) || ''
|
||||||
|
|
||||||
|
// 获取知识库引用
|
||||||
|
const knowledgeReferences = await processKnowledgeSearch(
|
||||||
|
{
|
||||||
|
knowledge: {
|
||||||
|
question: [question],
|
||||||
|
rewrite: ''
|
||||||
|
}
|
||||||
|
},
|
||||||
|
knowledgeBaseIds,
|
||||||
|
topicId!
|
||||||
|
)
|
||||||
|
|
||||||
|
// 返回提示词
|
||||||
|
return knowledgeReferences
|
||||||
|
}
|
||||||
|
|
||||||
|
export const createKnowledgeReferencesBlock = async ({
|
||||||
|
assistantMsgId,
|
||||||
|
knowledgeReferences,
|
||||||
|
blockManager,
|
||||||
|
setCitationBlockId
|
||||||
|
}: {
|
||||||
|
assistantMsgId: string
|
||||||
|
knowledgeReferences: KnowledgeReference[]
|
||||||
|
blockManager: BlockManager
|
||||||
|
setCitationBlockId: (blockId: string) => void
|
||||||
|
}) => {
|
||||||
|
// 创建引用块
|
||||||
|
const citationBlock = createCitationBlock(
|
||||||
|
assistantMsgId,
|
||||||
|
{ knowledge: knowledgeReferences },
|
||||||
|
{ status: MessageBlockStatus.SUCCESS }
|
||||||
|
)
|
||||||
|
|
||||||
|
// 处理引用块
|
||||||
|
blockManager.handleBlockTransition(citationBlock, MessageBlockType.CITATION)
|
||||||
|
|
||||||
|
// 设置引用块ID
|
||||||
|
setCitationBlockId(citationBlock.id)
|
||||||
|
|
||||||
|
// 返回引用块
|
||||||
|
return citationBlock
|
||||||
|
}
|
||||||
|
|||||||
@ -1,91 +0,0 @@
|
|||||||
import type { Assistant, Message } from '@renderer/types'
|
|
||||||
import type { Chunk } from '@renderer/types/chunk'
|
|
||||||
import { ChunkType } from '@renderer/types/chunk'
|
|
||||||
import { replacePromptVariables } from '@renderer/utils/prompt'
|
|
||||||
|
|
||||||
import { fetchChatCompletion } from './ApiService'
|
|
||||||
import { ConversationService } from './ConversationService'
|
|
||||||
|
|
||||||
/**
|
|
||||||
* The request object for handling a user message.
|
|
||||||
*/
|
|
||||||
export interface OrchestrationRequest {
|
|
||||||
messages: Message[]
|
|
||||||
assistant: Assistant
|
|
||||||
options: {
|
|
||||||
signal?: AbortSignal
|
|
||||||
timeout?: number
|
|
||||||
headers?: Record<string, string>
|
|
||||||
}
|
|
||||||
topicId?: string // 添加 topicId 用于 trace
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* The OrchestrationService is responsible for orchestrating the different services
|
|
||||||
* to handle a user's message. It contains the core logic of the application.
|
|
||||||
*/
|
|
||||||
// NOTE:暂时没有用到这个类
|
|
||||||
export class OrchestrationService {
|
|
||||||
constructor() {
|
|
||||||
// In the future, this could be a singleton, but for now, a new instance is fine.
|
|
||||||
// this.conversationService = new ConversationService()
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* This is the core method to handle user messages.
|
|
||||||
* It takes the message context and an events object for callbacks,
|
|
||||||
* and orchestrates the call to the LLM.
|
|
||||||
* The logic is moved from `messageThunk.ts`.
|
|
||||||
* @param request The orchestration request containing messages and assistant info.
|
|
||||||
* @param events A set of callbacks to report progress and results to the UI layer.
|
|
||||||
*/
|
|
||||||
async transformMessagesAndFetch(request: OrchestrationRequest, onChunkReceived: (chunk: Chunk) => void) {
|
|
||||||
const { messages, assistant } = request
|
|
||||||
|
|
||||||
try {
|
|
||||||
const { modelMessages, uiMessages } = await ConversationService.prepareMessagesForModel(messages, assistant)
|
|
||||||
|
|
||||||
await fetchChatCompletion({
|
|
||||||
messages: modelMessages,
|
|
||||||
assistant: assistant,
|
|
||||||
requestOptions: request.options,
|
|
||||||
onChunkReceived,
|
|
||||||
topicId: request.topicId,
|
|
||||||
uiMessages: uiMessages
|
|
||||||
})
|
|
||||||
} catch (error: any) {
|
|
||||||
onChunkReceived({ type: ChunkType.ERROR, error })
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* 将用户消息转换为LLM可以理解的格式并发送请求
|
|
||||||
* @param request - 包含消息内容和助手信息的请求对象
|
|
||||||
* @param onChunkReceived - 接收流式响应数据的回调函数
|
|
||||||
*/
|
|
||||||
// 目前先按照函数来写,后续如果有需要到class的地方就改回来
|
|
||||||
export async function transformMessagesAndFetch(
|
|
||||||
request: OrchestrationRequest,
|
|
||||||
onChunkReceived: (chunk: Chunk) => void
|
|
||||||
) {
|
|
||||||
const { messages, assistant } = request
|
|
||||||
|
|
||||||
try {
|
|
||||||
const { modelMessages, uiMessages } = await ConversationService.prepareMessagesForModel(messages, assistant)
|
|
||||||
|
|
||||||
// replace prompt variables
|
|
||||||
assistant.prompt = await replacePromptVariables(assistant.prompt, assistant.model?.name)
|
|
||||||
|
|
||||||
await fetchChatCompletion({
|
|
||||||
messages: modelMessages,
|
|
||||||
assistant: assistant,
|
|
||||||
requestOptions: request.options,
|
|
||||||
onChunkReceived,
|
|
||||||
topicId: request.topicId,
|
|
||||||
uiMessages
|
|
||||||
})
|
|
||||||
} catch (error: any) {
|
|
||||||
onChunkReceived({ type: ChunkType.ERROR, error })
|
|
||||||
}
|
|
||||||
}
|
|
||||||
@ -34,6 +34,10 @@ export interface StreamProcessorCallbacks {
|
|||||||
onLLMWebSearchInProgress?: () => void
|
onLLMWebSearchInProgress?: () => void
|
||||||
// LLM Web search complete
|
// LLM Web search complete
|
||||||
onLLMWebSearchComplete?: (llmWebSearchResult: WebSearchResponse) => void
|
onLLMWebSearchComplete?: (llmWebSearchResult: WebSearchResponse) => void
|
||||||
|
// Get citation block ID
|
||||||
|
getCitationBlockId?: () => string | null
|
||||||
|
// Set citation block ID
|
||||||
|
setCitationBlockId?: (blockId: string) => void
|
||||||
// Image generation chunk received
|
// Image generation chunk received
|
||||||
onImageCreated?: () => void
|
onImageCreated?: () => void
|
||||||
onImageDelta?: (imageData: GenerateImageResponse) => void
|
onImageDelta?: (imageData: GenerateImageResponse) => void
|
||||||
|
|||||||
@ -121,6 +121,11 @@ export const createCitationCallbacks = (deps: CitationCallbacksDependencies) =>
|
|||||||
},
|
},
|
||||||
|
|
||||||
// 暴露给外部的方法,用于textCallbacks中获取citationBlockId
|
// 暴露给外部的方法,用于textCallbacks中获取citationBlockId
|
||||||
getCitationBlockId: () => citationBlockId
|
getCitationBlockId: () => citationBlockId,
|
||||||
|
|
||||||
|
// 暴露给外部的方法,用于 KnowledgeService 中设置 citationBlockId
|
||||||
|
setCitationBlockId: (blockId: string) => {
|
||||||
|
citationBlockId = blockId
|
||||||
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|||||||
@ -2,12 +2,11 @@ import { loggerService } from '@logger'
|
|||||||
import { AiSdkToChunkAdapter } from '@renderer/aiCore/chunk/AiSdkToChunkAdapter'
|
import { AiSdkToChunkAdapter } from '@renderer/aiCore/chunk/AiSdkToChunkAdapter'
|
||||||
import { AgentApiClient } from '@renderer/api/agent'
|
import { AgentApiClient } from '@renderer/api/agent'
|
||||||
import db from '@renderer/databases'
|
import db from '@renderer/databases'
|
||||||
import { fetchMessagesSummary } from '@renderer/services/ApiService'
|
import { fetchMessagesSummary, transformMessagesAndFetch } from '@renderer/services/ApiService'
|
||||||
import { DbService } from '@renderer/services/db/DbService'
|
import { DbService } from '@renderer/services/db/DbService'
|
||||||
import FileManager from '@renderer/services/FileManager'
|
import FileManager from '@renderer/services/FileManager'
|
||||||
import { BlockManager } from '@renderer/services/messageStreaming/BlockManager'
|
import { BlockManager } from '@renderer/services/messageStreaming/BlockManager'
|
||||||
import { createCallbacks } from '@renderer/services/messageStreaming/callbacks'
|
import { createCallbacks } from '@renderer/services/messageStreaming/callbacks'
|
||||||
import { transformMessagesAndFetch } from '@renderer/services/OrchestrateService'
|
|
||||||
import { endSpan } from '@renderer/services/SpanManagerService'
|
import { endSpan } from '@renderer/services/SpanManagerService'
|
||||||
import { createStreamProcessor, type StreamProcessorCallbacks } from '@renderer/services/StreamProcessingService'
|
import { createStreamProcessor, type StreamProcessorCallbacks } from '@renderer/services/StreamProcessingService'
|
||||||
import store from '@renderer/store'
|
import store from '@renderer/store'
|
||||||
@ -814,6 +813,9 @@ const fetchAndProcessAssistantResponseImpl = async (
|
|||||||
messages: messagesForContext,
|
messages: messagesForContext,
|
||||||
assistant,
|
assistant,
|
||||||
topicId,
|
topicId,
|
||||||
|
blockManager,
|
||||||
|
assistantMsgId,
|
||||||
|
callbacks,
|
||||||
options: {
|
options: {
|
||||||
signal: abortController.signal,
|
signal: abortController.signal,
|
||||||
timeout: 30000,
|
timeout: 30000,
|
||||||
|
|||||||
Loading…
Reference in New Issue
Block a user