Mirror of https://github.com/CherryHQ/cherry-studio.git (synced 2025-12-24 10:40:07 +08:00)

commit dee397f6ac
Merge branch 'main' of github.com:CherryHQ/cherry-studio into v2
.github/workflows/auto-i18n.yml | 2 (vendored)
@@ -26,7 +26,7 @@ jobs:
          ref: ${{ github.event.pull_request.head.ref }}

      - name: 📦 Setting Node.js
-        uses: actions/setup-node@v4
+        uses: actions/setup-node@v5
        with:
          node-version: 20
.github/workflows/claude-code-review.yml | 2 (vendored)
@@ -27,7 +27,7 @@ jobs:

    steps:
      - name: Checkout repository
-       uses: actions/checkout@v4
+       uses: actions/checkout@v5
        with:
          fetch-depth: 1
.github/workflows/claude-translator.yml | 2 (vendored)
@@ -29,7 +29,7 @@ jobs:

    steps:
      - name: Checkout repository
-       uses: actions/checkout@v4
+       uses: actions/checkout@v5
        with:
          fetch-depth: 1
.github/workflows/claude.yml | 2 (vendored)
@@ -37,7 +37,7 @@ jobs:
      actions: read # Required for Claude to read CI results on PRs
    steps:
      - name: Checkout repository
-       uses: actions/checkout@v4
+       uses: actions/checkout@v5
        with:
          fetch-depth: 1
.github/workflows/delete-branch.yml | 2 (vendored)
@@ -12,7 +12,7 @@ jobs:
    if: github.event.pull_request.merged == true && github.event.pull_request.head.repo.full_name == github.repository
    steps:
      - name: Delete merged branch
-       uses: actions/github-script@v7
+       uses: actions/github-script@v8
        with:
          script: |
            github.rest.git.deleteRef({
.github/workflows/nightly-build.yml | 2 (vendored)
@@ -56,7 +56,7 @@ jobs:
          ref: main

      - name: Install Node.js
-        uses: actions/setup-node@v4
+        uses: actions/setup-node@v5
        with:
          node-version: 20
.github/workflows/pr-ci.yml | 2 (vendored)
@@ -24,7 +24,7 @@ jobs:
        uses: actions/checkout@v5

      - name: Install Node.js
-        uses: actions/setup-node@v4
+        uses: actions/setup-node@v5
        with:
          node-version: 20
.github/workflows/release.yml | 2 (vendored)
@@ -47,7 +47,7 @@ jobs:
          npm version "$VERSION" --no-git-tag-version --allow-same-version

      - name: Install Node.js
-        uses: actions/setup-node@v4
+        uses: actions/setup-node@v5
        with:
          node-version: 20
package.json | 12
@@ -99,10 +99,10 @@
    "@agentic/exa": "^7.3.3",
    "@agentic/searxng": "^7.3.3",
    "@agentic/tavily": "^7.3.3",
-    "@ai-sdk/amazon-bedrock": "^3.0.21",
-    "@ai-sdk/google-vertex": "^3.0.27",
-    "@ai-sdk/mistral": "^2.0.14",
-    "@ai-sdk/perplexity": "^2.0.9",
+    "@ai-sdk/amazon-bedrock": "^3.0.29",
+    "@ai-sdk/google-vertex": "^3.0.33",
+    "@ai-sdk/mistral": "^2.0.17",
+    "@ai-sdk/perplexity": "^2.0.11",
    "@ant-design/v5-patch-for-react-19": "^1.0.3",
    "@anthropic-ai/sdk": "^0.41.0",
    "@anthropic-ai/vertex-sdk": "patch:@anthropic-ai/vertex-sdk@npm%3A0.11.4#~/.yarn/patches/@anthropic-ai-vertex-sdk-npm-0.11.4-c19cb41edb.patch",
@@ -217,7 +217,7 @@
    "@viz-js/lang-dot": "^1.0.5",
    "@viz-js/viz": "^3.14.0",
    "@xyflow/react": "^12.4.4",
-    "ai": "^5.0.44",
+    "ai": "^5.0.59",
    "antd": "patch:antd@npm%3A5.27.0#~/.yarn/patches/antd-npm-5.27.0-aa91c36546.patch",
    "archiver": "^7.0.1",
    "async-mutex": "^0.5.0",
@@ -242,7 +242,7 @@
    "dotenv-cli": "^7.4.2",
    "drizzle-kit": "^0.31.4",
    "drizzle-orm": "^0.44.2",
-    "electron": "37.4.0",
+    "electron": "37.6.0",
    "electron-builder": "26.0.15",
    "electron-devtools-installer": "^3.2.0",
    "electron-store": "^8.2.0",
@@ -1,6 +1,6 @@
{
  "name": "@cherrystudio/ai-core",
-  "version": "1.0.0-alpha.18",
+  "version": "1.0.1",
  "description": "Cherry Studio AI Core - Unified AI Provider Interface Based on Vercel AI SDK",
  "main": "dist/index.js",
  "module": "dist/index.mjs",
@@ -36,14 +36,14 @@
    "ai": "^5.0.26"
  },
  "dependencies": {
-    "@ai-sdk/anthropic": "^2.0.17",
-    "@ai-sdk/azure": "^2.0.30",
-    "@ai-sdk/deepseek": "^1.0.17",
-    "@ai-sdk/openai": "^2.0.30",
-    "@ai-sdk/openai-compatible": "^1.0.17",
+    "@ai-sdk/anthropic": "^2.0.22",
+    "@ai-sdk/azure": "^2.0.42",
+    "@ai-sdk/deepseek": "^1.0.20",
+    "@ai-sdk/openai": "^2.0.42",
+    "@ai-sdk/openai-compatible": "^1.0.19",
    "@ai-sdk/provider": "^2.0.0",
-    "@ai-sdk/provider-utils": "^3.0.9",
-    "@ai-sdk/xai": "^2.0.18",
+    "@ai-sdk/provider-utils": "^3.0.10",
+    "@ai-sdk/xai": "^2.0.23",
    "zod": "^4.1.5"
  },
  "devDependencies": {
@@ -34,6 +34,7 @@ export enum IpcChannel {
  App_GetBinaryPath = 'app:get-binary-path',
  App_InstallUvBinary = 'app:install-uv-binary',
  App_InstallBunBinary = 'app:install-bun-binary',
+  App_InstallOvmsBinary = 'app:install-ovms-binary',
  App_LogToMain = 'app:log-to-main',
  App_SaveData = 'app:save-data',
  App_GetDiskInfo = 'app:get-disk-info',
@@ -236,6 +237,7 @@ export enum IpcChannel {
  // system
  System_GetDeviceType = 'system:getDeviceType',
  System_GetHostname = 'system:getHostname',
+  System_GetCpuName = 'system:getCpuName',

  // DevTools
  System_ToggleDevTools = 'system:toggleDevTools',
@@ -362,6 +364,15 @@ export enum IpcChannel {
  // OCR
  OCR_ocr = 'ocr:ocr',

+  // OVMS
+  Ovms_AddModel = 'ovms:add-model',
+  Ovms_StopAddModel = 'ovms:stop-addmodel',
+  Ovms_GetModels = 'ovms:get-models',
+  Ovms_IsRunning = 'ovms:is-running',
+  Ovms_GetStatus = 'ovms:get-status',
+  Ovms_RunOVMS = 'ovms:run-ovms',
+  Ovms_StopOVMS = 'ovms:stop-ovms',
+
  // CherryAI
  Cherryai_GetSignature = 'cherryai:get-signature'
}
@@ -217,7 +217,8 @@ export enum codeTools {
  claudeCode = 'claude-code',
  geminiCli = 'gemini-cli',
  openaiCodex = 'openai-codex',
-  iFlowCli = 'iflow-cli'
+  iFlowCli = 'iflow-cli',
+  githubCopilotCli = 'github-copilot-cli'
}

export enum terminalApps {
@@ -1,5 +1,7 @@
const https = require('https')
const fs = require('fs')
const path = require('path')
const { execSync } = require('child_process')

/**
 * Downloads a file from a URL with redirect handling
@@ -32,4 +34,39 @@ async function downloadWithRedirects(url, destinationPath) {
  })
}

-module.exports = { downloadWithRedirects }
+/**
+ * Downloads a file using PowerShell Invoke-WebRequest command
+ * @param {string} url The URL to download from
+ * @param {string} destinationPath The path to save the file to
+ * @returns {Promise<boolean>} Promise that resolves to true if download succeeds
+ */
+async function downloadWithPowerShell(url, destinationPath) {
+  return new Promise((resolve, reject) => {
+    try {
+      // Only support windows platform for PowerShell download
+      if (process.platform !== 'win32') {
+        return reject(new Error('PowerShell download is only supported on Windows'))
+      }
+
+      const outputDir = path.dirname(destinationPath)
+      fs.mkdirSync(outputDir, { recursive: true })
+
+      // PowerShell command to download the file with progress disabled for faster download
+      const psCommand = `powershell -Command "$ProgressPreference = 'SilentlyContinue'; Invoke-WebRequest '${url}' -OutFile '${destinationPath}'"`
+
+      console.log(`Downloading with PowerShell: ${url}`)
+      execSync(psCommand, { stdio: 'inherit' })
+
+      if (fs.existsSync(destinationPath)) {
+        console.log(`Download completed: ${destinationPath}`)
+        resolve(true)
+      } else {
+        reject(new Error('Download failed: File not found after download'))
+      }
+    } catch (error) {
+      reject(new Error(`PowerShell download failed: ${error.message}`))
+    }
+  })
+}
+
+module.exports = { downloadWithRedirects, downloadWithPowerShell }
resources/scripts/install-ovms.js | 177 (new file)
@@ -0,0 +1,177 @@
const fs = require('fs')
const path = require('path')
const os = require('os')
const { execSync } = require('child_process')
const { downloadWithPowerShell } = require('./download')

// Base URL for downloading OVMS binaries
const OVMS_PKG_NAME = 'ovms250911.zip'
const OVMS_RELEASE_BASE_URL = [`https://gitcode.com/gcw_ggDjjkY3/kjfile/releases/download/download/${OVMS_PKG_NAME}`]

/**
 * Downloads and extracts the OVMS binary for the specified platform
 */
async function downloadOvmsBinary() {
  // Create output directory structure - OVMS goes into its own subdirectory
  const csDir = path.join(os.homedir(), '.cherrystudio')

  // Ensure directories exist
  fs.mkdirSync(csDir, { recursive: true })

  const csOvmsDir = path.join(csDir, 'ovms')
  // Delete existing OVMS directory if it exists
  if (fs.existsSync(csOvmsDir)) {
    fs.rmSync(csOvmsDir, { recursive: true })
  }

  const tempdir = os.tmpdir()
  const tempFilename = path.join(tempdir, 'ovms.zip')

  // Try each URL until one succeeds
  let downloadSuccess = false
  let lastError = null

  for (let i = 0; i < OVMS_RELEASE_BASE_URL.length; i++) {
    const downloadUrl = OVMS_RELEASE_BASE_URL[i]
    console.log(`Attempting download from URL ${i + 1}/${OVMS_RELEASE_BASE_URL.length}: ${downloadUrl}`)

    try {
      console.log(`Downloading OVMS from ${downloadUrl} to ${tempFilename}...`)

      // Try PowerShell download first, fallback to Node.js download if it fails
      await downloadWithPowerShell(downloadUrl, tempFilename)

      // If we get here, download was successful
      downloadSuccess = true
      console.log(`Successfully downloaded from: ${downloadUrl}`)
      break
    } catch (error) {
      console.warn(`Download failed from ${downloadUrl}: ${error.message}`)
      lastError = error

      // Clean up failed download file if it exists
      if (fs.existsSync(tempFilename)) {
        try {
          fs.unlinkSync(tempFilename)
        } catch (cleanupError) {
          console.warn(`Failed to clean up temporary file: ${cleanupError.message}`)
        }
      }

      // Continue to next URL if this one failed
      if (i < OVMS_RELEASE_BASE_URL.length - 1) {
        console.log(`Trying next URL...`)
      }
    }
  }

  // Check if any download succeeded
  if (!downloadSuccess) {
    console.error(`All download URLs failed. Last error: ${lastError?.message || 'Unknown error'}`)
    return 103
  }

  try {
    console.log(`Extracting to ${csDir}...`)

    // Use tar.exe to extract the ZIP file
    console.log(`Extracting OVMS to ${csDir}...`)
    execSync(`tar -xf ${tempFilename} -C ${csDir}`, { stdio: 'inherit' })
    console.log(`OVMS extracted to ${csDir}`)

    // Clean up temporary file
    fs.unlinkSync(tempFilename)
    console.log(`Installation directory: ${csDir}`)
  } catch (error) {
    console.error(`Error installing OVMS: ${error.message}`)
    if (fs.existsSync(tempFilename)) {
      fs.unlinkSync(tempFilename)
    }

    // Check if ovmsDir is empty and remove it if so
    try {
      const ovmsDir = path.join(csDir, 'ovms')
      const files = fs.readdirSync(ovmsDir)
      if (files.length === 0) {
        fs.rmSync(ovmsDir, { recursive: true })
        console.log(`Removed empty directory: ${ovmsDir}`)
      }
    } catch (cleanupError) {
      console.warn(`Warning: Failed to clean up directory: ${cleanupError.message}`)
      return 105
    }

    return 104
  }

  return 0
}

/**
 * Get the CPU Name and ID
 */
function getCpuInfo() {
  const cpuInfo = {
    name: '',
    id: ''
  }

  // Use PowerShell to get CPU information
  try {
    const psCommand = `powershell -Command "Get-CimInstance -ClassName Win32_Processor | Select-Object Name, DeviceID | ConvertTo-Json"`
    const psOutput = execSync(psCommand).toString()
    const cpuData = JSON.parse(psOutput)

    if (Array.isArray(cpuData)) {
      cpuInfo.name = cpuData[0].Name || ''
      cpuInfo.id = cpuData[0].DeviceID || ''
    } else {
      cpuInfo.name = cpuData.Name || ''
      cpuInfo.id = cpuData.DeviceID || ''
    }
  } catch (error) {
    console.error(`Failed to get CPU info: ${error.message}`)
  }

  return cpuInfo
}

/**
 * Main function to install OVMS
 */
async function installOvms() {
  const platform = os.platform()
  console.log(`Detected platform: ${platform}`)

  const cpuName = getCpuInfo().name
  console.log(`CPU Name: ${cpuName}`)

  // Check if CPU name contains "Ultra"
  if (!cpuName.toLowerCase().includes('intel') || !cpuName.toLowerCase().includes('ultra')) {
    console.error('OVMS installation requires an Intel(R) Core(TM) Ultra CPU.')
    return 101
  }

  // only support windows
  if (platform !== 'win32') {
    console.error('OVMS installation is only supported on Windows.')
    return 102
  }

  return await downloadOvmsBinary()
}

// Run the installation
installOvms()
  .then((retcode) => {
    if (retcode === 0) {
      console.log('OVMS installation successful')
    } else {
      console.error('OVMS installation failed')
    }
    process.exit(retcode)
  })
  .catch((error) => {
    console.error('OVMS installation failed:', error)
    process.exit(100)
  })
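
Note: the installer reports its outcome through the process exit code (0 on success, 101/102 for unsupported CPU or platform, 103-105 for download/extraction failures). A minimal sketch of how a caller might surface that from the renderer, assuming the preload bridge added later in this commit is exposed as window.api and that installOvmsBinary() resolves with the script's exit code (the return value of runInstallScript is not shown in this diff):

// Sketch only: surfacing the installer result in the renderer.
// Assumes the preload bridge is exposed as `window.api` (cast here because
// the exact typings are not part of this diff) and that installOvmsBinary()
// resolves with the script's exit code: 0 = success, 101 = unsupported CPU,
// 102 = non-Windows platform, 103 = download failed, 104/105 = extract/cleanup failed.
const api = (window as any).api

async function installOvmsWithFeedback(): Promise<void> {
  const exitCode: number = await api.installOvmsBinary()
  if (exitCode === 0) {
    console.log('OVMS installed under ~/.cherrystudio/ovms')
  } else {
    console.error(`OVMS install failed with exit code ${exitCode}`)
  }
}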
@@ -39,6 +39,7 @@ import NotificationService from './services/NotificationService'
import * as NutstoreService from './services/NutstoreService'
import ObsidianVaultService from './services/ObsidianVaultService'
import { ocrService } from './services/ocr/OcrService'
+import OvmsManager from './services/OvmsManager'
import { proxyManager } from './services/ProxyManager'
import { pythonService } from './services/PythonService'
import { FileServiceManager } from './services/remotefile/FileServiceManager'
@@ -84,6 +85,7 @@ const obsidianVaultService = new ObsidianVaultService()
const vertexAIService = VertexAIService.getInstance()
const memoryService = MemoryService.getInstance()
const dxtService = new DxtService()
+const ovmsManager = new OvmsManager()

export function registerIpc(mainWindow: BrowserWindow, app: Electron.App) {
  const appUpdater = new AppUpdater()
@@ -433,6 +435,7 @@ export function registerIpc(mainWindow: BrowserWindow, app: Electron.App) {
  // system
  ipcMain.handle(IpcChannel.System_GetDeviceType, () => (isMac ? 'mac' : isWin ? 'windows' : 'linux'))
  ipcMain.handle(IpcChannel.System_GetHostname, () => require('os').hostname())
+  ipcMain.handle(IpcChannel.System_GetCpuName, () => require('os').cpus()[0].model)
  ipcMain.handle(IpcChannel.System_ToggleDevTools, (e) => {
    const win = BrowserWindow.fromWebContents(e.sender)
    win && win.webContents.toggleDevTools()
@@ -711,6 +714,7 @@ export function registerIpc(mainWindow: BrowserWindow, app: Electron.App) {
  ipcMain.handle(IpcChannel.App_GetBinaryPath, (_, name: string) => getBinaryPath(name))
  ipcMain.handle(IpcChannel.App_InstallUvBinary, () => runInstallScript('install-uv.js'))
  ipcMain.handle(IpcChannel.App_InstallBunBinary, () => runInstallScript('install-bun.js'))
+  ipcMain.handle(IpcChannel.App_InstallOvmsBinary, () => runInstallScript('install-ovms.js'))

  //copilot
  ipcMain.handle(IpcChannel.Copilot_GetAuthMessage, CopilotService.getAuthMessage.bind(CopilotService))
@@ -842,6 +846,17 @@ export function registerIpc(mainWindow: BrowserWindow, app: Electron.App) {
    ocrService.ocr(file, provider)
  )

+  // OVMS
+  ipcMain.handle(IpcChannel.Ovms_AddModel, (_, modelName: string, modelId: string, modelSource: string, task: string) =>
+    ovmsManager.addModel(modelName, modelId, modelSource, task)
+  )
+  ipcMain.handle(IpcChannel.Ovms_StopAddModel, () => ovmsManager.stopAddModel())
+  ipcMain.handle(IpcChannel.Ovms_GetModels, () => ovmsManager.getModels())
+  ipcMain.handle(IpcChannel.Ovms_IsRunning, () => ovmsManager.initializeOvms())
+  ipcMain.handle(IpcChannel.Ovms_GetStatus, () => ovmsManager.getOvmsStatus())
+  ipcMain.handle(IpcChannel.Ovms_RunOVMS, () => ovmsManager.runOvms())
+  ipcMain.handle(IpcChannel.Ovms_StopOVMS, () => ovmsManager.stopOvms())
+
  // CherryAI
  ipcMain.handle(IpcChannel.Cherryai_GetSignature, (_, params) => generateSignature(params))
@@ -30,7 +30,10 @@ interface VersionInfo {

class CodeToolsService {
  private versionCache: Map<string, { version: string; timestamp: number }> = new Map()
-  private terminalsCache: { terminals: TerminalConfig[]; timestamp: number } | null = null
+  private terminalsCache: {
+    terminals: TerminalConfig[]
+    timestamp: number
+  } | null = null
  private customTerminalPaths: Map<string, string> = new Map() // Store user-configured terminal paths
  private readonly CACHE_DURATION = 1000 * 60 * 30 // 30 minutes cache
  private readonly TERMINALS_CACHE_DURATION = 1000 * 60 * 5 // 5 minutes cache for terminals
@@ -81,6 +84,8 @@
        return '@qwen-code/qwen-code'
      case codeTools.iFlowCli:
        return '@iflow-ai/iflow-cli'
+      case codeTools.githubCopilotCli:
+        return '@github/copilot'
      default:
        throw new Error(`Unsupported CLI tool: ${cliTool}`)
    }
@@ -98,6 +103,8 @@
        return 'qwen'
      case codeTools.iFlowCli:
        return 'iflow'
+      case codeTools.githubCopilotCli:
+        return 'copilot'
      default:
        throw new Error(`Unsupported CLI tool: ${cliTool}`)
    }
@@ -143,7 +150,9 @@
      case terminalApps.powershell:
        // Check for PowerShell in PATH
        try {
-          await execAsync('powershell -Command "Get-Host"', { timeout: 3000 })
+          await execAsync('powershell -Command "Get-Host"', {
+            timeout: 3000
+          })
          return terminal
        } catch {
          try {
@@ -383,7 +392,9 @@
      const binDir = path.join(os.homedir(), '.cherrystudio', 'bin')
      const executablePath = path.join(binDir, executableName + (isWin ? '.exe' : ''))

-      const { stdout } = await execAsync(`"${executablePath}" --version`, { timeout: 10000 })
+      const { stdout } = await execAsync(`"${executablePath}" --version`, {
+        timeout: 10000
+      })
      // Extract version number from output (format may vary by tool)
      const versionMatch = stdout.trim().match(/\d+\.\d+\.\d+/)
      installedVersion = versionMatch ? versionMatch[0] : stdout.trim().split(' ')[0]
@@ -424,7 +435,10 @@
      logger.info(`${packageName} latest version: ${latestVersion}`)

      // Cache the result
-      this.versionCache.set(cacheKey, { version: latestVersion!, timestamp: now })
+      this.versionCache.set(cacheKey, {
+        version: latestVersion!,
+        timestamp: now
+      })
      logger.debug(`Cached latest version for ${packageName}`)
    } catch (error) {
      logger.warn(`Failed to get latest version for ${packageName}:`, error as Error)
src/main/services/OvmsManager.ts | 586 (new file)
@@ -0,0 +1,586 @@
import { exec } from 'node:child_process'
import { homedir } from 'node:os'
import { promisify } from 'node:util'

import { loggerService } from '@logger'
import * as fs from 'fs-extra'
import * as path from 'path'

const logger = loggerService.withContext('OvmsManager')

const execAsync = promisify(exec)

interface OvmsProcess {
  pid: number
  path: string
  workingDirectory: string
}

interface ModelConfig {
  name: string
  base_path: string
}

interface OvmsConfig {
  mediapipe_config_list: ModelConfig[]
}

class OvmsManager {
  private ovms: OvmsProcess | null = null

  /**
   * Recursively terminate a process and all its child processes
   * @param pid Process ID to terminate
   * @returns Promise<{ success: boolean; message?: string }>
   */
  private async terminalProcess(pid: number): Promise<{ success: boolean; message?: string }> {
    try {
      // Check if the process is running
      const processCheckCommand = `Get-Process -Id ${pid} -ErrorAction SilentlyContinue | Select-Object Id | ConvertTo-Json`
      const { stdout: processStdout } = await execAsync(`powershell -Command "${processCheckCommand}"`)

      if (!processStdout.trim()) {
        logger.info(`Process with PID ${pid} is not running`)
        return { success: true, message: `Process with PID ${pid} is not running` }
      }

      // Find child processes
      const childProcessCommand = `Get-WmiObject -Class Win32_Process | Where-Object { $_.ParentProcessId -eq ${pid} } | Select-Object ProcessId | ConvertTo-Json`
      const { stdout: childStdout } = await execAsync(`powershell -Command "${childProcessCommand}"`)

      // If there are child processes, terminate them first
      if (childStdout.trim()) {
        const childProcesses = JSON.parse(childStdout)
        const childList = Array.isArray(childProcesses) ? childProcesses : [childProcesses]

        logger.info(`Found ${childList.length} child processes for PID ${pid}`)

        // Recursively terminate each child process
        for (const childProcess of childList) {
          const childPid = childProcess.ProcessId
          logger.info(`Terminating child process PID: ${childPid}`)
          await this.terminalProcess(childPid)
        }
      } else {
        logger.info(`No child processes found for PID ${pid}`)
      }

      // Finally, terminate the parent process
      const killCommand = `Stop-Process -Id ${pid} -Force -ErrorAction SilentlyContinue`
      await execAsync(`powershell -Command "${killCommand}"`)
      logger.info(`Terminated process with PID: ${pid}`)

      // Wait for the process to disappear with 5-second timeout
      const timeout = 5000 // 5 seconds
      const startTime = Date.now()

      while (Date.now() - startTime < timeout) {
        const checkCommand = `Get-Process -Id ${pid} -ErrorAction SilentlyContinue | Select-Object Id | ConvertTo-Json`
        const { stdout: checkStdout } = await execAsync(`powershell -Command "${checkCommand}"`)

        if (!checkStdout.trim()) {
          logger.info(`Process with PID ${pid} has disappeared`)
          return { success: true, message: `Process ${pid} and all child processes terminated successfully` }
        }

        // Wait 300ms before checking again
        await new Promise((resolve) => setTimeout(resolve, 300))
      }

      logger.warn(`Process with PID ${pid} did not disappear within timeout`)
      return { success: false, message: `Process ${pid} did not disappear within 5 seconds` }
    } catch (error) {
      logger.error(`Failed to terminate process ${pid}:`, error as Error)
      return { success: false, message: `Failed to terminate process ${pid}` }
    }
  }

  /**
   * Stop OVMS process if it's running
   * @returns Promise<{ success: boolean; message?: string }>
   */
  public async stopOvms(): Promise<{ success: boolean; message?: string }> {
    try {
      // Check if OVMS process is running
      const psCommand = `Get-Process -Name "ovms" -ErrorAction SilentlyContinue | Select-Object Id, Path | ConvertTo-Json`
      const { stdout } = await execAsync(`powershell -Command "${psCommand}"`)

      if (!stdout.trim()) {
        logger.info('OVMS process is not running')
        return { success: true, message: 'OVMS process is not running' }
      }

      const processes = JSON.parse(stdout)
      const processList = Array.isArray(processes) ? processes : [processes]

      if (processList.length === 0) {
        logger.info('OVMS process is not running')
        return { success: true, message: 'OVMS process is not running' }
      }

      // Terminate all OVMS processes using terminalProcess
      for (const process of processList) {
        const result = await this.terminalProcess(process.Id)
        if (!result.success) {
          logger.error(`Failed to terminate OVMS process with PID: ${process.Id}, ${result.message}`)
          return { success: false, message: `Failed to terminate OVMS process: ${result.message}` }
        }
        logger.info(`Terminated OVMS process with PID: ${process.Id}`)
      }

      // Reset the ovms instance
      this.ovms = null

      logger.info('OVMS process stopped successfully')
      return { success: true, message: 'OVMS process stopped successfully' }
    } catch (error) {
      logger.error(`Failed to stop OVMS process: ${error}`)
      return { success: false, message: 'Failed to stop OVMS process' }
    }
  }

  /**
   * Run OVMS by ensuring config.json exists and executing run.bat
   * @returns Promise<{ success: boolean; message?: string }>
   */
  public async runOvms(): Promise<{ success: boolean; message?: string }> {
    const homeDir = homedir()
    const ovmsDir = path.join(homeDir, '.cherrystudio', 'ovms', 'ovms')
    const configPath = path.join(ovmsDir, 'models', 'config.json')
    const runBatPath = path.join(ovmsDir, 'run.bat')

    try {
      // Check if config.json exists, if not create it with default content
      if (!(await fs.pathExists(configPath))) {
        logger.info(`Config file does not exist, creating: ${configPath}`)

        // Ensure the models directory exists
        await fs.ensureDir(path.dirname(configPath))

        // Create config.json with default content
        const defaultConfig = {
          mediapipe_config_list: [],
          model_config_list: []
        }

        await fs.writeJson(configPath, defaultConfig, { spaces: 2 })
        logger.info(`Config file created: ${configPath}`)
      }

      // Check if run.bat exists
      if (!(await fs.pathExists(runBatPath))) {
        logger.error(`run.bat not found at: ${runBatPath}`)
        return { success: false, message: 'run.bat not found' }
      }

      // Run run.bat without waiting for it to complete
      logger.info(`Starting OVMS with run.bat: ${runBatPath}`)
      exec(`"${runBatPath}"`, { cwd: ovmsDir }, (error) => {
        if (error) {
          logger.error(`Error running run.bat: ${error}`)
        }
      })

      logger.info('OVMS started successfully')
      return { success: true }
    } catch (error) {
      logger.error(`Failed to run OVMS: ${error}`)
      return { success: false, message: 'Failed to run OVMS' }
    }
  }

  /**
   * Get OVMS status - checks installation and running status
   * @returns 'not-installed' | 'not-running' | 'running'
   */
  public async getOvmsStatus(): Promise<'not-installed' | 'not-running' | 'running'> {
    const homeDir = homedir()
    const ovmsPath = path.join(homeDir, '.cherrystudio', 'ovms', 'ovms', 'ovms.exe')

    try {
      // Check if OVMS executable exists
      if (!(await fs.pathExists(ovmsPath))) {
        logger.info(`OVMS executable not found at: ${ovmsPath}`)
        return 'not-installed'
      }

      // Check if OVMS process is running
      //const psCommand = `Get-Process -Name "ovms" -ErrorAction SilentlyContinue | Where-Object { $_.Path -eq "${ovmsPath.replace(/\\/g, '\\\\')}" } | Select-Object Id | ConvertTo-Json`;
      //const { stdout } = await execAsync(`powershell -Command "${psCommand}"`);
      const psCommand = `Get-Process -Name "ovms" -ErrorAction SilentlyContinue | Select-Object Id, Path | ConvertTo-Json`
      const { stdout } = await execAsync(`powershell -Command "${psCommand}"`)

      if (!stdout.trim()) {
        logger.info('OVMS process not running')
        return 'not-running'
      }

      const processes = JSON.parse(stdout)
      const processList = Array.isArray(processes) ? processes : [processes]

      if (processList.length > 0) {
        logger.info('OVMS process is running')
        return 'running'
      } else {
        logger.info('OVMS process not running')
        return 'not-running'
      }
    } catch (error) {
      logger.info(`Failed to check OVMS status: ${error}`)
      return 'not-running'
    }
  }

  /**
   * Initialize OVMS by finding the executable path and working directory
   */
  public async initializeOvms(): Promise<boolean> {
    // Use PowerShell to find ovms.exe processes with their paths
    const psCommand = `Get-Process -Name "ovms" -ErrorAction SilentlyContinue | Select-Object Id, Path | ConvertTo-Json`
    const { stdout } = await execAsync(`powershell -Command "${psCommand}"`)

    if (!stdout.trim()) {
      logger.error('Command to find OVMS process returned no output')
      return false
    }
    logger.debug(`OVMS process output: ${stdout}`)

    const processes = JSON.parse(stdout)
    const processList = Array.isArray(processes) ? processes : [processes]

    // Find the first process with a valid path
    for (const process of processList) {
      this.ovms = {
        pid: process.Id,
        path: process.Path,
        workingDirectory: path.dirname(process.Path)
      }
      return true
    }

    return this.ovms !== null
  }

  /**
   * Check if the Model Name and ID are valid, they are valid only if they are not used in the config.json
   * @param modelName Name of the model to check
   * @param modelId ID of the model to check
   */
  public async isNameAndIDAvalid(modelName: string, modelId: string): Promise<boolean> {
    if (!modelName || !modelId) {
      logger.error('Model name and ID cannot be empty')
      return false
    }

    const homeDir = homedir()
    const configPath = path.join(homeDir, '.cherrystudio', 'ovms', 'ovms', 'models', 'config.json')
    try {
      if (!(await fs.pathExists(configPath))) {
        logger.warn(`Config file does not exist: ${configPath}`)
        return false
      }

      const config: OvmsConfig = await fs.readJson(configPath)
      if (!config.mediapipe_config_list) {
        logger.warn(`No mediapipe_config_list found in config: ${configPath}`)
        return false
      }

      // Check if the model name or ID already exists in the config
      const exists = config.mediapipe_config_list.some(
        (model) => model.name === modelName || model.base_path === modelId
      )
      if (exists) {
        logger.warn(`Model with name "${modelName}" or ID "${modelId}" already exists in the config`)
        return false
      }
    } catch (error) {
      logger.error(`Failed to check model existence: ${error}`)
      return false
    }

    return true
  }

  private async applyModelPath(modelDirPath: string): Promise<boolean> {
    const homeDir = homedir()
    const patchDir = path.join(homeDir, '.cherrystudio', 'ovms', 'patch')
    if (!(await fs.pathExists(patchDir))) {
      return true
    }

    const modelId = path.basename(modelDirPath)

    // get all sub directories in patchDir
    const patchs = await fs.readdir(patchDir)
    for (const patch of patchs) {
      const fullPatchPath = path.join(patchDir, patch)

      if (fs.lstatSync(fullPatchPath).isDirectory()) {
        if (modelId.toLowerCase().includes(patch.toLowerCase())) {
          // copy all files from fullPath to modelDirPath
          try {
            const files = await fs.readdir(fullPatchPath)
            for (const file of files) {
              const srcFile = path.join(fullPatchPath, file)
              const destFile = path.join(modelDirPath, file)
              await fs.copyFile(srcFile, destFile)
            }
          } catch (error) {
            logger.error(`Failed to copy files from ${fullPatchPath} to ${modelDirPath}: ${error}`)
            return false
          }
          logger.info(`Applied patchs for model ${modelId}`)
          return true
        }
      }
    }

    return true
  }

  /**
   * Add a model to OVMS by downloading it
   * @param modelName Name of the model to add
   * @param modelId ID of the model to download
   * @param modelSource Model Source: huggingface, hf-mirror and modelscope, default is huggingface
   * @param task Task type: text_generation, embedding, rerank, image_generation
   */
  public async addModel(
    modelName: string,
    modelId: string,
    modelSource: string,
    task: string = 'text_generation'
  ): Promise<{ success: boolean; message?: string }> {
    logger.info(`Adding model: ${modelName} with ID: ${modelId}, Source: ${modelSource}, Task: ${task}`)

    const homeDir = homedir()
    const ovdndDir = path.join(homeDir, '.cherrystudio', 'ovms', 'ovms')
    const pathModel = path.join(ovdndDir, 'models', modelId)

    try {
      // check the ovdnDir+'models'+modelId exist or not
      if (await fs.pathExists(pathModel)) {
        logger.error(`Model with ID ${modelId} already exists`)
        return { success: false, message: 'Model ID already exists!' }
      }

      // remove the model directory if it exists
      if (await fs.pathExists(pathModel)) {
        logger.info(`Removing existing model directory: ${pathModel}`)
        await fs.remove(pathModel)
      }

      // Use ovdnd.exe for downloading instead of ovms.exe
      const ovdndPath = path.join(ovdndDir, 'ovdnd.exe')
      const command =
        `"${ovdndPath}" --pull ` +
        `--model_repository_path "${ovdndDir}/models" ` +
        `--source_model "${modelId}" ` +
        `--model_name "${modelName}" ` +
        `--target_device GPU ` +
        `--task ${task} ` +
        `--overwrite_models`

      const env: Record<string, string | undefined> = {
        ...process.env,
        OVMS_DIR: ovdndDir,
        PYTHONHOME: path.join(ovdndDir, 'python'),
        PATH: `${process.env.PATH};${ovdndDir};${path.join(ovdndDir, 'python')}`
      }

      if (modelSource) {
        env.HF_ENDPOINT = modelSource
      }

      logger.info(`Running command: ${command} from ${modelSource}`)
      const { stdout } = await execAsync(command, { env: env, cwd: ovdndDir })

      logger.info('Model download completed')
      logger.debug(`Command output: ${stdout}`)
    } catch (error) {
      // remove ovdnDir+'models'+modelId if it exists
      if (await fs.pathExists(pathModel)) {
        logger.info(`Removing failed model directory: ${pathModel}`)
        await fs.remove(pathModel)
      }
      logger.error(`Failed to add model: ${error}`)
      return {
        success: false,
        message: `Download model ${modelId} failed, please check following items and try it again:<p>- the model id</p><p>- network connection and proxy</p>`
      }
    }

    // Update config file
    if (!(await this.updateModelConfig(modelName, modelId))) {
      logger.error('Failed to update model config')
      return { success: false, message: 'Failed to update model config' }
    }

    if (!(await this.applyModelPath(pathModel))) {
      logger.error('Failed to apply model patchs')
      return { success: false, message: 'Failed to apply model patchs' }
    }

    logger.info(`Model ${modelName} added successfully with ID ${modelId}`)
    return { success: true }
  }

  /**
   * Stop the model download process if it's running
   * @returns Promise<{ success: boolean; message?: string }>
   */
  public async stopAddModel(): Promise<{ success: boolean; message?: string }> {
    try {
      // Check if ovdnd.exe process is running
      const psCommand = `Get-Process -Name "ovdnd" -ErrorAction SilentlyContinue | Select-Object Id, Path | ConvertTo-Json`
      const { stdout } = await execAsync(`powershell -Command "${psCommand}"`)

      if (!stdout.trim()) {
        logger.info('ovdnd process is not running')
        return { success: true, message: 'Model download process is not running' }
      }

      const processes = JSON.parse(stdout)
      const processList = Array.isArray(processes) ? processes : [processes]

      if (processList.length === 0) {
        logger.info('ovdnd process is not running')
        return { success: true, message: 'Model download process is not running' }
      }

      // Terminate all ovdnd processes
      for (const process of processList) {
        this.terminalProcess(process.Id)
      }

      logger.info('Model download process stopped successfully')
      return { success: true, message: 'Model download process stopped successfully' }
    } catch (error) {
      logger.error(`Failed to stop model download process: ${error}`)
      return { success: false, message: 'Failed to stop model download process' }
    }
  }

  /**
   * check if the model id exists in the OVMS configuration
   * @param modelId ID of the model to check
   */
  public async checkModelExists(modelId: string): Promise<boolean> {
    const homeDir = homedir()
    const ovmsDir = path.join(homeDir, '.cherrystudio', 'ovms', 'ovms')
    const configPath = path.join(ovmsDir, 'models', 'config.json')

    try {
      if (!(await fs.pathExists(configPath))) {
        logger.warn(`Config file does not exist: ${configPath}`)
        return false
      }

      const config: OvmsConfig = await fs.readJson(configPath)
      if (!config.mediapipe_config_list) {
        logger.warn('No mediapipe_config_list found in config')
        return false
      }

      return config.mediapipe_config_list.some((model) => model.base_path === modelId)
    } catch (error) {
      logger.error(`Failed to check model existence: ${error}`)
      return false
    }
  }

  /**
   * Update the model configuration file
   */
  public async updateModelConfig(modelName: string, modelId: string): Promise<boolean> {
    const homeDir = homedir()
    const ovmsDir = path.join(homeDir, '.cherrystudio', 'ovms', 'ovms')
    const configPath = path.join(ovmsDir, 'models', 'config.json')

    try {
      // Ensure the models directory exists
      await fs.ensureDir(path.dirname(configPath))
      let config: OvmsConfig

      // Read existing config or create new one
      if (await fs.pathExists(configPath)) {
        config = await fs.readJson(configPath)
      } else {
        config = { mediapipe_config_list: [] }
      }

      // Ensure mediapipe_config_list exists
      if (!config.mediapipe_config_list) {
        config.mediapipe_config_list = []
      }

      // Add new model config
      const newModelConfig: ModelConfig = {
        name: modelName,
        base_path: modelId
      }

      // Check if model already exists, if so, update it
      const existingIndex = config.mediapipe_config_list.findIndex((model) => model.base_path === modelId)

      if (existingIndex >= 0) {
        config.mediapipe_config_list[existingIndex] = newModelConfig
        logger.info(`Updated existing model config: ${modelName}`)
      } else {
        config.mediapipe_config_list.push(newModelConfig)
        logger.info(`Added new model config: ${modelName}`)
      }

      // Write config back to file
      await fs.writeJson(configPath, config, { spaces: 2 })
      logger.info(`Config file updated: ${configPath}`)
    } catch (error) {
      logger.error(`Failed to update model config: ${error}`)
      return false
    }
    return true
  }

  /**
   * Get all models from OVMS config, filtered for image generation models
   * @returns Array of model configurations
   */
  public async getModels(): Promise<ModelConfig[]> {
    const homeDir = homedir()
    const ovmsDir = path.join(homeDir, '.cherrystudio', 'ovms', 'ovms')
    const configPath = path.join(ovmsDir, 'models', 'config.json')

    try {
      if (!(await fs.pathExists(configPath))) {
        logger.warn(`Config file does not exist: ${configPath}`)
        return []
      }

      const config: OvmsConfig = await fs.readJson(configPath)
      if (!config.mediapipe_config_list) {
        logger.warn('No mediapipe_config_list found in config')
        return []
      }

      // Filter models for image generation (SD, Stable-Diffusion, Stable Diffusion, FLUX)
      const imageGenerationModels = config.mediapipe_config_list.filter((model) => {
        const modelName = model.name.toLowerCase()
        return (
          modelName.startsWith('sd') ||
          modelName.startsWith('stable-diffusion') ||
          modelName.startsWith('stable diffusion') ||
          modelName.startsWith('flux')
        )
      })

      logger.info(`Found ${imageGenerationModels.length} image generation models`)
      return imageGenerationModels
    } catch (error) {
      logger.error(`Failed to get models: ${error}`)
      return []
    }
  }
}

export default OvmsManager
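
For reference, the file this class manages lives at ~/.cherrystudio/ovms/ovms/models/config.json. After runOvms() creates it and addModel()/updateModelConfig() register one model, it would look roughly like the sketch below (the model name and id are illustrative placeholders):

// Sketch only: approximate shape of ~/.cherrystudio/ovms/ovms/models/config.json
// after one model has been registered. 'model_config_list' is created empty by
// runOvms(); updateModelConfig() only appends to 'mediapipe_config_list'.
const exampleOvmsConfig = {
  model_config_list: [],
  mediapipe_config_list: [{ name: 'sd-1.5', base_path: 'illustrative/model-id' }]
}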
@@ -100,7 +100,8 @@ const api = {
  },
  system: {
    getDeviceType: () => ipcRenderer.invoke(IpcChannel.System_GetDeviceType),
-    getHostname: () => ipcRenderer.invoke(IpcChannel.System_GetHostname)
+    getHostname: () => ipcRenderer.invoke(IpcChannel.System_GetHostname),
+    getCpuName: () => ipcRenderer.invoke(IpcChannel.System_GetCpuName)
  },
  devTools: {
    toggle: () => ipcRenderer.invoke(IpcChannel.System_ToggleDevTools)
@@ -290,6 +291,16 @@
    clearAuthCache: (projectId: string, clientEmail?: string) =>
      ipcRenderer.invoke(IpcChannel.VertexAI_ClearAuthCache, projectId, clientEmail)
  },
+  ovms: {
+    addModel: (modelName: string, modelId: string, modelSource: string, task: string) =>
+      ipcRenderer.invoke(IpcChannel.Ovms_AddModel, modelName, modelId, modelSource, task),
+    stopAddModel: () => ipcRenderer.invoke(IpcChannel.Ovms_StopAddModel),
+    getModels: () => ipcRenderer.invoke(IpcChannel.Ovms_GetModels),
+    isRunning: () => ipcRenderer.invoke(IpcChannel.Ovms_IsRunning),
+    getStatus: () => ipcRenderer.invoke(IpcChannel.Ovms_GetStatus),
+    runOvms: () => ipcRenderer.invoke(IpcChannel.Ovms_RunOVMS),
+    stopOvms: () => ipcRenderer.invoke(IpcChannel.Ovms_StopOVMS)
+  },
  config: {
    set: (key: string, value: any, isNotify: boolean = false) =>
      ipcRenderer.invoke(IpcChannel.Config_Set, key, value, isNotify),
@@ -355,6 +366,7 @@
  getBinaryPath: (name: string) => ipcRenderer.invoke(IpcChannel.App_GetBinaryPath, name),
  installUVBinary: () => ipcRenderer.invoke(IpcChannel.App_InstallUvBinary),
  installBunBinary: () => ipcRenderer.invoke(IpcChannel.App_InstallBunBinary),
+  installOvmsBinary: () => ipcRenderer.invoke(IpcChannel.App_InstallOvmsBinary),
  protocol: {
    onReceiveData: (callback: (data: { url: string; params: any }) => void) => {
      const listener = (_event: Electron.IpcRendererEvent, data: { url: string; params: any }) => {
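
The new ovms namespace mirrors the main-process handlers registered in ipc.ts above. A minimal renderer-side usage sketch, assuming the preload object is exposed as window.api (typings, the model id, and the source URL here are illustrative placeholders):

// Sketch only: driving the OVMS lifecycle from the renderer via the new bridge.
const api = (window as any).api

async function ensureOvmsRunning(): Promise<void> {
  const status: 'not-installed' | 'not-running' | 'running' = await api.ovms.getStatus()
  if (status === 'not-installed') {
    await api.installOvmsBinary() // runs resources/scripts/install-ovms.js in the main process
  }
  if (status !== 'running') {
    await api.ovms.runOvms() // starts run.bat via OvmsManager
  }
  // Pull a model (placeholders), then list the image-generation models OVMS knows about
  await api.ovms.addModel('sd-1.5', 'illustrative/model-id', 'https://hf-mirror.com', 'image_generation')
  const models = await api.ovms.getModels()
  console.log(models)
}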
@@ -24,6 +24,8 @@ export class AiSdkToChunkAdapter {
  private accumulate: boolean | undefined
  private isFirstChunk = true
  private enableWebSearch: boolean = false
+  private responseStartTimestamp: number | null = null
+  private firstTokenTimestamp: number | null = null

  constructor(
    private onChunk: (chunk: Chunk) => void,
@@ -36,6 +38,17 @@
    this.enableWebSearch = enableWebSearch || false
  }

+  private markFirstTokenIfNeeded() {
+    if (this.firstTokenTimestamp === null && this.responseStartTimestamp !== null) {
+      this.firstTokenTimestamp = Date.now()
+    }
+  }
+
+  private resetTimingState() {
+    this.responseStartTimestamp = null
+    this.firstTokenTimestamp = null
+  }
+
  /**
   * 处理 AI SDK 流结果
   * @param aiSdkResult AI SDK 的流结果对象
@@ -63,6 +76,8 @@
      webSearchResults: [],
      reasoningId: ''
    }
+    this.resetTimingState()
+    this.responseStartTimestamp = Date.now()
    // Reset link converter state at the start of stream
    this.isFirstChunk = true

@@ -75,6 +90,7 @@
      if (this.enableWebSearch) {
        const remainingText = flushLinkConverterBuffer()
        if (remainingText) {
+          this.markFirstTokenIfNeeded()
          this.onChunk({
            type: ChunkType.TEXT_DELTA,
            text: remainingText
@@ -89,6 +105,7 @@
      }
    } finally {
      reader.releaseLock()
+      this.resetTimingState()
    }
  }

@@ -139,6 +156,7 @@

        // Only emit chunk if there's text to send
        if (finalText) {
+          this.markFirstTokenIfNeeded()
          this.onChunk({
            type: ChunkType.TEXT_DELTA,
            text: this.accumulate ? final.text : finalText
@@ -163,6 +181,9 @@
        break
      case 'reasoning-delta':
        final.reasoningContent += chunk.text || ''
+        if (chunk.text) {
+          this.markFirstTokenIfNeeded()
+        }
        this.onChunk({
          type: ChunkType.THINKING_DELTA,
          text: final.reasoningContent || ''
@@ -262,44 +283,37 @@
        break
      }

-      case 'finish':
+      case 'finish': {
+        const usage = {
+          completion_tokens: chunk.totalUsage?.outputTokens || 0,
+          prompt_tokens: chunk.totalUsage?.inputTokens || 0,
+          total_tokens: chunk.totalUsage?.totalTokens || 0
+        }
+        const metrics = this.buildMetrics(chunk.totalUsage)
+        const baseResponse = {
+          text: final.text || '',
+          reasoning_content: final.reasoningContent || ''
+        }
+
        this.onChunk({
          type: ChunkType.BLOCK_COMPLETE,
          response: {
-            text: final.text || '',
-            reasoning_content: final.reasoningContent || '',
-            usage: {
-              completion_tokens: chunk.totalUsage.outputTokens || 0,
-              prompt_tokens: chunk.totalUsage.inputTokens || 0,
-              total_tokens: chunk.totalUsage.totalTokens || 0
-            },
-            metrics: chunk.totalUsage
-              ? {
-                  completion_tokens: chunk.totalUsage.outputTokens || 0,
-                  time_completion_millsec: 0
-                }
-              : undefined
+            ...baseResponse,
+            usage: { ...usage },
+            metrics: metrics ? { ...metrics } : undefined
          }
        })
        this.onChunk({
          type: ChunkType.LLM_RESPONSE_COMPLETE,
          response: {
-            text: final.text || '',
-            reasoning_content: final.reasoningContent || '',
-            usage: {
-              completion_tokens: chunk.totalUsage.outputTokens || 0,
-              prompt_tokens: chunk.totalUsage.inputTokens || 0,
-              total_tokens: chunk.totalUsage.totalTokens || 0
-            },
-            metrics: chunk.totalUsage
-              ? {
-                  completion_tokens: chunk.totalUsage.outputTokens || 0,
-                  time_completion_millsec: 0
-                }
-              : undefined
+            ...baseResponse,
+            usage: { ...usage },
+            metrics: metrics ? { ...metrics } : undefined
          }
        })
+        this.resetTimingState()
        break
+      }

      // === 源和文件相关事件 ===
      case 'source':
@@ -335,6 +349,34 @@
      default:
    }
  }

+  private buildMetrics(totalUsage?: {
+    inputTokens?: number | null
+    outputTokens?: number | null
+    totalTokens?: number | null
+  }) {
+    if (!totalUsage) {
+      return undefined
+    }
+
+    const completionTokens = totalUsage.outputTokens ?? 0
+    const now = Date.now()
+    const start = this.responseStartTimestamp ?? now
+    const firstToken = this.firstTokenTimestamp
+    const timeFirstToken = Math.max(firstToken != null ? firstToken - start : 0, 0)
+    const baseForCompletion = firstToken ?? start
+    let timeCompletion = Math.max(now - baseForCompletion, 0)
+
+    if (timeCompletion === 0 && completionTokens > 0) {
+      timeCompletion = 1
+    }
+
+    return {
+      completion_tokens: completionTokens,
+      time_first_token_millsec: timeFirstToken,
+      time_completion_millsec: timeCompletion
+    }
+  }
}

export default AiSdkToChunkAdapter
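
With the timing state above, first-token latency and completion time now come from real timestamps instead of the previous hard-coded time_completion_millsec: 0. A small worked sketch of the arithmetic buildMetrics() performs (numbers are illustrative):

// Sketch only: the arithmetic buildMetrics() performs, with illustrative timestamps (ms).
const responseStart = 1_000 // set when the stream starts
const firstToken = 1_250    // set on the first text/reasoning delta
const now = 3_050           // when the 'finish' chunk arrives

const time_first_token_millsec = Math.max(firstToken - responseStart, 0) // 250
const time_completion_millsec = Math.max(now - firstToken, 0)            // 1800
// If no first token was ever marked, both values clamp to 0, and a non-zero
// completion token count still reports at least 1 ms of completion time.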
@@ -12,6 +12,7 @@ import { VertexAPIClient } from './gemini/VertexAPIClient'
import { NewAPIClient } from './newapi/NewAPIClient'
import { OpenAIAPIClient } from './openai/OpenAIApiClient'
import { OpenAIResponseAPIClient } from './openai/OpenAIResponseAPIClient'
+import { OVMSClient } from './ovms/OVMSClient'
import { PPIOAPIClient } from './ppio/PPIOAPIClient'
import { ZhipuAPIClient } from './zhipu/ZhipuAPIClient'

@@ -63,6 +64,12 @@ export class ApiClientFactory {
      return instance
    }

+    if (provider.id === 'ovms') {
+      logger.debug(`Creating OVMSClient for provider: ${provider.id}`)
+      instance = new OVMSClient(provider) as BaseApiClient
+      return instance
+    }
+
    // 然后检查标准的 Provider Type
    switch (provider.type) {
      case 'openai':
src/renderer/src/aiCore/legacy/clients/ovms/OVMSClient.ts | 56 (new file)
@@ -0,0 +1,56 @@
import { loggerService } from '@logger'
import { isSupportedModel } from '@renderer/config/models'
import { objectKeys, type Provider } from '@renderer/types'
import type OpenAI from 'openai'

import { OpenAIAPIClient } from '../openai/OpenAIApiClient'

const logger = loggerService.withContext('OVMSClient')

export class OVMSClient extends OpenAIAPIClient {
  constructor(provider: Provider) {
    super(provider)
  }

  override async listModels(): Promise<OpenAI.Models.Model[]> {
    try {
      const sdk = await this.getSdkInstance()

      const chatModelsResponse = await sdk.request({
        method: 'get',
        path: '../v1/config'
      })
      logger.debug(`Chat models response: ${JSON.stringify(chatModelsResponse)}`)

      // Parse the config response to extract model information
      const config = chatModelsResponse as Record<string, any>
      const models = objectKeys(config)
        .map((modelName) => {
          const modelInfo = config[modelName]

          // Check if model has at least one version with "AVAILABLE" state
          const hasAvailableVersion = modelInfo?.model_version_status?.some(
            (versionStatus: any) => versionStatus?.state === 'AVAILABLE'
          )

          if (hasAvailableVersion) {
            return {
              id: modelName,
              object: 'model' as const,
              owned_by: 'ovms',
              created: Date.now()
            }
          }
          return null // Skip models without available versions
        })
        .filter(Boolean) // Remove null entries
      logger.debug(`Processed models: ${JSON.stringify(models)}`)

      // Filter out unsupported models
      return models.filter((model): model is OpenAI.Models.Model => model !== null && isSupportedModel(model))
    } catch (error) {
      logger.error(`Error listing OVMS models: ${error}`)
      return []
    }
  }
}
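
listModels() reads the OVMS config endpoint (reached via the relative path '../v1/config' from the provider's API base) and keeps only models that report an AVAILABLE version. A sketch of the response shape the parser expects, with illustrative model names and values:

// Sketch only: the /v1/config payload shape that listModels() expects.
// Keys are model names; a model is listed when any entry in
// model_version_status has state === 'AVAILABLE'.
const exampleConfigResponse: Record<string, any> = {
  'sd-1.5': {
    model_version_status: [{ version: '1', state: 'AVAILABLE' }]
  },
  'flux-schnell': {
    model_version_status: [{ version: '1', state: 'LOADING' }]
  }
}
// With this input, only 'sd-1.5' would be returned as an OpenAI.Models.Model entry.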
src/renderer/src/assets/images/models/gpt-5-codex.png | BIN (new file, binary not shown; 25 KiB)
src/renderer/src/assets/images/providers/intel.png | BIN (new file, binary not shown; 3.6 KiB)
@@ -1,3 +1,4 @@
+import { cn } from '@heroui/react'
import Scrollbar from '@renderer/components/Scrollbar'
import { ChevronRight } from 'lucide-react'
import { useEffect, useRef, useState } from 'react'
@@ -17,6 +18,10 @@ export interface HorizontalScrollContainerProps {
  dependencies?: readonly unknown[]
  scrollDistance?: number
  className?: string
+  classNames?: {
+    container?: string
+    content?: string
+  }
  gap?: string
  expandable?: boolean
}
@@ -26,6 +31,7 @@ const HorizontalScrollContainer: React.FC<HorizontalScrollContainerProps> = ({
  dependencies = [],
  scrollDistance = 200,
  className,
+  classNames,
  gap = '8px',
  expandable = false
}) => {
@@ -95,11 +101,16 @@

  return (
    <Container
-      className={className}
+      className={cn(className, classNames?.container)}
      $expandable={expandable}
      $disableHoverButton={isScrolledToEnd}
      onClick={expandable ? handleContainerClick : undefined}>
-      <ScrollContent ref={scrollRef} $gap={gap} $isExpanded={isExpanded} $expandable={expandable}>
+      <ScrollContent
+        ref={scrollRef}
+        $gap={gap}
+        $isExpanded={isExpanded}
+        $expandable={expandable}
+        className={cn(classNames?.content)}>
        {children}
      </ScrollContent>
      {canScroll && !isExpanded && !isScrolledToEnd && (
@@ -38,6 +38,7 @@ interface PopupContainerProps {
  message?: Message
  messages?: Message[]
  topic?: Topic
+  rawContent?: string
}

// 转换文件信息数组为树形结构
@@ -140,7 +141,8 @@ const PopupContainer: React.FC<PopupContainerProps> = ({
  resolve,
  message,
  messages,
-  topic
+  topic,
+  rawContent
}) => {
  const [defaultObsidianVault, setDefaultObsidianVault] = usePreference('data.integration.obsidian.default_vault')
  const [state, setState] = useState({
@@ -229,7 +231,9 @@
      return
    }
    let markdown = ''
-    if (topic) {
+    if (rawContent) {
+      markdown = rawContent
+    } else if (topic) {
      markdown = await topicToMarkdown(topic, exportReasoning)
    } else if (messages && messages.length > 0) {
      markdown = await messagesToMarkdown(messages, exportReasoning)
@@ -299,7 +303,6 @@
      }
    }
  }
-
  return (
    <Modal
      title={i18n.t('chat.topics.export.obsidian_atributes')}
@@ -410,9 +413,11 @@
          </Option>
        </Select>
      </Form.Item>
-      <Form.Item label={i18n.t('chat.topics.export.obsidian_reasoning')}>
-        <Switch isSelected={exportReasoning} onValueChange={setExportReasoning} />
-      </Form.Item>
+      {!rawContent && (
+        <Form.Item label={i18n.t('chat.topics.export.obsidian_reasoning')}>
+          <Switch isSelected={exportReasoning} onValueChange={setExportReasoning} />
+        </Form.Item>
+      )}
    </Form>
  </Modal>
)
@@ -10,6 +10,7 @@ interface ObsidianExportOptions {
  topic?: Topic
  message?: Message
  messages?: Message[]
+  rawContent?: string
}

export default class ObsidianExportPopup {
@@ -25,6 +26,7 @@
        topic={options.topic}
        message={options.message}
        messages={options.messages}
+        rawContent={options.rawContent}
        obsidianTags={''}
        open={true}
        resolve={(v) => {
||||
@ -61,12 +61,15 @@ const PopupContainer: React.FC<Props> = ({ text, title, extension, resolve }) =>
<Editor
theme={activeCmTheme}
fontSize={fontSize - 1}
editable={false}
readOnly={true}
expanded={false}
height="100%"
style={{ height: '100%' }}
value={text}
language={extension}
options={{
keymap: true
}}
/>
) : (
<Text>{text}</Text>
@ -48,7 +48,8 @@ const RichEditor = ({
enableContentSearch = false,
isFullWidth = false,
fontFamily = 'default',
fontSize = 16
fontSize = 16,
enableSpellCheck = false
// toolbarItems: _toolbarItems // TODO: Implement custom toolbar items
}: RichEditorProps & { ref?: React.RefObject<RichEditorRef | null> }) => {
// Use the rich editor hook for complete editor management
@ -71,6 +72,7 @@ const RichEditor = ({
onBlur,
placeholder,
editable,
enableSpellCheck,
scrollParent: () => scrollContainerRef.current,
onShowTableActionMenu: ({ position, actions }) => {
const iconMap: Record<string, React.ReactNode> = {
@ -14,6 +14,31 @@ export const RichEditorWrapper = styled.div<{
border-radius: 6px;
background: var(--color-background);
overflow-y: hidden;
.ProseMirror table,
.tiptap table {
table-layout: auto !important;
}

.ProseMirror table th,
.ProseMirror table td,
.tiptap th,
.tiptap td {
white-space: normal !important;
word-wrap: break-word !important;
word-break: break-word !important;
overflow-wrap: break-word !important;
overflow: visible !important;
text-overflow: clip !important;
}

.ProseMirror table th > *,
.ProseMirror table td > *,
.tiptap td > *,
.tiptap th > * {
white-space: normal !important;
overflow: visible !important;
text-overflow: clip !important;
}
width: ${({ $isFullWidth }) => ($isFullWidth ? '100%' : '60%')};
margin: ${({ $isFullWidth }) => ($isFullWidth ? '0' : '0 auto')};
font-family: ${({ $fontFamily }) => ($fontFamily === 'serif' ? 'var(--font-family-serif)' : 'var(--font-family)')};
@ -21,6 +46,7 @@ export const RichEditorWrapper = styled.div<{

${({ $minHeight }) => $minHeight && `min-height: ${$minHeight}px;`}
${({ $maxHeight }) => $maxHeight && `max-height: ${$maxHeight}px;`}

`

export const ToolbarWrapper = styled.div`
@ -50,6 +50,8 @@ export interface RichEditorProps {
fontFamily?: 'default' | 'serif'
/** Font size in pixels */
fontSize?: number
/** Whether to enable spell check */
enableSpellCheck?: boolean
}

export interface ToolbarItem {
@ -57,6 +57,8 @@ export interface UseRichEditorOptions {
editable?: boolean
/** Whether to enable table of contents functionality */
enableTableOfContents?: boolean
/** Whether to enable spell check */
enableSpellCheck?: boolean
/** Show table action menu (row/column) with concrete actions and position */
onShowTableActionMenu?: (payload: {
type: 'row' | 'column'
@ -126,6 +128,7 @@ export const useRichEditor = (options: UseRichEditorOptions = {}): UseRichEditor
previewLength = 50,
placeholder = '',
editable = true,
enableSpellCheck = false,
onShowTableActionMenu,
scrollParent
} = options
@ -410,7 +413,9 @@ export const useRichEditor = (options: UseRichEditorOptions = {}): UseRichEditor
// Allow text selection even when not editable
style: editable
? ''
: 'user-select: text; -webkit-user-select: text; -moz-user-select: text; -ms-user-select: text;'
: 'user-select: text; -webkit-user-select: text; -moz-user-select: text; -ms-user-select: text;',
// Set spellcheck attribute on the contenteditable element
spellcheck: enableSpellCheck ? 'true' : 'false'
}
},
onUpdate: ({ editor }) => {
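The `editorProps.attributes` hunk above turns the new spell-check option into a DOM attribute on the ProseMirror contenteditable element. A reduced sketch of that option-to-attribute mapping only (the surrounding TipTap wiring is omitted; the function name is a stand-in):

```ts
// Illustration of the option → DOM attribute mapping; not the full TipTap editorProps object.
function buildEditorAttributes(opts: { editable: boolean; enableSpellCheck: boolean }) {
  return {
    // Keep text selectable even when the editor is read-only.
    style: opts.editable
      ? ''
      : 'user-select: text; -webkit-user-select: text; -moz-user-select: text; -ms-user-select: text;',
    // contenteditable expects the string "true"/"false", not a boolean.
    spellcheck: opts.enableSpellCheck ? 'true' : 'false'
  }
}

buildEditorAttributes({ editable: false, enableSpellCheck: true })
// → { style: 'user-select: text; ...', spellcheck: 'true' }
```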
@ -238,7 +238,17 @@ const TabsContainer: React.FC<TabsContainerProps> = ({ children }) => {
onSortEnd={onSortEnd}
className="tabs-sortable"
renderItem={(tab) => (
<Tab key={tab.id} active={tab.id === activeTabId} onClick={() => handleTabClick(tab)}>
<Tab
key={tab.id}
active={tab.id === activeTabId}
onClick={() => handleTabClick(tab)}
onAuxClick={(e) => {
if (e.button === 1 && tab.id !== 'home') {
e.preventDefault()
e.stopPropagation()
closeTab(tab.id)
}
}}>
<TabHeader>
{tab.id && <TabIcon>{getTabIcon(tab.id, minapps, minAppsCache)}</TabIcon>}
<TabTitle>{getTabTitle(tab.id)}</TabTitle>
@ -260,6 +260,7 @@ export const SYSTEM_MODELS: Record<SystemProviderId | 'defaultModel', Model[]> =
{ id: 'deepseek-r1', name: 'DeepSeek-R1', provider: 'burncloud', group: 'deepseek-ai' },
{ id: 'deepseek-v3', name: 'DeepSeek-V3', provider: 'burncloud', group: 'deepseek-ai' }
],
ovms: [],
ollama: [],
lmstudio: [],
silicon: [
@ -429,6 +430,12 @@ export const SYSTEM_MODELS: Record<SystemProviderId | 'defaultModel', Model[]> =
}
],
anthropic: [
{
id: 'claude-sonnet-4-5-20250929',
provider: 'anthropic',
name: 'Claude Sonnet 4.5',
group: 'Claude 4.5'
},
{
id: 'claude-sonnet-4-20250514',
provider: 'anthropic',
@ -697,6 +704,12 @@ export const SYSTEM_MODELS: Record<SystemProviderId | 'defaultModel', Model[]> =
name: 'GLM-4.5-Flash',
group: 'GLM-4.5'
},
{
id: 'glm-4.6',
provider: 'zhipu',
name: 'GLM-4.6',
group: 'GLM-4.6'
},
{
id: 'glm-4.5',
provider: 'zhipu',
@ -61,6 +61,7 @@ import ChatGPTImageModelLogo from '@renderer/assets/images/models/gpt_image_1.pn
import ChatGPTo1ModelLogo from '@renderer/assets/images/models/gpt_o1.png'
import GPT5ModelLogo from '@renderer/assets/images/models/gpt-5.png'
import GPT5ChatModelLogo from '@renderer/assets/images/models/gpt-5-chat.png'
import GPT5CodexModelLogo from '@renderer/assets/images/models/gpt-5-codex.png'
import GPT5MiniModelLogo from '@renderer/assets/images/models/gpt-5-mini.png'
import GPT5NanoModelLogo from '@renderer/assets/images/models/gpt-5-nano.png'
import GrokModelLogo from '@renderer/assets/images/models/grok.png'
@ -162,6 +163,7 @@ export function getModelLogo(modelId: string) {
return undefined
}

// key is regex
const logoMap = {
pixtral: isLight ? PixtralModelLogo : PixtralModelLogoDark,
jina: isLight ? JinaModelLogo : JinaModelLogoDark,
@ -177,6 +179,7 @@ export function getModelLogo(modelId: string) {
'gpt-5-mini': GPT5MiniModelLogo,
'gpt-5-nano': GPT5NanoModelLogo,
'gpt-5-chat': GPT5ChatModelLogo,
'gpt-5-codex': GPT5CodexModelLogo,
'gpt-5': GPT5ModelLogo,
gpts: isLight ? ChatGPT4ModelLogo : ChatGPT4ModelLogoDark,
'gpt-oss(?:-[\\w-]+)': isLight ? ChatGptModelLogo : ChatGptModelLogoDark,
@ -286,7 +289,7 @@ export function getModelLogo(modelId: string) {
longcat: LongCatAppLogo,
bytedance: BytedanceModelLogo,
'(V_1|V_1_TURBO|V_2|V_2A|V_2_TURBO|DESCRIBE|UPSCALE)': IdeogramModelLogo
}
} as const

for (const key in logoMap) {
const regex = new RegExp(key, 'i')
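Because `logoMap` keys are interpreted as case-insensitive regex patterns and checked in insertion order, the new `'gpt-5-codex'` entry has to sit before the broader `'gpt-5'` key; otherwise codex IDs would hit the generic GPT-5 logo first. A minimal sketch of that first-match lookup (logo values are placeholder strings here, not the real PNG imports):

```ts
// Placeholder logo values; the app imports real PNG assets instead.
const logoMap = {
  'gpt-5-codex': 'gpt-5-codex.png',
  'gpt-5': 'gpt-5.png'
} as const

function getLogo(modelId: string): string | undefined {
  for (const key in logoMap) {
    // Each key is treated as a case-insensitive regular expression.
    if (new RegExp(key, 'i').test(modelId)) return logoMap[key as keyof typeof logoMap]
  }
  return undefined
}

getLogo('gpt-5-codex-20250929') // → 'gpt-5-codex.png' (the more specific key is listed first)
getLogo('gpt-5-mini')           // → 'gpt-5.png' in this reduced map
```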
@ -22,6 +22,7 @@ export const MODEL_SUPPORTED_REASONING_EFFORT: ReasoningEffortConfig = {
default: ['low', 'medium', 'high'] as const,
o: ['low', 'medium', 'high'] as const,
gpt5: ['minimal', 'low', 'medium', 'high'] as const,
gpt5_codex: ['low', 'medium', 'high'] as const,
grok: ['low', 'high'] as const,
gemini: ['low', 'medium', 'high', 'auto'] as const,
gemini_pro: ['low', 'medium', 'high', 'auto'] as const,
@ -40,6 +41,7 @@ export const MODEL_SUPPORTED_OPTIONS: ThinkingOptionConfig = {
default: ['off', ...MODEL_SUPPORTED_REASONING_EFFORT.default] as const,
o: MODEL_SUPPORTED_REASONING_EFFORT.o,
gpt5: [...MODEL_SUPPORTED_REASONING_EFFORT.gpt5] as const,
gpt5_codex: MODEL_SUPPORTED_REASONING_EFFORT.gpt5_codex,
grok: MODEL_SUPPORTED_REASONING_EFFORT.grok,
gemini: ['off', ...MODEL_SUPPORTED_REASONING_EFFORT.gemini] as const,
gemini_pro: MODEL_SUPPORTED_REASONING_EFFORT.gemini_pro,
@ -55,8 +57,13 @@ export const MODEL_SUPPORTED_OPTIONS: ThinkingOptionConfig = {

export const getThinkModelType = (model: Model): ThinkingModelType => {
let thinkingModelType: ThinkingModelType = 'default'
const modelId = getLowerBaseModelName(model.id)
if (isGPT5SeriesModel(model)) {
thinkingModelType = 'gpt5'
if (modelId.includes('codex')) {
thinkingModelType = 'gpt5_codex'
} else {
thinkingModelType = 'gpt5'
}
} else if (isSupportedReasoningEffortOpenAIModel(model)) {
thinkingModelType = 'o'
} else if (isSupportedThinkingTokenGeminiModel(model)) {
@ -171,9 +178,13 @@ export function isGeminiReasoningModel(model?: Model): boolean {
return false
}

// Regex for Gemini models that support thinking mode
export const GEMINI_THINKING_MODEL_REGEX =
/gemini-(?:2\.5.*(?:-latest)?|flash-latest|pro-latest|flash-lite-latest)(?:-[\w-]+)*$/i

export const isSupportedThinkingTokenGeminiModel = (model: Model): boolean => {
const modelId = getLowerBaseModelName(model.id, '/')
if (modelId.includes('gemini-2.5')) {
if (GEMINI_THINKING_MODEL_REGEX.test(modelId)) {
if (modelId.includes('image') || modelId.includes('tts')) {
return false
}
@ -328,14 +339,20 @@ export const isSupportedReasoningEffortPerplexityModel = (model: Model): boolean

export const isSupportedThinkingTokenZhipuModel = (model: Model): boolean => {
const modelId = getLowerBaseModelName(model.id, '/')
return modelId.includes('glm-4.5')
return ['glm-4.5', 'glm-4.6'].some((id) => modelId.includes(id))
}

export const isDeepSeekHybridInferenceModel = (model: Model) => {
const modelId = getLowerBaseModelName(model.id)
// The official DeepSeek API controls reasoning via separate chat/reasoner model IDs; other providers need their own checks and their IDs may differ
// openrouter: deepseek/deepseek-chat-v3.1. Another provider might mirror the official split and expose a non-thinking model under the same id, so this check carries some risk
return /deepseek-v3(?:\.1|-1-\d+)/.test(modelId) || modelId.includes('deepseek-chat-v3.1')
// Matches: "deepseek-v3" followed by ".digit" or "-digit".
// Optionally, this can be followed by ".alphanumeric_sequence" or "-alphanumeric_sequence"
// until the end of the string.
// Examples: deepseek-v3.1, deepseek-v3-1, deepseek-v3.1.2, deepseek-v3.1-alpha
// Does NOT match: deepseek-v3.123 (missing separator after '1'), deepseek-v3.x (x isn't a digit)
// TODO: move to utils and add test cases
return /deepseek-v3(?:\.\d|-\d)(?:(\.|-)\w+)?$/.test(modelId) || modelId.includes('deepseek-chat-v3.1')
}

export const isSupportedThinkingTokenDeepSeekModel = isDeepSeekHybridInferenceModel
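A quick way to sanity-check the tightened DeepSeek pattern is to run it over the IDs named in the comments above. A small standalone sketch (plain Node/TypeScript, no app imports; the ID lists are the documented examples plus a bare `deepseek-v3` for contrast):

```ts
// Same pattern as in the diff; model IDs are assumed to be lowercased already.
const DEEPSEEK_HYBRID_REGEX = /deepseek-v3(?:\.\d|-\d)(?:(\.|-)\w+)?$/

const shouldMatch = ['deepseek-v3.1', 'deepseek-v3-1', 'deepseek-v3.1.2', 'deepseek-v3.1-alpha']
const shouldNotMatch = ['deepseek-v3.123', 'deepseek-v3.x', 'deepseek-v3']

for (const id of shouldMatch) console.log(id, DEEPSEEK_HYBRID_REGEX.test(id))    // all true
for (const id of shouldNotMatch) console.log(id, DEEPSEEK_HYBRID_REGEX.test(id)) // all false
```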
@ -12,6 +12,7 @@ const visionAllowedModels = [
'gemini-1\\.5',
'gemini-2\\.0',
'gemini-2\\.5',
'gemini-(flash|pro|flash-lite)-latest',
'gemini-exp',
'claude-3',
'claude-sonnet-4',
@ -21,7 +22,9 @@ const visionAllowedModels = [
'qwen-vl',
'qwen2-vl',
'qwen2.5-vl',
'qwen3-vl',
'qwen2.5-omni',
'qwen3-omni',
'qvq',
'internvl2',
'grok-vision-beta',
@ -11,9 +11,12 @@ export const CLAUDE_SUPPORTED_WEBSEARCH_REGEX = new RegExp(
'i'
)

export const GEMINI_FLASH_MODEL_REGEX = new RegExp('gemini-.*-flash.*$')
export const GEMINI_FLASH_MODEL_REGEX = new RegExp('gemini.*-flash.*$')

export const GEMINI_SEARCH_REGEX = new RegExp('gemini-2\\..*', 'i')
export const GEMINI_SEARCH_REGEX = new RegExp(
'gemini-(?:2.*(?:-latest)?|flash-latest|pro-latest|flash-lite-latest)(?:-[\\w-]+)*$',
'i'
)
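The widened `GEMINI_SEARCH_REGEX` keeps matching the dated `gemini-2.x` IDs and additionally accepts the alias-style `-latest` IDs. A small check against a few plausible IDs (the ID strings are illustrative, not an exhaustive list from the codebase):

```ts
// Same pattern as the new GEMINI_SEARCH_REGEX above.
const GEMINI_SEARCH_REGEX = new RegExp(
  'gemini-(?:2.*(?:-latest)?|flash-latest|pro-latest|flash-lite-latest)(?:-[\\w-]+)*$',
  'i'
)

const ids = ['gemini-2.5-pro', 'gemini-2.0-flash', 'gemini-flash-latest', 'gemini-pro-latest', 'gemini-1.5-pro']
for (const id of ids) console.log(id, GEMINI_SEARCH_REGEX.test(id))
// gemini-2.5-pro → true, gemini-2.0-flash → true, gemini-flash-latest → true,
// gemini-pro-latest → true, gemini-1.5-pro → false
```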

export const PERPLEXITY_SEARCH_MODELS = [
'sonar-pro',
@ -24,6 +24,7 @@ import GrokProviderLogo from '@renderer/assets/images/providers/grok.png'
import GroqProviderLogo from '@renderer/assets/images/providers/groq.png'
import HyperbolicProviderLogo from '@renderer/assets/images/providers/hyperbolic.png'
import InfiniProviderLogo from '@renderer/assets/images/providers/infini.png'
import IntelOvmsLogo from '@renderer/assets/images/providers/intel.png'
import JinaProviderLogo from '@renderer/assets/images/providers/jina.png'
import LanyunProviderLogo from '@renderer/assets/images/providers/lanyun.png'
import LMStudioProviderLogo from '@renderer/assets/images/providers/lmstudio.png'
@ -102,6 +103,16 @@ export const SYSTEM_PROVIDERS_CONFIG: Record<SystemProviderId, SystemProvider> =
isSystem: true,
enabled: false
},
ovms: {
id: 'ovms',
name: 'OpenVINO Model Server',
type: 'openai',
apiKey: '',
apiHost: 'http://localhost:8000/v3/',
models: SYSTEM_MODELS.ovms,
isSystem: true,
enabled: false
},
ocoolai: {
id: 'ocoolai',
name: 'ocoolAI',
@ -642,6 +653,7 @@ export const PROVIDER_LOGO_MAP: AtLeast<SystemProviderId, string> = {
yi: ZeroOneProviderLogo,
groq: GroqProviderLogo,
zhipu: ZhipuProviderLogo,
ovms: IntelOvmsLogo,
ollama: OllamaProviderLogo,
lmstudio: LMStudioProviderLogo,
moonshot: MoonshotProviderLogo,
@ -1027,6 +1039,16 @@ export const PROVIDER_URLS: Record<SystemProviderId, ProviderUrls> = {
models: 'https://console.groq.com/docs/models'
}
},
ovms: {
api: {
url: 'http://localhost:8000/v3/'
},
websites: {
official: 'https://www.intel.com/content/www/us/en/developer/tools/openvino-toolkit/overview.html',
docs: 'https://docs.openvino.ai/2025/model-server/ovms_what_is_openvino_model_server.html',
models: 'https://www.modelscope.cn/organization/OpenVINO'
}
},
ollama: {
api: {
url: 'http://localhost:11434'
@ -12,7 +12,7 @@ import {
setSelectedTerminal
} from '@renderer/store/codeTools'
import type { Model } from '@renderer/types'
import type { codeTools } from '@shared/config/constant'
import { codeTools } from '@shared/config/constant'
import { useCallback } from 'react'

export const useCodeTools = () => {
@ -108,7 +108,11 @@ export const useCodeTools = () => {
const environmentVariables = codeToolsState?.environmentVariables?.[codeToolsState.selectedCliTool] || ''

// Check whether launch is possible (all required fields are filled in)
const canLaunch = Boolean(codeToolsState.selectedCliTool && selectedModel && codeToolsState.currentDirectory)
const canLaunch = Boolean(
codeToolsState.selectedCliTool &&
codeToolsState.currentDirectory &&
(codeToolsState.selectedCliTool === codeTools.githubCopilotCli || selectedModel)
)
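The reworked `canLaunch` makes the model selection optional for the GitHub Copilot CLI while keeping it mandatory for every other tool. A reduced sketch of that condition (tool name, shapes, and the free function are simplified stand-ins for the hook's state):

```ts
// Simplified stand-ins for the real store state and the codeTools enum.
const githubCopilotCli = 'github-copilot-cli'

function canLaunch(selectedCliTool: string, currentDirectory: string, selectedModel?: { id: string }) {
  return Boolean(selectedCliTool && currentDirectory && (selectedCliTool === githubCopilotCli || selectedModel))
}

canLaunch('claude-code', '/repo', { id: 'claude-sonnet-4-5' }) // true
canLaunch('claude-code', '/repo')                              // false: a model is still required
canLaunch(githubCopilotCli, '/repo')                           // true: no model required for Copilot CLI
```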

return {
// State
@ -48,6 +48,17 @@ export function useActiveTopic(assistantId: string, topic?: Topic) {
}
}, [activeTopic?.id, assistant])

useEffect(() => {
if (!assistant?.topics?.length || !activeTopic) {
return
}

const latestTopic = assistant.topics.find((item) => item.id === activeTopic.id)
if (latestTopic && latestTopic !== activeTopic) {
setActiveTopic(latestTopic)
}
}, [assistant?.topics, activeTopic])

return { activeTopic, setActiveTopic }
}

@ -62,6 +62,7 @@ const providerKeyMap = {
nvidia: 'provider.nvidia',
o3: 'provider.o3',
ocoolai: 'provider.ocoolai',
ovms: 'provider.ovms',
ollama: 'provider.ollama',
openai: 'provider.openai',
openrouter: 'provider.openrouter',
@ -251,6 +251,7 @@
|
||||
"added": "Added",
|
||||
"case_sensitive": "Case Sensitive",
|
||||
"collapse": "Collapse",
|
||||
"download": "Download",
|
||||
"includes_user_questions": "Include Your Questions",
|
||||
"manage": "Manage",
|
||||
"select_model": "Select Model",
|
||||
@ -1696,6 +1697,12 @@
|
||||
"provider_settings": "Go to provider settings"
|
||||
},
|
||||
"notes": {
|
||||
"auto_rename": {
|
||||
"empty_note": "Note is empty, cannot generate name",
|
||||
"failed": "Failed to generate note name",
|
||||
"label": "Generate Note Name",
|
||||
"success": "Note name generated successfully"
|
||||
},
|
||||
"characters": "Characters",
|
||||
"collapse": "Collapse",
|
||||
"content_placeholder": "Please enter the note content...",
|
||||
@ -1777,6 +1784,8 @@
|
||||
"sort_updated_asc": "Update time (oldest first)",
|
||||
"sort_updated_desc": "Update time (newest first)",
|
||||
"sort_z2a": "File name (Z-A)",
|
||||
"spell_check": "Spell Check",
|
||||
"spell_check_tooltip": "Enable/Disable spell check",
|
||||
"star": "Favorite note",
|
||||
"starred_notes": "Collected notes",
|
||||
"title": "Notes",
|
||||
@ -1826,6 +1835,57 @@
|
||||
},
|
||||
"title": "Ollama"
|
||||
},
|
||||
"ovms": {
|
||||
"action": {
|
||||
"install": "Install",
|
||||
"installing": "Installing",
|
||||
"reinstall": "Re-Install",
|
||||
"run": "Run OVMS",
|
||||
"starting": "Starting",
|
||||
"stop": "Stop OVMS",
|
||||
"stopping": "Stopping"
|
||||
},
|
||||
"description": "<div><p>1. Download OV Models.</p><p>2. Add Models in 'Manager'.</p><p>Support Windows Only!</p><p>OVMS Install Path: '%USERPROFILE%\\.cherrystudio\\ovms' .</p><p>Please refer to <a href=https://github.com/openvinotoolkit/model_server/blob/c55551763d02825829337b62c2dcef9339706f79/docs/deploying_server_baremetal.md>Intel OVMS Guide</a></p></dev>",
|
||||
"download": {
|
||||
"button": "Download",
|
||||
"error": "Download Error",
|
||||
"model_id": {
|
||||
"label": "Model ID:",
|
||||
"model_id_pattern": "Model ID must start with OpenVINO/",
|
||||
"placeholder": "Required e.g. OpenVINO/Qwen3-8B-int4-ov",
|
||||
"required": "Please enter the model ID"
|
||||
},
|
||||
"model_name": {
|
||||
"label": "Model Name:",
|
||||
"placeholder": "Required e.g. Qwen3-8B-int4-ov",
|
||||
"required": "Please enter the model name"
|
||||
},
|
||||
"model_source": "Model Source:",
|
||||
"model_task": "Model Task:",
|
||||
"success": "Download successful",
|
||||
"success_desc": "Model \"{{modelName}}\"-\"{{modelId}}\" downloaded successfully, please go to the OVMS management interface to add the model",
|
||||
"tip": "The model is downloading, sometimes it takes hours. Please be patient...",
|
||||
"title": "Download Intel OpenVINO Model"
|
||||
},
|
||||
"failed": {
|
||||
"install": "Install OVMS failed:",
|
||||
"install_code_100": "Unknown Error",
|
||||
"install_code_101": "Only supports Intel(R) Core(TM) Ultra CPU",
|
||||
"install_code_102": "Only supports Windows",
|
||||
"install_code_103": "Download OVMS runtime failed",
|
||||
"install_code_104": "Uncompress OVMS runtime failed",
|
||||
"install_code_105": "Clean OVMS runtime failed",
|
||||
"run": "Run OVMS failed:",
|
||||
"stop": "Stop OVMS failed:"
|
||||
},
|
||||
"status": {
|
||||
"not_installed": "OVMS is not installed",
|
||||
"not_running": "OVMS is not running",
|
||||
"running": "OVMS is running",
|
||||
"unknown": "OVMS status unknown"
|
||||
},
|
||||
"title": "Intel OVMS"
|
||||
},
|
||||
"paintings": {
|
||||
"aspect_ratio": "Aspect Ratio",
|
||||
"aspect_ratios": {
|
||||
@ -2057,6 +2117,7 @@
|
||||
"ollama": "Ollama",
|
||||
"openai": "OpenAI",
|
||||
"openrouter": "OpenRouter",
|
||||
"ovms": "Intel OVMS",
|
||||
"perplexity": "Perplexity",
|
||||
"ph8": "PH8",
|
||||
"poe": "Poe",
|
||||
|
||||
@ -251,6 +251,7 @@
|
||||
"added": "已添加",
|
||||
"case_sensitive": "区分大小写",
|
||||
"collapse": "收起",
|
||||
"download": "下载",
|
||||
"includes_user_questions": "包含用户提问",
|
||||
"manage": "管理",
|
||||
"select_model": "选择模型",
|
||||
@ -1696,6 +1697,12 @@
|
||||
"provider_settings": "跳转到服务商设置界面"
|
||||
},
|
||||
"notes": {
|
||||
"auto_rename": {
|
||||
"empty_note": "笔记为空,无法生成名称",
|
||||
"failed": "生成笔记名称失败",
|
||||
"label": "生成笔记名称",
|
||||
"success": "笔记名称生成成功"
|
||||
},
|
||||
"characters": "字符",
|
||||
"collapse": "收起",
|
||||
"content_placeholder": "请输入笔记内容...",
|
||||
@ -1777,6 +1784,8 @@
|
||||
"sort_updated_asc": "更新时间(从旧到新)",
|
||||
"sort_updated_desc": "更新时间(从新到旧)",
|
||||
"sort_z2a": "文件名(Z-A)",
|
||||
"spell_check": "拼写检查",
|
||||
"spell_check_tooltip": "启用/禁用拼写检查",
|
||||
"star": "收藏笔记",
|
||||
"starred_notes": "收藏的笔记",
|
||||
"title": "笔记",
|
||||
@ -1826,6 +1835,57 @@
|
||||
},
|
||||
"title": "Ollama"
|
||||
},
|
||||
"ovms": {
|
||||
"action": {
|
||||
"install": "安装",
|
||||
"installing": "正在安装",
|
||||
"reinstall": "重装",
|
||||
"run": "运行 OVMS",
|
||||
"starting": "启动中",
|
||||
"stop": "停止 OVMS",
|
||||
"stopping": "停止中"
|
||||
},
|
||||
"description": "<div><p>1. 下载 OV 模型.</p><p>2. 在 'Manager' 中添加模型.</p><p>仅支持 Windows!</p><p>OVMS 安装路径: '%USERPROFILE%\\.cherrystudio\\ovms' .</p><p>请参考 <a href=https://github.com/openvinotoolkit/model_server/blob/c55551763d02825829337b62c2dcef9339706f79/docs/deploying_server_baremetal.md>Intel OVMS 指南</a></p></dev>",
|
||||
"download": {
|
||||
"button": "下载",
|
||||
"error": "选择失败",
|
||||
"model_id": {
|
||||
"label": "模型 ID",
|
||||
"model_id_pattern": "模型 ID 必须以 OpenVINO/ 开头",
|
||||
"placeholder": "必填,例如 OpenVINO/Qwen3-8B-int4-ov",
|
||||
"required": "请输入模型 ID"
|
||||
},
|
||||
"model_name": {
|
||||
"label": "模型名称",
|
||||
"placeholder": "必填,例如 Qwen3-8B-int4-ov",
|
||||
"required": "请输入模型名称"
|
||||
},
|
||||
"model_source": "模型来源:",
|
||||
"model_task": "模型任务:",
|
||||
"success": "下载成功",
|
||||
"success_desc": "模型\"{{modelName}}\"-\"{{modelId}}\"下载成功,请前往 OVMS 管理界面添加模型",
|
||||
"tip": "模型正在下载,有时需要几个小时。请耐心等待...",
|
||||
"title": "下载 Intel OpenVINO 模型"
|
||||
},
|
||||
"failed": {
|
||||
"install": "安装 OVMS 失败:",
|
||||
"install_code_100": "未知错误",
|
||||
"install_code_101": "仅支持 Intel(R) Core(TM) Ultra CPU",
|
||||
"install_code_102": "仅支持 Windows",
|
||||
"install_code_103": "下载 OVMS runtime 失败",
|
||||
"install_code_104": "解压 OVMS runtime 失败",
|
||||
"install_code_105": "清理 OVMS runtime 失败",
|
||||
"run": "运行 OVMS 失败:",
|
||||
"stop": "停止 OVMS 失败:"
|
||||
},
|
||||
"status": {
|
||||
"not_installed": "OVMS 未安装",
|
||||
"not_running": "OVMS 未运行",
|
||||
"running": "OVMS 正在运行",
|
||||
"unknown": "OVMS 状态未知"
|
||||
},
|
||||
"title": "Intel OVMS"
|
||||
},
|
||||
"paintings": {
|
||||
"aspect_ratio": "画幅比例",
|
||||
"aspect_ratios": {
|
||||
@ -2057,6 +2117,7 @@
|
||||
"ollama": "Ollama",
|
||||
"openai": "OpenAI",
|
||||
"openrouter": "OpenRouter",
|
||||
"ovms": "Intel OVMS",
|
||||
"perplexity": "Perplexity",
|
||||
"ph8": "PH8 大模型开放平台",
|
||||
"poe": "Poe",
|
||||
|
||||
@ -251,6 +251,7 @@
|
||||
"added": "已新增",
|
||||
"case_sensitive": "區分大小寫",
|
||||
"collapse": "折疊",
|
||||
"download": "下載",
|
||||
"includes_user_questions": "包含使用者提問",
|
||||
"manage": "管理",
|
||||
"select_model": "選擇模型",
|
||||
@ -1696,6 +1697,12 @@
|
||||
"provider_settings": "跳轉到服務商設置界面"
|
||||
},
|
||||
"notes": {
|
||||
"auto_rename": {
|
||||
"empty_note": "筆記為空,無法生成名稱",
|
||||
"failed": "生成筆記名稱失敗",
|
||||
"label": "生成筆記名稱",
|
||||
"success": "筆記名稱生成成功"
|
||||
},
|
||||
"characters": "字符",
|
||||
"collapse": "收起",
|
||||
"content_placeholder": "請輸入筆記內容...",
|
||||
@ -1777,6 +1784,8 @@
|
||||
"sort_updated_asc": "更新時間(從舊到新)",
|
||||
"sort_updated_desc": "更新時間(從新到舊)",
|
||||
"sort_z2a": "文件名(Z-A)",
|
||||
"spell_check": "拼寫檢查",
|
||||
"spell_check_tooltip": "啟用/禁用拼寫檢查",
|
||||
"star": "收藏筆記",
|
||||
"starred_notes": "收藏的筆記",
|
||||
"title": "筆記",
|
||||
@ -1826,6 +1835,57 @@
|
||||
},
|
||||
"title": "Ollama"
|
||||
},
|
||||
"ovms": {
|
||||
"action": {
|
||||
"install": "安裝",
|
||||
"installing": "正在安裝",
|
||||
"reinstall": "重新安裝",
|
||||
"run": "執行 OVMS",
|
||||
"starting": "啟動中",
|
||||
"stop": "停止 OVMS",
|
||||
"stopping": "停止中"
|
||||
},
|
||||
"description": "<div><p>1. 下載 OV 模型。</p><p>2. 在 'Manager' 中新增模型。</p><p>僅支援 Windows!</p><p>OVMS 安裝路徑: '%USERPROFILE%\\.cherrystudio\\ovms' 。</p><p>請參考 <a href=https://github.com/openvinotoolkit/model_server/blob/c55551763d02825829337b62c2dcef9339706f79/docs/deploying_server_baremetal.md>Intel OVMS 指南</a></p></dev>",
|
||||
"download": {
|
||||
"button": "下載",
|
||||
"error": "選擇失敗",
|
||||
"model_id": {
|
||||
"label": "模型 ID",
|
||||
"model_id_pattern": "模型 ID 必須以 OpenVINO/ 開頭",
|
||||
"placeholder": "必填,例如 OpenVINO/Qwen3-8B-int4-ov",
|
||||
"required": "請輸入模型 ID"
|
||||
},
|
||||
"model_name": {
|
||||
"label": "模型名稱",
|
||||
"placeholder": "必填,例如 Qwen3-8B-int4-ov",
|
||||
"required": "請輸入模型名稱"
|
||||
},
|
||||
"model_source": "模型來源:",
|
||||
"model_task": "模型任務:",
|
||||
"success": "下載成功",
|
||||
"success_desc": "模型\"{{modelName}}\"-\"{{modelId}}\"下載成功,請前往 OVMS 管理界面添加模型",
|
||||
"tip": "模型正在下載,有時需要幾個小時。請耐心等候...",
|
||||
"title": "下載 Intel OpenVINO 模型"
|
||||
},
|
||||
"failed": {
|
||||
"install": "安裝 OVMS 失敗:",
|
||||
"install_code_100": "未知錯誤",
|
||||
"install_code_101": "僅支援 Intel(R) Core(TM) Ultra CPU",
|
||||
"install_code_102": "僅支援 Windows",
|
||||
"install_code_103": "下載 OVMS runtime 失敗",
|
||||
"install_code_104": "解壓 OVMS runtime 失敗",
|
||||
"install_code_105": "清理 OVMS runtime 失敗",
|
||||
"run": "執行 OVMS 失敗:",
|
||||
"stop": "停止 OVMS 失敗:"
|
||||
},
|
||||
"status": {
|
||||
"not_installed": "OVMS 未安裝",
|
||||
"not_running": "OVMS 未執行",
|
||||
"running": "OVMS 正在執行",
|
||||
"unknown": "OVMS 狀態未知"
|
||||
},
|
||||
"title": "Intel OVMS"
|
||||
},
|
||||
"paintings": {
|
||||
"aspect_ratio": "畫幅比例",
|
||||
"aspect_ratios": {
|
||||
@ -2057,6 +2117,7 @@
|
||||
"ollama": "Ollama",
|
||||
"openai": "OpenAI",
|
||||
"openrouter": "OpenRouter",
|
||||
"ovms": "Intel OVMS",
|
||||
"perplexity": "Perplexity",
|
||||
"ph8": "PH8 大模型開放平台",
|
||||
"poe": "Poe",
|
||||
|
||||
@ -251,6 +251,7 @@
|
||||
"added": "προστέθηκε",
|
||||
"case_sensitive": "Διάκριση πεζών/κεφαλαίων",
|
||||
"collapse": "συμπεριλάβετε",
|
||||
"download": "Λήψη",
|
||||
"includes_user_questions": "Περιλαμβάνει ερωτήσεις χρήστη",
|
||||
"manage": "χειριστείτε",
|
||||
"select_model": "επιλογή μοντέλου",
|
||||
@ -333,6 +334,7 @@
|
||||
"new_topic": "Νέο θέμα {{Command}}",
|
||||
"pause": "Παύση",
|
||||
"placeholder": "Εισάγετε μήνυμα εδώ...",
|
||||
"placeholder_without_triggers": "Εδώ εισαγάγετε το μήνυμα, πατήστε {{key}} για αποστολή",
|
||||
"send": "Αποστολή",
|
||||
"settings": "Ρυθμίσεις",
|
||||
"thinking": {
|
||||
@ -1695,6 +1697,12 @@
|
||||
"provider_settings": "Μετάβαση στις ρυθμίσεις παρόχου"
|
||||
},
|
||||
"notes": {
|
||||
"auto_rename": {
|
||||
"empty_note": "Το σημείωμα είναι κενό, δεν μπορεί να δημιουργηθεί όνομα",
|
||||
"failed": "Αποτυχία δημιουργίας ονόματος σημείωσης",
|
||||
"label": "Δημιουργία ονόματος σημείωσης",
|
||||
"success": "Η δημιουργία του ονόματος σημειώσεων ολοκληρώθηκε με επιτυχία"
|
||||
},
|
||||
"characters": "χαρακτήρας",
|
||||
"collapse": "σύμπτυξη",
|
||||
"content_placeholder": "Παρακαλώ εισαγάγετε το περιεχόμενο των σημειώσεων...",
|
||||
@ -1776,6 +1784,8 @@
|
||||
"sort_updated_asc": "χρόνος ενημέρωσης (από παλιά στα νέα)",
|
||||
"sort_updated_desc": "χρόνος ενημέρωσης (από νεώτερο σε παλαιότερο)",
|
||||
"sort_z2a": "όνομα αρχείου (Z-A)",
|
||||
"spell_check": "Έλεγχος ορθογραφίας",
|
||||
"spell_check_tooltip": "Ενεργοποίηση/Απενεργοποίηση ελέγχου ορθογραφίας",
|
||||
"star": "Αγαπημένες σημειώσεις",
|
||||
"starred_notes": "Σημειώσεις συλλογής",
|
||||
"title": "σημειώσεις",
|
||||
@ -1825,6 +1835,57 @@
|
||||
},
|
||||
"title": "Ollama"
|
||||
},
|
||||
"ovms": {
|
||||
"action": {
|
||||
"install": "Εγκατάσταση",
|
||||
"installing": "Εγκατάσταση σε εξέλιξη",
|
||||
"reinstall": "Επανεγκατάσταση",
|
||||
"run": "Εκτέλεση OVMS",
|
||||
"starting": "Εκκίνηση σε εξέλιξη",
|
||||
"stop": "Διακοπή OVMS",
|
||||
"stopping": "Διακοπή σε εξέλιξη"
|
||||
},
|
||||
"description": "<div><p>1. Λήψη μοντέλου OV.</p><p>2. Προσθήκη μοντέλου στο 'Manager'.</p><p>Υποστηρίζεται μόνο στα Windows!</p><p>Διαδρομή εγκατάστασης OVMS: '%USERPROFILE%\\.cherrystudio\\ovms' .</p><p>Ανατρέξτε στον <a href=https://github.com/openvinotoolkit/model_server/blob/c55551763d02825829337b62c2dcef9339706f79/docs/deploying_server_baremetal.md>Οδηγό Intel OVMS</a></p></div>",
|
||||
"download": {
|
||||
"button": "Λήψη",
|
||||
"error": "Η επιλογή απέτυχε",
|
||||
"model_id": {
|
||||
"label": "Αναγνωριστικό μοντέλου:",
|
||||
"model_id_pattern": "Το αναγνωριστικό μοντέλου πρέπει να ξεκινά με OpenVINO/",
|
||||
"placeholder": "Απαιτείται, π.χ. OpenVINO/Qwen3-8B-int4-ov",
|
||||
"required": "Παρακαλώ εισάγετε το αναγνωριστικό μοντέλου"
|
||||
},
|
||||
"model_name": {
|
||||
"label": "Όνομα μοντέλου:",
|
||||
"placeholder": "Απαιτείται, π.χ. Qwen3-8B-int4-ov",
|
||||
"required": "Παρακαλώ εισάγετε το όνομα του μοντέλου"
|
||||
},
|
||||
"model_source": "Πηγή μοντέλου:",
|
||||
"model_task": "Εργασία μοντέλου:",
|
||||
"success": "Η λήψη ολοκληρώθηκε με επιτυχία",
|
||||
"success_desc": "Το μοντέλο \"{{modelName}}\"-\"{{modelId}}\" λήφθηκε επιτυχώς, παρακαλώ μεταβείτε στη διεπαφή διαχείρισης OVMS για να προσθέσετε το μοντέλο",
|
||||
"tip": "Το μοντέλο κατεβαίνει, μερικές φορές χρειάζονται αρκετές ώρες. Παρακαλώ περιμένετε υπομονετικά...",
|
||||
"title": "Λήψη μοντέλου Intel OpenVINO"
|
||||
},
|
||||
"failed": {
|
||||
"install": "Η εγκατάσταση του OVMS απέτυχε:",
|
||||
"install_code_100": "Άγνωστο σφάλμα",
|
||||
"install_code_101": "Υποστηρίζεται μόνο σε Intel(R) Core(TM) Ultra CPU",
|
||||
"install_code_102": "Υποστηρίζεται μόνο στα Windows",
|
||||
"install_code_103": "Η λήψη του OVMS runtime απέτυχε",
|
||||
"install_code_104": "Η αποσυμπίεση του OVMS runtime απέτυχε",
|
||||
"install_code_105": "Ο καθαρισμός του OVMS runtime απέτυχε",
|
||||
"run": "Η εκτέλεση του OVMS απέτυχε:",
|
||||
"stop": "Η διακοπή του OVMS απέτυχε:"
|
||||
},
|
||||
"status": {
|
||||
"not_installed": "Το OVMS δεν έχει εγκατασταθεί",
|
||||
"not_running": "Το OVMS δεν εκτελείται",
|
||||
"running": "Το OVMS εκτελείται",
|
||||
"unknown": "Άγνωστη κατάσταση OVMS"
|
||||
},
|
||||
"title": "Intel OVMS"
|
||||
},
|
||||
"paintings": {
|
||||
"aspect_ratio": "Λόγος διαστάσεων",
|
||||
"aspect_ratios": {
|
||||
@ -2056,6 +2117,7 @@
|
||||
"ollama": "Ollama",
|
||||
"openai": "OpenAI",
|
||||
"openrouter": "OpenRouter",
|
||||
"ovms": "Intel OVMS",
|
||||
"perplexity": "Perplexity",
|
||||
"ph8": "Πλατφόρμα Ανοιχτής Μεγάλης Μοντέλου PH8",
|
||||
"poe": "Poe",
|
||||
|
||||
@ -251,6 +251,7 @@
|
||||
"added": "Agregado",
|
||||
"case_sensitive": "Distingue mayúsculas y minúsculas",
|
||||
"collapse": "Colapsar",
|
||||
"download": "Descargar",
|
||||
"includes_user_questions": "Incluye preguntas del usuario",
|
||||
"manage": "Administrar",
|
||||
"select_model": "Seleccionar Modelo",
|
||||
@ -333,6 +334,7 @@
|
||||
"new_topic": "Nuevo tema {{Command}}",
|
||||
"pause": "Pausar",
|
||||
"placeholder": "Escribe aquí tu mensaje...",
|
||||
"placeholder_without_triggers": "Escriba un mensaje aquí y presione {{key}} para enviar",
|
||||
"send": "Enviar",
|
||||
"settings": "Configuración",
|
||||
"thinking": {
|
||||
@ -1695,6 +1697,12 @@
|
||||
"provider_settings": "Ir a la configuración del proveedor"
|
||||
},
|
||||
"notes": {
|
||||
"auto_rename": {
|
||||
"empty_note": "La nota está vacía, no se puede generar un nombre",
|
||||
"failed": "Error al generar el nombre de la nota",
|
||||
"label": "Generar nombre de nota",
|
||||
"success": "Se ha generado correctamente el nombre de la nota"
|
||||
},
|
||||
"characters": "carácter",
|
||||
"collapse": "ocultar",
|
||||
"content_placeholder": "Introduzca el contenido de la nota...",
|
||||
@ -1776,6 +1784,8 @@
|
||||
"sort_updated_asc": "Fecha de actualización (de más antigua a más reciente)",
|
||||
"sort_updated_desc": "Fecha de actualización (de más nuevo a más antiguo)",
|
||||
"sort_z2a": "Nombre de archivo (Z-A)",
|
||||
"spell_check": "comprobación ortográfica",
|
||||
"spell_check_tooltip": "Habilitar/deshabilitar revisión ortográfica",
|
||||
"star": "Notas guardadas",
|
||||
"starred_notes": "notas guardadas",
|
||||
"title": "notas",
|
||||
@ -1825,6 +1835,57 @@
|
||||
},
|
||||
"title": "Ollama"
|
||||
},
|
||||
"ovms": {
|
||||
"action": {
|
||||
"install": "Instalar",
|
||||
"installing": "Instalando",
|
||||
"reinstall": "Reinstalar",
|
||||
"run": "Ejecutar OVMS",
|
||||
"starting": "Iniciando",
|
||||
"stop": "Detener OVMS",
|
||||
"stopping": "Deteniendo"
|
||||
},
|
||||
"description": "<div><p>1. Descargar modelo OV.</p><p>2. Agregar modelo en 'Administrador'.</p><p>¡Solo compatible con Windows!</p><p>Ruta de instalación de OVMS: '%USERPROFILE%\\.cherrystudio\\ovms' .</p><p>Consulte la <a href=https://github.com/openvinotoolkit/model_server/blob/c55551763d02825829337b62c2dcef9339706f79/docs/deploying_server_baremetal.md>Guía de Intel OVMS</a></p></dev>",
|
||||
"download": {
|
||||
"button": "Descargar",
|
||||
"error": "Selección fallida",
|
||||
"model_id": {
|
||||
"label": "ID del modelo:",
|
||||
"model_id_pattern": "El ID del modelo debe comenzar con OpenVINO/",
|
||||
"placeholder": "Requerido, por ejemplo, OpenVINO/Qwen3-8B-int4-ov",
|
||||
"required": "Por favor, ingrese el ID del modelo"
|
||||
},
|
||||
"model_name": {
|
||||
"label": "Nombre del modelo:",
|
||||
"placeholder": "Requerido, por ejemplo, Qwen3-8B-int4-ov",
|
||||
"required": "Por favor, ingrese el nombre del modelo"
|
||||
},
|
||||
"model_source": "Fuente del modelo:",
|
||||
"model_task": "Tarea del modelo:",
|
||||
"success": "Descarga exitosa",
|
||||
"success_desc": "El modelo \"{{modelName}}\"-\"{{modelId}}\" se descargó exitosamente, por favor vaya a la interfaz de administración de OVMS para agregar el modelo",
|
||||
"tip": "El modelo se está descargando, a veces toma varias horas. Por favor espere pacientemente...",
|
||||
"title": "Descargar modelo Intel OpenVINO"
|
||||
},
|
||||
"failed": {
|
||||
"install": "Error al instalar OVMS:",
|
||||
"install_code_100": "Error desconocido",
|
||||
"install_code_101": "Solo compatible con CPU Intel(R) Core(TM) Ultra",
|
||||
"install_code_102": "Solo compatible con Windows",
|
||||
"install_code_103": "Error al descargar el tiempo de ejecución de OVMS",
|
||||
"install_code_104": "Error al descomprimir el tiempo de ejecución de OVMS",
|
||||
"install_code_105": "Error al limpiar el tiempo de ejecución de OVMS",
|
||||
"run": "Error al ejecutar OVMS:",
|
||||
"stop": "Error al detener OVMS:"
|
||||
},
|
||||
"status": {
|
||||
"not_installed": "OVMS no instalado",
|
||||
"not_running": "OVMS no está en ejecución",
|
||||
"running": "OVMS en ejecución",
|
||||
"unknown": "Estado de OVMS desconocido"
|
||||
},
|
||||
"title": "Intel OVMS"
|
||||
},
|
||||
"paintings": {
|
||||
"aspect_ratio": "Relación de aspecto",
|
||||
"aspect_ratios": {
|
||||
@ -2056,6 +2117,7 @@
|
||||
"ollama": "Ollama",
|
||||
"openai": "OpenAI",
|
||||
"openrouter": "OpenRouter",
|
||||
"ovms": "Intel OVMS",
|
||||
"perplexity": "Perplejidad",
|
||||
"ph8": "Plataforma Abierta de Grandes Modelos PH8",
|
||||
"poe": "Poe",
|
||||
|
||||
@ -251,6 +251,7 @@
|
||||
"added": "Ajouté",
|
||||
"case_sensitive": "Respecter la casse",
|
||||
"collapse": "Réduire",
|
||||
"download": "Télécharger",
|
||||
"includes_user_questions": "Inclure les questions de l'utilisateur",
|
||||
"manage": "Gérer",
|
||||
"select_model": "Sélectionner le Modèle",
|
||||
@ -333,6 +334,7 @@
|
||||
"new_topic": "Nouveau sujet {{Command}}",
|
||||
"pause": "Pause",
|
||||
"placeholder": "Entrez votre message ici...",
|
||||
"placeholder_without_triggers": "Entrez votre message ici, appuyez sur {{key}} pour envoyer",
|
||||
"send": "Envoyer",
|
||||
"settings": "Paramètres",
|
||||
"thinking": {
|
||||
@ -1695,6 +1697,12 @@
|
||||
"provider_settings": "Aller aux paramètres du fournisseur"
|
||||
},
|
||||
"notes": {
|
||||
"auto_rename": {
|
||||
"empty_note": "La note est vide, impossible de générer un nom",
|
||||
"failed": "Échec de la génération du nom de note",
|
||||
"label": "Générer un nom de note",
|
||||
"success": "La génération du nom de note a réussi"
|
||||
},
|
||||
"characters": "caractère",
|
||||
"collapse": "réduire",
|
||||
"content_placeholder": "Veuillez saisir le contenu de la note...",
|
||||
@ -1776,6 +1784,8 @@
|
||||
"sort_updated_asc": "Heure de mise à jour (du plus ancien au plus récent)",
|
||||
"sort_updated_desc": "Date de mise à jour (du plus récent au plus ancien)",
|
||||
"sort_z2a": "Nom de fichier (Z-A)",
|
||||
"spell_check": "Vérification orthographique",
|
||||
"spell_check_tooltip": "Activer/Désactiver la vérification orthographique",
|
||||
"star": "Notes enregistrées",
|
||||
"starred_notes": "notes de collection",
|
||||
"title": "notes",
|
||||
@ -1825,6 +1835,57 @@
|
||||
},
|
||||
"title": "Ollama"
|
||||
},
|
||||
"ovms": {
|
||||
"action": {
|
||||
"install": "Installer",
|
||||
"installing": "Installation en cours",
|
||||
"reinstall": "Réinstaller",
|
||||
"run": "Exécuter OVMS",
|
||||
"starting": "Démarrage en cours",
|
||||
"stop": "Arrêter OVMS",
|
||||
"stopping": "Arrêt en cours"
|
||||
},
|
||||
"description": "<div><p>1. Télécharger le modèle OV.</p><p>2. Ajouter le modèle dans 'Manager'.</p><p>Uniquement compatible avec Windows !</p><p>Chemin d'installation d'OVMS : '%USERPROFILE%\\.cherrystudio\\ovms' .</p><p>Veuillez vous référer au <a href=https://github.com/openvinotoolkit/model_server/blob/c55551763d02825829337b62c2dcef9339706f79/docs/deploying_server_baremetal.md>Guide Intel OVMS</a></p></dev>",
|
||||
"download": {
|
||||
"button": "Télécharger",
|
||||
"error": "Échec de la sélection",
|
||||
"model_id": {
|
||||
"label": "ID du modèle :",
|
||||
"model_id_pattern": "L'ID du modèle doit commencer par OpenVINO/",
|
||||
"placeholder": "Requis, par exemple OpenVINO/Qwen3-8B-int4-ov",
|
||||
"required": "Veuillez saisir l'ID du modèle"
|
||||
},
|
||||
"model_name": {
|
||||
"label": "Nom du modèle :",
|
||||
"placeholder": "Requis, par exemple Qwen3-8B-int4-ov",
|
||||
"required": "Veuillez saisir le nom du modèle"
|
||||
},
|
||||
"model_source": "Source du modèle :",
|
||||
"model_task": "Tâche du modèle :",
|
||||
"success": "Téléchargement réussi",
|
||||
"success_desc": "Le modèle \"{{modelName}}\"-\"{{modelId}}\" a été téléchargé avec succès, veuillez vous rendre à l'interface de gestion OVMS pour ajouter le modèle",
|
||||
"tip": "Le modèle est en cours de téléchargement, cela peut parfois prendre plusieurs heures. Veuillez patienter...",
|
||||
"title": "Télécharger le modèle Intel OpenVINO"
|
||||
},
|
||||
"failed": {
|
||||
"install": "Échec de l'installation d'OVMS :",
|
||||
"install_code_100": "Erreur inconnue",
|
||||
"install_code_101": "Uniquement compatible avec les processeurs Intel(R) Core(TM) Ultra",
|
||||
"install_code_102": "Uniquement compatible avec Windows",
|
||||
"install_code_103": "Échec du téléchargement du runtime OVMS",
|
||||
"install_code_104": "Échec de la décompression du runtime OVMS",
|
||||
"install_code_105": "Échec du nettoyage du runtime OVMS",
|
||||
"run": "Échec de l'exécution d'OVMS :",
|
||||
"stop": "Échec de l'arrêt d'OVMS :"
|
||||
},
|
||||
"status": {
|
||||
"not_installed": "OVMS non installé",
|
||||
"not_running": "OVMS n'est pas en cours d'exécution",
|
||||
"running": "OVMS en cours d'exécution",
|
||||
"unknown": "État d'OVMS inconnu"
|
||||
},
|
||||
"title": "Intel OVMS"
|
||||
},
|
||||
"paintings": {
|
||||
"aspect_ratio": "Format d'image",
|
||||
"aspect_ratios": {
|
||||
@ -2056,6 +2117,7 @@
|
||||
"ollama": "Ollama",
|
||||
"openai": "OpenAI",
|
||||
"openrouter": "OpenRouter",
|
||||
"ovms": "Intel OVMS",
|
||||
"perplexity": "Perplexité",
|
||||
"ph8": "Plateforme ouverte de grands modèles PH8",
|
||||
"poe": "Poe",
|
||||
|
||||
@ -251,6 +251,7 @@
|
||||
"added": "追加済み",
|
||||
"case_sensitive": "大文字と小文字の区別",
|
||||
"collapse": "折りたたむ",
|
||||
"download": "ダウンロード",
|
||||
"includes_user_questions": "ユーザーからの質問を含む",
|
||||
"manage": "管理",
|
||||
"select_model": "モデルを選択",
|
||||
@ -333,6 +334,7 @@
|
||||
"new_topic": "新しいトピック {{Command}}",
|
||||
"pause": "一時停止",
|
||||
"placeholder": "ここにメッセージを入力し、{{key}} を押して送信...",
|
||||
"placeholder_without_triggers": "ここにメッセージを入力し、{{key}} を押して送信してください",
|
||||
"send": "送信",
|
||||
"settings": "設定",
|
||||
"thinking": {
|
||||
@ -1695,6 +1697,12 @@
|
||||
"provider_settings": "プロバイダー設定に移動"
|
||||
},
|
||||
"notes": {
|
||||
"auto_rename": {
|
||||
"empty_note": "ノートが空です。名前を生成できません。",
|
||||
"failed": "ノート名の生成に失敗しました",
|
||||
"label": "ノート名の生成",
|
||||
"success": "ノート名の生成に成功しました"
|
||||
},
|
||||
"characters": "文字",
|
||||
"collapse": "閉じる",
|
||||
"content_placeholder": "メモの内容を入力してください...",
|
||||
@ -1776,6 +1784,8 @@
|
||||
"sort_updated_asc": "更新日時(古い順)",
|
||||
"sort_updated_desc": "更新日時(新しい順)",
|
||||
"sort_z2a": "ファイル名(Z-A)",
|
||||
"spell_check": "スペルチェック",
|
||||
"spell_check_tooltip": "スペルチェックの有効/無効",
|
||||
"star": "お気に入りのノート",
|
||||
"starred_notes": "収集したノート",
|
||||
"title": "ノート",
|
||||
@ -1825,6 +1835,57 @@
|
||||
},
|
||||
"title": "Ollama"
|
||||
},
|
||||
"ovms": {
|
||||
"action": {
|
||||
"install": "インストール",
|
||||
"installing": "インストール中",
|
||||
"reinstall": "再インストール",
|
||||
"run": "OVMSを実行",
|
||||
"starting": "起動中",
|
||||
"stop": "OVMSを停止",
|
||||
"stopping": "停止中"
|
||||
},
|
||||
"description": "<div><p>1. OVモデルをダウンロードします。</p><p>2. 'マネージャー'でモデルを追加します。</p><p>Windowsのみサポート!</p><p>OVMSインストールパス: '%USERPROFILE%\\.cherrystudio\\ovms' 。</p><p>詳細は<a href=https://github.com/openvinotoolkit/model_server/blob/c55551763d02825829337b62c2dcef9339706f79/docs/deploying_server_baremetal.md>Intel OVMSガイド</a>をご参照ください。</p></dev>",
|
||||
"download": {
|
||||
"button": "ダウンロード",
|
||||
"error": "ダウンロードエラー",
|
||||
"model_id": {
|
||||
"label": "モデルID",
|
||||
"model_id_pattern": "モデルIDはOpenVINO/で始まる必要があります",
|
||||
"placeholder": "必須 例: OpenVINO/Qwen3-8B-int4-ov",
|
||||
"required": "モデルIDを入力してください"
|
||||
},
|
||||
"model_name": {
|
||||
"label": "モデル名",
|
||||
"placeholder": "必須 例: Qwen3-8B-int4-ov",
|
||||
"required": "モデル名を入力してください"
|
||||
},
|
||||
"model_source": "モデルソース:",
|
||||
"model_task": "モデルタスク:",
|
||||
"success": "ダウンロード成功",
|
||||
"success_desc": "モデル\"{{modelName}}\"-\"{{modelId}}\"ダウンロード成功、OVMS管理インターフェースに移動してモデルを追加してください",
|
||||
"tip": "モデルはダウンロードされていますが、時には数時間かかります。我慢してください...",
|
||||
"title": "Intel OpenVINOモデルをダウンロード"
|
||||
},
|
||||
"failed": {
|
||||
"install": "OVMSのインストールに失敗しました:",
|
||||
"install_code_100": "不明なエラー",
|
||||
"install_code_101": "Intel(R) Core(TM) Ultra CPUのみサポート",
|
||||
"install_code_102": "Windowsのみサポート",
|
||||
"install_code_103": "OVMSランタイムのダウンロードに失敗しました",
|
||||
"install_code_104": "OVMSランタイムの解凍に失敗しました",
|
||||
"install_code_105": "OVMSランタイムのクリーンアップに失敗しました",
|
||||
"run": "OVMSの実行に失敗しました:",
|
||||
"stop": "OVMSの停止に失敗しました:"
|
||||
},
|
||||
"status": {
|
||||
"not_installed": "OVMSはインストールされていません",
|
||||
"not_running": "OVMSは実行されていません",
|
||||
"running": "OVMSは実行中です",
|
||||
"unknown": "OVMSのステータスが不明です"
|
||||
},
|
||||
"title": "Intel OVMS"
|
||||
},
|
||||
"paintings": {
|
||||
"aspect_ratio": "画幅比例",
|
||||
"aspect_ratios": {
|
||||
@ -2056,6 +2117,7 @@
|
||||
"ollama": "Ollama",
|
||||
"openai": "OpenAI",
|
||||
"openrouter": "OpenRouter",
|
||||
"ovms": "Intel OVMS",
|
||||
"perplexity": "Perplexity",
|
||||
"ph8": "PH8",
|
||||
"poe": "Poe",
|
||||
|
||||
@ -251,6 +251,7 @@
|
||||
"added": "Adicionado",
|
||||
"case_sensitive": "Diferenciar maiúsculas e minúsculas",
|
||||
"collapse": "Recolher",
|
||||
"download": "Baixar",
|
||||
"includes_user_questions": "Incluir perguntas do usuário",
|
||||
"manage": "Gerenciar",
|
||||
"select_model": "Selecionar Modelo",
|
||||
@ -333,6 +334,7 @@
|
||||
"new_topic": "Novo tópico {{Command}}",
|
||||
"pause": "Pausar",
|
||||
"placeholder": "Digite sua mensagem aqui...",
|
||||
"placeholder_without_triggers": "Digite a mensagem aqui, pressione {{key}} para enviar",
|
||||
"send": "Enviar",
|
||||
"settings": "Configurações",
|
||||
"thinking": {
|
||||
@ -1695,6 +1697,12 @@
|
||||
"provider_settings": "Ir para as configurações do provedor"
|
||||
},
|
||||
"notes": {
|
||||
"auto_rename": {
|
||||
"empty_note": "A nota está vazia, não é possível gerar um nome",
|
||||
"failed": "Falha ao gerar o nome da nota",
|
||||
"label": "Gerar nome da nota",
|
||||
"success": "Nome da nota gerado com sucesso"
|
||||
},
|
||||
"characters": "caractere",
|
||||
"collapse": "[minimizar]",
|
||||
"content_placeholder": "Introduza o conteúdo da nota...",
|
||||
@ -1776,6 +1784,8 @@
|
||||
"sort_updated_asc": "Tempo de atualização (do mais antigo para o mais recente)",
|
||||
"sort_updated_desc": "atualização de tempo (do mais novo para o mais antigo)",
|
||||
"sort_z2a": "Nome do arquivo (Z-A)",
|
||||
"spell_check": "verificação ortográfica",
|
||||
"spell_check_tooltip": "Ativar/Desativar verificação ortográfica",
|
||||
"star": "Notas favoritas",
|
||||
"starred_notes": "notas salvas",
|
||||
"title": "nota",
|
||||
@ -1825,6 +1835,57 @@
|
||||
},
|
||||
"title": "Ollama"
|
||||
},
|
||||
"ovms": {
|
||||
"action": {
|
||||
"install": "Instalar",
|
||||
"installing": "Instalando",
|
||||
"reinstall": "Reinstalar",
|
||||
"run": "Executar OVMS",
|
||||
"starting": "Iniciando",
|
||||
"stop": "Parar OVMS",
|
||||
"stopping": "Parando"
|
||||
},
|
||||
"description": "<div><p>1. Baixe o modelo OV.</p><p>2. Adicione o modelo no 'Gerenciador'.</p><p>Compatível apenas com Windows!</p><p>Caminho de instalação do OVMS: '%USERPROFILE%\\.cherrystudio\\ovms' .</p><p>Consulte o <a href=https://github.com/openvinotoolkit/model_server/blob/c55551763d02825829337b62c2dcef9339706f79/docs/deploying_server_baremetal.md>Guia do Intel OVMS</a></p></dev>",
|
||||
"download": {
|
||||
"button": "Baixar",
|
||||
"error": "Falha na seleção",
|
||||
"model_id": {
|
||||
"label": "ID do modelo:",
|
||||
"model_id_pattern": "O ID do modelo deve começar com OpenVINO/",
|
||||
"placeholder": "Obrigatório, por exemplo, OpenVINO/Qwen3-8B-int4-ov",
|
||||
"required": "Por favor, insira o ID do modelo"
|
||||
},
|
||||
"model_name": {
|
||||
"label": "Nome do modelo:",
|
||||
"placeholder": "Obrigatório, por exemplo, Qwen3-8B-int4-ov",
|
||||
"required": "Por favor, insira o nome do modelo"
|
||||
},
|
||||
"model_source": "Fonte do modelo:",
|
||||
"model_task": "Tarefa do modelo:",
|
||||
"success": "Download concluído com sucesso",
|
||||
"success_desc": "O modelo \"{{modelName}}\"-\"{{modelId}}\" foi baixado com sucesso, por favor vá para a interface de gerenciamento OVMS para adicionar o modelo",
|
||||
"tip": "O modelo está sendo baixado, às vezes leva várias horas. Por favor aguarde pacientemente...",
|
||||
"title": "Baixar modelo Intel OpenVINO"
|
||||
},
|
||||
"failed": {
|
||||
"install": "Falha na instalação do OVMS:",
|
||||
"install_code_100": "Erro desconhecido",
|
||||
"install_code_101": "Compatível apenas com CPU Intel(R) Core(TM) Ultra",
|
||||
"install_code_102": "Compatível apenas com Windows",
|
||||
"install_code_103": "Falha ao baixar o tempo de execução do OVMS",
|
||||
"install_code_104": "Falha ao descompactar o tempo de execução do OVMS",
|
||||
"install_code_105": "Falha ao limpar o tempo de execução do OVMS",
|
||||
"run": "Falha ao executar o OVMS:",
|
||||
"stop": "Falha ao parar o OVMS:"
|
||||
},
|
||||
"status": {
|
||||
"not_installed": "OVMS não instalado",
|
||||
"not_running": "OVMS não está em execução",
|
||||
"running": "OVMS em execução",
|
||||
"unknown": "Status do OVMS desconhecido"
|
||||
},
|
||||
"title": "Intel OVMS"
|
||||
},
|
||||
"paintings": {
|
||||
"aspect_ratio": "Proporção da Imagem",
|
||||
"aspect_ratios": {
|
||||
@ -2056,6 +2117,7 @@
|
||||
"ollama": "Ollama",
|
||||
"openai": "OpenAI",
|
||||
"openrouter": "OpenRouter",
|
||||
"ovms": "Intel OVMS",
|
||||
"perplexity": "Perplexidade",
|
||||
"ph8": "Plataforma Aberta de Grandes Modelos PH8",
|
||||
"poe": "Poe",
|
||||
|
||||
@ -251,6 +251,7 @@
|
||||
"added": "Добавлено",
|
||||
"case_sensitive": "Чувствительность к регистру",
|
||||
"collapse": "Свернуть",
|
||||
"download": "Скачать",
|
||||
"includes_user_questions": "Включает вопросы пользователей",
|
||||
"manage": "Редактировать",
|
||||
"select_model": "Выбрать модель",
|
||||
@ -333,6 +334,7 @@
|
||||
"new_topic": "Новый топик {{Command}}",
|
||||
"pause": "Остановить",
|
||||
"placeholder": "Введите ваше сообщение здесь, нажмите {{key}} для отправки...",
|
||||
"placeholder_without_triggers": "Введите сообщение здесь, нажмите {{key}}, чтобы отправить",
|
||||
"send": "Отправить",
|
||||
"settings": "Настройки",
|
||||
"thinking": {
|
||||
@ -1695,6 +1697,12 @@
|
||||
"provider_settings": "Перейти к настройкам поставщика"
|
||||
},
|
||||
"notes": {
|
||||
"auto_rename": {
|
||||
"empty_note": "Заметки пусты, имя невозможно сгенерировать",
|
||||
"failed": "Создание названия заметки не удалось",
|
||||
"label": "Создать название заметки",
|
||||
"success": "Имя заметки успешно создано"
|
||||
},
|
||||
"characters": "Символы",
|
||||
"collapse": "Свернуть",
|
||||
"content_placeholder": "Введите содержимое заметки...",
|
||||
@ -1776,6 +1784,8 @@
|
||||
"sort_updated_asc": "Время обновления (от старого к новому)",
|
||||
"sort_updated_desc": "Время обновления (от нового к старому)",
|
||||
"sort_z2a": "Имя файла (Я-А)",
|
||||
"spell_check": "Проверка орфографии",
|
||||
"spell_check_tooltip": "Включить/отключить проверку орфографии",
|
||||
"star": "Избранные заметки",
|
||||
"starred_notes": "Сохраненные заметки",
|
||||
"title": "заметки",
|
||||
@ -1825,6 +1835,57 @@
|
||||
},
|
||||
"title": "Ollama"
|
||||
},
|
||||
"ovms": {
|
||||
"action": {
|
||||
"install": "Установить",
|
||||
"installing": "Установка",
|
||||
"reinstall": "Переустановить",
|
||||
"run": "Запустить OVMS",
|
||||
"starting": "Запуск",
|
||||
"stop": "Остановить OVMS",
|
||||
"stopping": "Остановка"
|
||||
},
|
||||
"description": "<div><p>1. Загрузите модели OV.</p><p>2. Добавьте модели в 'Менеджер'.</p><p>Поддерживается только Windows!</p><p>Путь установки OVMS: '%USERPROFILE%\\.cherrystudio\\ovms'.</p><p>Пожалуйста, ознакомьтесь с <a href=https://github.com/openvinotoolkit/model_server/blob/c55551763d02825829337b62c2dcef9339706f79/docs/deploying_server_baremetal.md>руководством Intel OVMS</a></p></dev>",
|
||||
"download": {
|
||||
"button": "Скачать",
|
||||
"error": "Ошибка загрузки",
|
||||
"model_id": {
|
||||
"label": "ID модели",
|
||||
"model_id_pattern": "ID модели должен начинаться с OpenVINO/",
|
||||
"placeholder": "Обязательно, например: OpenVINO/Qwen3-8B-int4-ov",
|
||||
"required": "Пожалуйста, введите ID модели"
|
||||
},
|
||||
"model_name": {
|
||||
"label": "Название модели:",
|
||||
"placeholder": "Обязательно, например: Qwen3-8B-int4-ov",
|
||||
"required": "Пожалуйста, введите название модели"
|
||||
},
|
||||
"model_source": "Источник модели:",
|
||||
"model_task": "Задача модели:",
|
||||
"success": "Скачивание успешно",
|
||||
"success_desc": "Модель \"{{modelName}}\"-\"{{modelId}}\" успешно скачана, пожалуйста, перейдите в интерфейс управления OVMS, чтобы добавить модель",
|
||||
"tip": "Модель загружается, иногда это занимает часы. Пожалуйста, будьте терпеливы...",
|
||||
"title": "Скачать модель Intel OpenVINO"
|
||||
},
|
||||
"failed": {
|
||||
"install": "Ошибка установки OVMS:",
|
||||
"install_code_100": "Неизвестная ошибка",
|
||||
"install_code_101": "Поддерживаются только процессоры Intel(R) Core(TM) Ultra CPU",
|
||||
"install_code_102": "Поддерживается только Windows",
|
||||
"install_code_103": "Ошибка загрузки среды выполнения OVMS",
|
||||
"install_code_104": "Ошибка распаковки среды выполнения OVMS",
|
||||
"install_code_105": "Ошибка очистки среды выполнения OVMS",
|
||||
"run": "Ошибка запуска OVMS:",
|
||||
"stop": "Ошибка остановки OVMS:"
|
||||
},
|
||||
"status": {
|
||||
"not_installed": "OVMS не установлен",
|
||||
"not_running": "OVMS не запущен",
|
||||
"running": "OVMS запущен",
|
||||
"unknown": "Статус OVMS неизвестен"
|
||||
},
|
||||
"title": "Intel OVMS"
|
||||
},
|
||||
"paintings": {
|
||||
"aspect_ratio": "Пропорции изображения",
|
||||
"aspect_ratios": {
|
||||
@ -2056,6 +2117,7 @@
|
||||
"ollama": "Ollama",
|
||||
"openai": "OpenAI",
|
||||
"openrouter": "OpenRouter",
|
||||
"ovms": "Intel OVMS",
|
||||
"perplexity": "Perplexity",
|
||||
"ph8": "PH8",
|
||||
"poe": "Poe",
|
||||
|
||||
@ -101,6 +101,10 @@ const CodeToolsPage: FC = () => {
return m.id.includes('openai') || OPENAI_CODEX_SUPPORTED_PROVIDERS.includes(m.provider)
}

if (selectedCliTool === codeTools.githubCopilotCli) {
return false
}

if (selectedCliTool === codeTools.qwenCode || selectedCliTool === codeTools.iFlowCli) {
if (m.supported_endpoint_types) {
return ['openai', 'openai-response'].some((type) =>
@ -199,7 +203,7 @@ const CodeToolsPage: FC = () => {
}
}

if (!selectedModel) {
if (!selectedModel && selectedCliTool !== codeTools.githubCopilotCli) {
return { isValid: false, message: t('code.model_required') }
}

@ -208,6 +212,11 @@ const CodeToolsPage: FC = () => {

// Prepare the launch environment
const prepareLaunchEnvironment = async (): Promise<Record<string, string> | null> => {
if (selectedCliTool === codeTools.githubCopilotCli) {
const userEnv = parseEnvironmentVariables(environmentVariables)
return userEnv
}

if (!selectedModel) return null

const modelProvider = getProviderByModel(selectedModel)
@ -232,7 +241,9 @@ const CodeToolsPage: FC = () => {

// Execute the launch
const executeLaunch = async (env: Record<string, string>) => {
window.api.codeTools.run(selectedCliTool, selectedModel?.id!, currentDirectory, env, {
const modelId = selectedCliTool === codeTools.githubCopilotCli ? '' : selectedModel?.id!

window.api.codeTools.run(selectedCliTool, modelId, currentDirectory, env, {
autoUpdateToLatest,
terminal: selectedTerminal
})
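Taken together, these hunks special-case the GitHub Copilot CLI: it is excluded from the model picker, exempted from the model-required validation, given only the user-supplied environment variables, and launched with an empty model ID. A condensed sketch of that launch decision (function and field names are simplified stand-ins for the real page logic, including the env merge order):

```ts
// Simplified stand-in; the real page pulls these values from hooks and calls window.api.codeTools.run.
const githubCopilotCli = 'github-copilot-cli'

interface LaunchArgs {
  cliTool: string
  modelId: string
  directory: string
  env: Record<string, string>
}

function buildLaunchArgs(
  cliTool: string,
  directory: string,
  userEnv: Record<string, string>,
  selectedModel?: { id: string },
  providerEnv?: Record<string, string>
): LaunchArgs | null {
  if (cliTool === githubCopilotCli) {
    // Copilot CLI: no model id, only the user-provided environment variables.
    return { cliTool, modelId: '', directory, env: userEnv }
  }
  if (!selectedModel) return null // every other tool still needs a model
  // Provider-derived env merged with the user env (merge order is illustrative).
  return { cliTool, modelId: selectedModel.id, directory, env: { ...providerEnv, ...userEnv } }
}
```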
@ -319,7 +330,12 @@ const CodeToolsPage: FC = () => {
|
||||
banner
|
||||
style={{ borderRadius: 'var(--list-item-border-radius)' }}
|
||||
message={
|
||||
<div style={{ display: 'flex', justifyContent: 'space-between', alignItems: 'center' }}>
|
||||
<div
|
||||
style={{
|
||||
display: 'flex',
|
||||
justifyContent: 'space-between',
|
||||
alignItems: 'center'
|
||||
}}>
|
||||
<span>{t('code.bun_required_message')}</span>
|
||||
<Button
|
||||
color="primary"
|
||||
@ -348,46 +364,68 @@ const CodeToolsPage: FC = () => {
|
||||
/>
|
||||
</SettingsItem>
|
||||
|
||||
<SettingsItem>
|
||||
<div className="settings-label">
|
||||
{t('code.model')}
|
||||
{selectedCliTool === 'claude-code' && (
|
||||
<Popover
|
||||
content={
|
||||
<div style={{ width: 200 }}>
|
||||
<div style={{ marginBottom: 8, fontWeight: 500 }}>{t('code.supported_providers')}</div>
|
||||
<div style={{ display: 'flex', flexDirection: 'column', gap: 8 }}>
|
||||
{getClaudeSupportedProviders(allProviders).map((provider) => {
|
||||
return (
|
||||
<Link
|
||||
key={provider.id}
|
||||
style={{ color: 'var(--color-text)', display: 'flex', alignItems: 'center', gap: 4 }}
|
||||
to={`/settings/provider?id=${provider.id}`}>
|
||||
<Avatar radius="md" src={getProviderLogo(provider.id)} className="h-5 w-5 rounded-md" />
|
||||
{getProviderLabel(provider.id)}
|
||||
<ArrowUpRight size={14} />
|
||||
</Link>
|
||||
)
|
||||
})}
|
||||
{selectedCliTool !== codeTools.githubCopilotCli && (
|
||||
<SettingsItem>
|
||||
<div className="settings-label">
|
||||
{t('code.model')}
|
||||
{selectedCliTool === 'claude-code' && (
|
||||
<Popover
|
||||
content={
|
||||
<div style={{ width: 200 }}>
|
||||
<div style={{ marginBottom: 8, fontWeight: 500 }}>{t('code.supported_providers')}</div>
|
||||
<div
|
||||
style={{
|
||||
display: 'flex',
|
||||
flexDirection: 'column',
|
||||
gap: 8
|
||||
}}>
|
||||
{getClaudeSupportedProviders(allProviders).map((provider) => {
|
||||
return (
|
||||
<Link
|
||||
key={provider.id}
|
||||
style={{
|
||||
color: 'var(--color-text)',
|
||||
display: 'flex',
|
||||
alignItems: 'center',
|
||||
gap: 4
|
||||
}}
|
||||
to={`/settings/provider?id=${provider.id}`}>
|
||||
<Avatar
|
||||
radius="md"
|
||||
src={getProviderLogo(provider.id)}
|
||||
className="h-5 w-5 rounded-md"
|
||||
/>
|
||||
{getProviderLabel(provider.id)}
|
||||
<ArrowUpRight size={14} />
|
||||
</Link>
|
||||
)
|
||||
})}
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
}
|
||||
trigger="hover"
|
||||
placement="right">
|
||||
<HelpCircle size={14} style={{ color: 'var(--color-text-3)', cursor: 'pointer' }} />
|
||||
</Popover>
|
||||
)}
|
||||
</div>
|
||||
<ModelSelector
|
||||
providers={availableProviders}
|
||||
predicate={modelPredicate}
|
||||
style={{ width: '100%' }}
|
||||
placeholder={t('code.model_placeholder')}
|
||||
value={selectedModel ? getModelUniqId(selectedModel) : undefined}
|
||||
onChange={handleModelChange}
|
||||
allowClear
|
||||
/>
|
||||
</SettingsItem>
|
||||
}
|
||||
trigger="hover"
|
||||
placement="right">
|
||||
<HelpCircle
|
||||
size={14}
|
||||
style={{
|
||||
color: 'var(--color-text-3)',
|
||||
cursor: 'pointer'
|
||||
}}
|
||||
/>
|
||||
</Popover>
|
||||
)}
|
||||
</div>
|
||||
<ModelSelector
|
||||
providers={availableProviders}
|
||||
predicate={modelPredicate}
|
||||
style={{ width: '100%' }}
|
||||
placeholder={t('code.model_placeholder')}
|
||||
value={selectedModel ? getModelUniqId(selectedModel) : undefined}
|
||||
onChange={handleModelChange}
|
||||
allowClear
|
||||
/>
|
||||
</SettingsItem>
|
||||
)}
|
||||
|
||||
<SettingsItem>
|
||||
<div className="settings-label">{t('code.working_directory')}</div>
|
||||
@ -406,11 +444,27 @@ const CodeToolsPage: FC = () => {
|
||||
options={directories.map((dir) => ({
|
||||
value: dir,
|
||||
label: (
|
||||
<div style={{ display: 'flex', justifyContent: 'space-between', alignItems: 'center' }}>
|
||||
<span style={{ flex: 1, overflow: 'hidden', textOverflow: 'ellipsis' }}>{dir}</span>
|
||||
<div
|
||||
style={{
|
||||
display: 'flex',
|
||||
justifyContent: 'space-between',
|
||||
alignItems: 'center'
|
||||
}}>
|
||||
<span
|
||||
style={{
|
||||
flex: 1,
|
||||
overflow: 'hidden',
|
||||
textOverflow: 'ellipsis'
|
||||
}}>
|
||||
{dir}
|
||||
</span>
|
||||
<X
|
||||
size={14}
|
||||
style={{ marginLeft: 8, cursor: 'pointer', color: '#999' }}
|
||||
style={{
|
||||
marginLeft: 8,
|
||||
cursor: 'pointer',
|
||||
color: '#999'
|
||||
}}
|
||||
onClick={(e) => handleRemoveDirectory(dir, e)}
|
||||
/>
|
||||
</div>
|
||||
@ -432,7 +486,14 @@ const CodeToolsPage: FC = () => {
|
||||
rows={2}
|
||||
style={{ fontFamily: 'monospace' }}
|
||||
/>
|
||||
<div style={{ fontSize: 12, color: 'var(--color-text-3)', marginTop: 4 }}>{t('code.env_vars_help')}</div>
|
||||
<div
|
||||
style={{
|
||||
fontSize: 12,
|
||||
color: 'var(--color-text-3)',
|
||||
marginTop: 4
|
||||
}}>
|
||||
{t('code.env_vars_help')}
|
||||
</div>
|
||||
</SettingsItem>
|
||||
|
||||
{/* Terminal selection (macOS and Windows) */}
|
||||
@ -471,7 +532,12 @@ const CodeToolsPage: FC = () => {
|
||||
selectedTerminal !== terminalApps.cmd &&
|
||||
selectedTerminal !== terminalApps.powershell &&
|
||||
selectedTerminal !== terminalApps.windowsTerminal && (
|
||||
<div style={{ fontSize: 12, color: 'var(--color-text-3)', marginTop: 4 }}>
|
||||
<div
|
||||
style={{
|
||||
fontSize: 12,
|
||||
color: 'var(--color-text-3)',
|
||||
marginTop: 4
|
||||
}}>
|
||||
{terminalCustomPaths[selectedTerminal]
|
||||
? `${t('code.custom_path')}: ${terminalCustomPaths[selectedTerminal]}`
|
||||
: t('code.custom_path_required')}
|
||||
|
||||
@ -20,7 +20,8 @@ export const CLI_TOOLS = [
{ value: codeTools.qwenCode, label: 'Qwen Code' },
{ value: codeTools.geminiCli, label: 'Gemini CLI' },
{ value: codeTools.openaiCodex, label: 'OpenAI Codex' },
{ value: codeTools.iFlowCli, label: 'iFlow CLI' }
{ value: codeTools.iFlowCli, label: 'iFlow CLI' },
{ value: codeTools.githubCopilotCli, label: 'GitHub Copilot CLI' }
]

export const GEMINI_SUPPORTED_PROVIDERS = ['aihubmix', 'dmxapi', 'new-api', 'cherryin']
@ -43,7 +44,8 @@ export const CLI_TOOL_PROVIDER_MAP: Record<string, (providers: Provider[]) => Pr
[codeTools.qwenCode]: (providers) => providers.filter((p) => p.type.includes('openai')),
[codeTools.openaiCodex]: (providers) =>
providers.filter((p) => p.id === 'openai' || OPENAI_CODEX_SUPPORTED_PROVIDERS.includes(p.id)),
[codeTools.iFlowCli]: (providers) => providers.filter((p) => p.type.includes('openai'))
[codeTools.iFlowCli]: (providers) => providers.filter((p) => p.type.includes('openai')),
[codeTools.githubCopilotCli]: () => []
}

export const getCodeToolsApiBaseUrl = (model: Model, type: EndpointType) => {
@ -158,6 +160,10 @@ export const generateToolEnvironment = ({
env.IFLOW_BASE_URL = baseUrl
env.IFLOW_MODEL_NAME = model.id
break

case codeTools.githubCopilotCli:
env.GITHUB_TOKEN = apiKey || ''
break
}

return env

@ -1,5 +1,7 @@
|
||||
import { usePreference } from '@data/hooks/usePreference'
|
||||
import { cn } from '@heroui/react'
|
||||
import { loggerService } from '@logger'
|
||||
import HorizontalScrollContainer from '@renderer/components/HorizontalScrollContainer'
|
||||
import Scrollbar from '@renderer/components/Scrollbar'
|
||||
import { useMessageEditing } from '@renderer/context/MessageEditingContext'
|
||||
import { useAssistant } from '@renderer/hooks/useAssistant'
|
||||
@ -231,20 +233,28 @@ const MessageItem: FC<Props> = ({
|
||||
</MessageErrorBoundary>
|
||||
</MessageContentContainer>
|
||||
{showMenubar && (
|
||||
<MessageFooter className="MessageFooter" $isLastMessage={isLastMessage} $messageStyle={messageStyle}>
|
||||
<MessageMenubar
|
||||
message={message}
|
||||
assistant={assistant}
|
||||
model={model}
|
||||
index={index}
|
||||
topic={topic}
|
||||
isLastMessage={isLastMessage}
|
||||
isAssistantMessage={isAssistantMessage}
|
||||
isGrouped={isGrouped}
|
||||
messageContainerRef={messageContainerRef as React.RefObject<HTMLDivElement>}
|
||||
setModel={setModel}
|
||||
onUpdateUseful={onUpdateUseful}
|
||||
/>
|
||||
<MessageFooter className="MessageFooter">
|
||||
<HorizontalScrollContainer
|
||||
classNames={{
|
||||
content: cn(
|
||||
'items-center',
|
||||
isLastMessage && messageStyle === 'plain' ? 'flex-row-reverse' : 'flex-row'
|
||||
)
|
||||
}}>
|
||||
<MessageMenubar
|
||||
message={message}
|
||||
assistant={assistant}
|
||||
model={model}
|
||||
index={index}
|
||||
topic={topic}
|
||||
isLastMessage={isLastMessage}
|
||||
isAssistantMessage={isAssistantMessage}
|
||||
isGrouped={isGrouped}
|
||||
messageContainerRef={messageContainerRef as React.RefObject<HTMLDivElement>}
|
||||
setModel={setModel}
|
||||
onUpdateUseful={onUpdateUseful}
|
||||
/>
|
||||
</HorizontalScrollContainer>
|
||||
</MessageFooter>
|
||||
)}
|
||||
</>
|
||||
@ -288,10 +298,8 @@ const MessageContentContainer = styled(Scrollbar)`
|
||||
overflow-y: auto;
|
||||
`
|
||||
|
||||
const MessageFooter = styled.div<{ $isLastMessage: boolean; $messageStyle: 'plain' | 'bubble' }>`
|
||||
const MessageFooter = styled.div`
|
||||
display: flex;
|
||||
flex-direction: ${({ $isLastMessage, $messageStyle }) =>
|
||||
$isLastMessage && $messageStyle === 'plain' ? 'row-reverse' : 'row'};
|
||||
align-items: center;
|
||||
justify-content: space-between;
|
||||
gap: 10px;
|
||||
|
||||
@ -339,17 +339,30 @@ const GroupContainer = styled.div`
|
||||
const GridContainer = styled(Scrollbar)<{ $count: number; $gridColumns: number }>`
|
||||
width: 100%;
|
||||
display: grid;
|
||||
overflow-y: visible;
|
||||
gap: 16px;
|
||||
|
||||
&.horizontal {
|
||||
padding-bottom: 4px;
|
||||
grid-template-columns: repeat(${({ $count }) => $count}, minmax(420px, 1fr));
|
||||
overflow-y: hidden;
|
||||
overflow-x: auto;
|
||||
&::-webkit-scrollbar {
|
||||
height: 6px;
|
||||
}
|
||||
&::-webkit-scrollbar-thumb {
|
||||
background: var(--color-scrollbar-thumb);
|
||||
border-radius: var(--scrollbar-thumb-radius);
|
||||
}
|
||||
&::-webkit-scrollbar-thumb:hover {
|
||||
background: var(--color-scrollbar-thumb-hover);
|
||||
}
|
||||
}
|
||||
&.fold,
|
||||
&.vertical {
|
||||
grid-template-columns: repeat(1, minmax(0, 1fr));
|
||||
gap: 8px;
|
||||
overflow-y: auto;
|
||||
overflow-x: hidden;
|
||||
}
|
||||
&.grid {
|
||||
grid-template-columns: repeat(
|
||||
@ -357,11 +370,15 @@ const GridContainer = styled(Scrollbar)<{ $count: number; $gridColumns: number }
|
||||
minmax(0, 1fr)
|
||||
);
|
||||
grid-template-rows: auto;
|
||||
overflow-y: auto;
|
||||
overflow-x: hidden;
|
||||
}
|
||||
|
||||
&.multi-select-mode {
|
||||
grid-template-columns: repeat(1, minmax(0, 1fr));
|
||||
gap: 10px;
|
||||
overflow-y: auto;
|
||||
overflow-x: hidden;
|
||||
.grid {
|
||||
height: auto;
|
||||
}
|
||||
@ -387,7 +404,7 @@ interface MessageWrapperProps {
|
||||
const MessageWrapper = styled.div<MessageWrapperProps>`
|
||||
&.horizontal {
|
||||
padding: 1px;
|
||||
overflow-y: auto;
|
||||
/* overflow-y: auto; */
|
||||
.message {
|
||||
height: 100%;
|
||||
border: 0.5px solid var(--color-border);
|
||||
@ -407,8 +424,9 @@ const MessageWrapper = styled.div<MessageWrapperProps>`
|
||||
}
|
||||
}
|
||||
&.grid {
|
||||
display: block;
|
||||
height: 300px;
|
||||
overflow-y: hidden;
|
||||
overflow: hidden;
|
||||
border: 0.5px solid var(--color-border);
|
||||
border-radius: 10px;
|
||||
cursor: pointer;
|
||||
|
||||
@ -543,6 +543,11 @@ const Topics: FC<Props> = ({ assistant: _assistant, activeTopic, setActiveTopic,
|
||||
onContextMenu={() => setTargetTopic(topic)}
|
||||
className={classNames(isActive ? 'active' : '', singlealone ? 'singlealone' : '')}
|
||||
onClick={editingTopicId === topic.id && topicEdit.isEditing ? undefined : () => onSwitchTopic(topic)}
|
||||
onDoubleClick={() => {
|
||||
if (editingTopicId === topic.id && topicEdit.isEditing) return
|
||||
setEditingTopicId(topic.id)
|
||||
topicEdit.startEdit(topic.name)
|
||||
}}
|
||||
style={{
|
||||
borderRadius,
|
||||
cursor: editingTopicId === topic.id && topicEdit.isEditing ? 'default' : 'pointer'
|
||||
@ -559,13 +564,7 @@ const Topics: FC<Props> = ({ assistant: _assistant, activeTopic, setActiveTopic,
|
||||
onClick={(e) => e.stopPropagation()}
|
||||
/>
|
||||
) : (
|
||||
<TopicName
|
||||
className={getTopicNameClassName()}
|
||||
title={topicName}
|
||||
onDoubleClick={() => {
|
||||
setEditingTopicId(topic.id)
|
||||
topicEdit.startEdit(topic.name)
|
||||
}}>
|
||||
<TopicName className={getTopicNameClassName()} title={topicName}>
|
||||
{topicName}
|
||||
</TopicName>
|
||||
)}
|
||||
@ -589,7 +588,8 @@ const Topics: FC<Props> = ({ assistant: _assistant, activeTopic, setActiveTopic,
|
||||
} else {
|
||||
handleDeleteClick(topic.id, e)
|
||||
}
|
||||
}}>
|
||||
}}
|
||||
onDoubleClick={(e) => e.stopPropagation()}>
|
||||
{deletingTopicId === topic.id ? (
|
||||
<DeleteIcon size={14} color="var(--color-error)" style={{ pointerEvents: 'none' }} />
|
||||
) : (
|
||||
|
||||
@ -1,12 +1,15 @@
|
||||
import { SpaceBetweenRowFlex } from '@cherrystudio/ui'
|
||||
import { usePreference } from '@data/hooks/usePreference'
|
||||
import ActionIconButton from '@renderer/components/Buttons/ActionIconButton'
|
||||
import CodeEditor from '@renderer/components/CodeEditor'
|
||||
import RichEditor from '@renderer/components/RichEditor'
|
||||
import type { RichEditorRef } from '@renderer/components/RichEditor/types'
|
||||
import Selector from '@renderer/components/Selector'
|
||||
import { useCodeStyle } from '@renderer/context/CodeStyleProvider'
|
||||
import { useNotesSettings } from '@renderer/hooks/useNotesSettings'
|
||||
import { useAppDispatch } from '@renderer/store'
|
||||
import type { EditorView } from '@renderer/types'
|
||||
import { Empty } from 'antd'
|
||||
import { Empty, Tooltip } from 'antd'
|
||||
import { SpellCheck } from 'lucide-react'
|
||||
import type { FC, RefObject } from 'react'
|
||||
import { memo, useCallback, useMemo, useState } from 'react'
|
||||
import { useTranslation } from 'react-i18next'
|
||||
@ -23,8 +26,10 @@ interface NotesEditorProps {
|
||||
const NotesEditor: FC<NotesEditorProps> = memo(
|
||||
({ activeNodeId, currentContent, tokenCount, onMarkdownChange, editorRef }) => {
|
||||
const { t } = useTranslation()
|
||||
// oxlint-disable-next-line no-unused-vars
|
||||
const dispatch = useAppDispatch()
|
||||
const { settings } = useNotesSettings()
|
||||
const { activeCmTheme } = useCodeStyle()
|
||||
const [enableSpellCheck, setEnableSpellCheck] = usePreference('app.spell_check.enabled')
|
||||
const currentViewMode = useMemo(() => {
|
||||
if (settings.defaultViewMode === 'edit') {
|
||||
return settings.defaultEditMode
|
||||
@ -55,8 +60,6 @@ const NotesEditor: FC<NotesEditorProps> = memo(
|
||||
{tmpViewMode === 'source' ? (
|
||||
<SourceEditorWrapper isFullWidth={settings.isFullWidth} fontSize={settings.fontSize}>
|
||||
<CodeEditor
|
||||
theme={activeCmTheme}
|
||||
fontSize={settings.fontSize}
|
||||
value={currentContent}
|
||||
language="markdown"
|
||||
onChange={onMarkdownChange}
|
||||
@ -82,6 +85,7 @@ const NotesEditor: FC<NotesEditorProps> = memo(
|
||||
isFullWidth
|
||||
fontFamily={settings.fontFamily}
|
||||
fontSize={settings.fontSize}
|
||||
enableSpellCheck={enableSpellCheck}
|
||||
/>
|
||||
)}
|
||||
</RichEditorContainer>
|
||||
@ -96,8 +100,24 @@ const NotesEditor: FC<NotesEditorProps> = memo(
|
||||
color: 'var(--color-text-3)',
|
||||
display: 'flex',
|
||||
alignItems: 'center',
|
||||
gap: 8
|
||||
gap: 12
|
||||
}}>
|
||||
{tmpViewMode === 'preview' && (
|
||||
// oxlint-disable-next-line no-undef
|
||||
<Tooltip placement="top" title={t('notes.spell_check_tooltip')} mouseLeaveDelay={0} arrow>
|
||||
<ActionIconButton
|
||||
active={enableSpellCheck}
|
||||
onClick={() => {
|
||||
const newValue = !enableSpellCheck
|
||||
setEnableSpellCheck(newValue)
|
||||
window.api.setEnableSpellCheck(newValue)
|
||||
}}
|
||||
icon={<SpellCheck size={18} />}>
|
||||
<SpellCheck size={18} />
|
||||
</ActionIconButton>
|
||||
{/* oxlint-disable-next-line no-undef */}
|
||||
</Tooltip>
|
||||
)}
|
||||
<Selector
|
||||
value={tmpViewMode as EditorView}
|
||||
onChange={(value: EditorView) => setTmpViewMode(value)}
|
||||
|
||||
@ -6,12 +6,16 @@ import { useInPlaceEdit } from '@renderer/hooks/useInPlaceEdit'
|
||||
import { useKnowledgeBases } from '@renderer/hooks/useKnowledge'
|
||||
import { useActiveNode } from '@renderer/hooks/useNotesQuery'
|
||||
import NotesSidebarHeader from '@renderer/pages/notes/NotesSidebarHeader'
|
||||
import { fetchNoteSummary } from '@renderer/services/ApiService'
|
||||
import type { RootState } from '@renderer/store'
|
||||
import { useAppSelector } from '@renderer/store'
|
||||
import { selectSortType } from '@renderer/store/note'
|
||||
import type { NotesSortType, NotesTreeNode } from '@renderer/types/note'
|
||||
import { exportNote } from '@renderer/utils/export'
|
||||
import { useVirtualizer } from '@tanstack/react-virtual'
|
||||
import type { InputRef, MenuProps } from 'antd'
|
||||
import { Dropdown, Input } from 'antd'
|
||||
import type { ItemType, MenuItemType } from 'antd/es/menu/interface'
|
||||
import {
|
||||
ChevronDown,
|
||||
ChevronRight,
|
||||
@ -21,12 +25,15 @@ import {
|
||||
FileSearch,
|
||||
Folder,
|
||||
FolderOpen,
|
||||
Sparkles,
|
||||
Star,
|
||||
StarOff
|
||||
StarOff,
|
||||
UploadIcon
|
||||
} from 'lucide-react'
|
||||
import type { FC, Ref } from 'react'
|
||||
import { memo, useCallback, useEffect, useMemo, useRef, useState } from 'react'
|
||||
import { useTranslation } from 'react-i18next'
|
||||
import { useSelector } from 'react-redux'
|
||||
import styled from 'styled-components'
|
||||
|
||||
interface NotesSidebarProps {
|
||||
@ -52,6 +59,8 @@ interface TreeNodeProps {
|
||||
selectedFolderId?: string | null
|
||||
activeNodeId?: string
|
||||
editingNodeId: string | null
|
||||
renamingNodeIds: Set<string>
|
||||
newlyRenamedNodeIds: Set<string>
|
||||
draggedNodeId: string | null
|
||||
dragOverNodeId: string | null
|
||||
dragPosition: 'before' | 'inside' | 'after'
|
||||
@ -74,6 +83,8 @@ const TreeNode = memo<TreeNodeProps>(
|
||||
selectedFolderId,
|
||||
activeNodeId,
|
||||
editingNodeId,
|
||||
renamingNodeIds,
|
||||
newlyRenamedNodeIds,
|
||||
draggedNodeId,
|
||||
dragOverNodeId,
|
||||
dragPosition,
|
||||
@ -94,6 +105,8 @@ const TreeNode = memo<TreeNodeProps>(
|
||||
? node.type === 'folder' && node.id === selectedFolderId
|
||||
: node.id === activeNodeId
|
||||
const isEditing = editingNodeId === node.id && inPlaceEdit.isEditing
|
||||
const isRenaming = renamingNodeIds.has(node.id)
|
||||
const isNewlyRenamed = newlyRenamedNodeIds.has(node.id)
|
||||
const hasChildren = node.children && node.children.length > 0
|
||||
const isDragging = draggedNodeId === node.id
|
||||
const isDragOver = dragOverNodeId === node.id
|
||||
@ -101,6 +114,12 @@ const TreeNode = memo<TreeNodeProps>(
|
||||
const isDragInside = isDragOver && dragPosition === 'inside'
|
||||
const isDragAfter = isDragOver && dragPosition === 'after'
|
||||
|
||||
const getNodeNameClassName = () => {
|
||||
if (isRenaming) return 'shimmer'
|
||||
if (isNewlyRenamed) return 'typing'
|
||||
return ''
|
||||
}
|
||||
|
||||
return (
|
||||
<div key={node.id}>
|
||||
<Dropdown menu={{ items: getMenuItems(node) }} trigger={['contextMenu']}>
|
||||
@ -158,7 +177,7 @@ const TreeNode = memo<TreeNodeProps>(
|
||||
size="small"
|
||||
/>
|
||||
) : (
|
||||
<NodeName>{node.name}</NodeName>
|
||||
<NodeName className={getNodeNameClassName()}>{node.name}</NodeName>
|
||||
)}
|
||||
</TreeNodeContent>
|
||||
</TreeNodeContainer>
|
||||
@ -175,6 +194,8 @@ const TreeNode = memo<TreeNodeProps>(
|
||||
selectedFolderId={selectedFolderId}
|
||||
activeNodeId={activeNodeId}
|
||||
editingNodeId={editingNodeId}
|
||||
renamingNodeIds={renamingNodeIds}
|
||||
newlyRenamedNodeIds={newlyRenamedNodeIds}
|
||||
draggedNodeId={draggedNodeId}
|
||||
dragOverNodeId={dragOverNodeId}
|
||||
dragPosition={dragPosition}
|
||||
@ -215,7 +236,10 @@ const NotesSidebar: FC<NotesSidebarProps> = ({
|
||||
const { bases } = useKnowledgeBases()
|
||||
const { activeNode } = useActiveNode(notesTree)
|
||||
const sortType = useAppSelector(selectSortType)
|
||||
const exportMenuOptions = useSelector((state: RootState) => state.settings.exportMenuOptions)
|
||||
const [editingNodeId, setEditingNodeId] = useState<string | null>(null)
|
||||
const [renamingNodeIds, setRenamingNodeIds] = useState<Set<string>>(new Set())
|
||||
const [newlyRenamedNodeIds, setNewlyRenamedNodeIds] = useState<Set<string>>(new Set())
|
||||
const [draggedNodeId, setDraggedNodeId] = useState<string | null>(null)
|
||||
const [dragOverNodeId, setDragOverNodeId] = useState<string | null>(null)
|
||||
const [dragPosition, setDragPosition] = useState<'before' | 'inside' | 'after'>('inside')
|
||||
@ -338,6 +362,49 @@ const NotesSidebar: FC<NotesSidebarProps> = ({
|
||||
[bases.length, t]
|
||||
)
|
||||
|
||||
const handleAutoRename = useCallback(
|
||||
async (note: NotesTreeNode) => {
|
||||
if (note.type !== 'file') return
|
||||
|
||||
setRenamingNodeIds((prev) => new Set(prev).add(note.id))
|
||||
try {
|
||||
const content = await window.api.file.readExternal(note.externalPath)
|
||||
if (!content || content.trim().length === 0) {
|
||||
window.toast.warning(t('notes.auto_rename.empty_note'))
|
||||
return
|
||||
}
|
||||
|
||||
const summaryText = await fetchNoteSummary({ content })
|
||||
if (summaryText) {
|
||||
onRenameNode(note.id, summaryText)
|
||||
window.toast.success(t('notes.auto_rename.success'))
|
||||
} else {
|
||||
window.toast.error(t('notes.auto_rename.failed'))
|
||||
}
|
||||
} catch (error) {
|
||||
window.toast.error(t('notes.auto_rename.failed'))
|
||||
logger.error(`Failed to auto-rename note: ${error}`)
|
||||
} finally {
|
||||
setRenamingNodeIds((prev) => {
|
||||
const next = new Set(prev)
|
||||
next.delete(note.id)
|
||||
return next
|
||||
})
|
||||
|
||||
setNewlyRenamedNodeIds((prev) => new Set(prev).add(note.id))
|
||||
|
||||
setTimeout(() => {
|
||||
setNewlyRenamedNodeIds((prev) => {
|
||||
const next = new Set(prev)
|
||||
next.delete(note.id)
|
||||
return next
|
||||
})
|
||||
}, 700)
|
||||
}
|
||||
},
|
||||
[onRenameNode, t]
|
||||
)
|
||||
|
||||
const handleDragStart = useCallback((e: React.DragEvent, node: NotesTreeNode) => {
|
||||
setDraggedNodeId(node.id)
|
||||
e.dataTransfer.effectAllowed = 'move'
|
||||
@ -492,7 +559,22 @@ const NotesSidebar: FC<NotesSidebarProps> = ({
|
||||
|
||||
const getMenuItems = useCallback(
|
||||
(node: NotesTreeNode) => {
|
||||
const baseMenuItems: MenuProps['items'] = [
|
||||
const baseMenuItems: MenuProps['items'] = []
|
||||
|
||||
// only show auto-rename for files for now
|
||||
if (node.type !== 'folder') {
|
||||
baseMenuItems.push({
|
||||
label: t('notes.auto_rename.label'),
|
||||
key: 'auto-rename',
|
||||
icon: <Sparkles size={14} />,
|
||||
disabled: renamingNodeIds.has(node.id),
|
||||
onClick: () => {
|
||||
handleAutoRename(node)
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
baseMenuItems.push(
|
||||
{
|
||||
label: t('notes.rename'),
|
||||
key: 'rename',
|
||||
@ -509,7 +591,7 @@ const NotesSidebar: FC<NotesSidebarProps> = ({
|
||||
window.api.openPath(node.externalPath)
|
||||
}
|
||||
}
|
||||
]
|
||||
)
|
||||
if (node.type !== 'folder') {
|
||||
baseMenuItems.push(
|
||||
{
|
||||
@ -527,6 +609,48 @@ const NotesSidebar: FC<NotesSidebarProps> = ({
|
||||
onClick: () => {
|
||||
handleExportKnowledge(node)
|
||||
}
|
||||
},
|
||||
{
|
||||
label: t('chat.topics.export.title'),
|
||||
key: 'export',
|
||||
icon: <UploadIcon size={14} />,
|
||||
children: [
|
||||
exportMenuOptions.markdown && {
|
||||
label: t('chat.topics.export.md.label'),
|
||||
key: 'markdown',
|
||||
onClick: () => exportNote({ node, platform: 'markdown' })
|
||||
},
|
||||
exportMenuOptions.docx && {
|
||||
label: t('chat.topics.export.word'),
|
||||
key: 'word',
|
||||
onClick: () => exportNote({ node, platform: 'docx' })
|
||||
},
|
||||
exportMenuOptions.notion && {
|
||||
label: t('chat.topics.export.notion'),
|
||||
key: 'notion',
|
||||
onClick: () => exportNote({ node, platform: 'notion' })
|
||||
},
|
||||
exportMenuOptions.yuque && {
|
||||
label: t('chat.topics.export.yuque'),
|
||||
key: 'yuque',
|
||||
onClick: () => exportNote({ node, platform: 'yuque' })
|
||||
},
|
||||
exportMenuOptions.obsidian && {
|
||||
label: t('chat.topics.export.obsidian'),
|
||||
key: 'obsidian',
|
||||
onClick: () => exportNote({ node, platform: 'obsidian' })
|
||||
},
|
||||
exportMenuOptions.joplin && {
|
||||
label: t('chat.topics.export.joplin'),
|
||||
key: 'joplin',
|
||||
onClick: () => exportNote({ node, platform: 'joplin' })
|
||||
},
|
||||
exportMenuOptions.siyuan && {
|
||||
label: t('chat.topics.export.siyuan'),
|
||||
key: 'siyuan',
|
||||
onClick: () => exportNote({ node, platform: 'siyuan' })
|
||||
}
|
||||
].filter(Boolean) as ItemType<MenuItemType>[]
|
||||
}
|
||||
)
|
||||
}
|
||||
@ -545,7 +669,16 @@ const NotesSidebar: FC<NotesSidebarProps> = ({
|
||||
|
||||
return baseMenuItems
|
||||
},
|
||||
[t, handleStartEdit, onToggleStar, handleExportKnowledge, handleDeleteNode]
|
||||
[
|
||||
t,
|
||||
handleStartEdit,
|
||||
onToggleStar,
|
||||
handleExportKnowledge,
|
||||
handleDeleteNode,
|
||||
renamingNodeIds,
|
||||
handleAutoRename,
|
||||
exportMenuOptions
|
||||
]
|
||||
)
|
||||
|
||||
const handleDropFiles = useCallback(
|
||||
@ -682,6 +815,8 @@ const NotesSidebar: FC<NotesSidebarProps> = ({
|
||||
selectedFolderId={selectedFolderId}
|
||||
activeNodeId={activeNode?.id}
|
||||
editingNodeId={editingNodeId}
|
||||
renamingNodeIds={renamingNodeIds}
|
||||
newlyRenamedNodeIds={newlyRenamedNodeIds}
|
||||
draggedNodeId={draggedNodeId}
|
||||
dragOverNodeId={dragOverNodeId}
|
||||
dragPosition={dragPosition}
|
||||
@ -726,6 +861,8 @@ const NotesSidebar: FC<NotesSidebarProps> = ({
|
||||
selectedFolderId={selectedFolderId}
|
||||
activeNodeId={activeNode?.id}
|
||||
editingNodeId={editingNodeId}
|
||||
renamingNodeIds={renamingNodeIds}
|
||||
newlyRenamedNodeIds={newlyRenamedNodeIds}
|
||||
draggedNodeId={draggedNodeId}
|
||||
dragOverNodeId={dragOverNodeId}
|
||||
dragPosition={dragPosition}
|
||||
@ -748,6 +885,8 @@ const NotesSidebar: FC<NotesSidebarProps> = ({
|
||||
selectedFolderId={selectedFolderId}
|
||||
activeNodeId={activeNode?.id}
|
||||
editingNodeId={editingNodeId}
|
||||
renamingNodeIds={renamingNodeIds}
|
||||
newlyRenamedNodeIds={newlyRenamedNodeIds}
|
||||
draggedNodeId={draggedNodeId}
|
||||
dragOverNodeId={dragOverNodeId}
|
||||
dragPosition={dragPosition}
|
||||
@ -935,6 +1074,44 @@ const NodeName = styled.div`
|
||||
text-overflow: ellipsis;
|
||||
font-size: 13px;
|
||||
color: var(--color-text);
|
||||
position: relative;
|
||||
will-change: background-position, width;
|
||||
|
||||
--color-shimmer-mid: var(--color-text-1);
|
||||
--color-shimmer-end: color-mix(in srgb, var(--color-text-1) 25%, transparent);
|
||||
|
||||
&.shimmer {
|
||||
background: linear-gradient(to left, var(--color-shimmer-end), var(--color-shimmer-mid), var(--color-shimmer-end));
|
||||
background-size: 200% 100%;
|
||||
background-clip: text;
|
||||
color: transparent;
|
||||
animation: shimmer 3s linear infinite;
|
||||
}
|
||||
|
||||
&.typing {
|
||||
display: block;
|
||||
white-space: nowrap;
|
||||
overflow: hidden;
|
||||
animation: typewriter 0.5s steps(40, end);
|
||||
}
|
||||
|
||||
@keyframes shimmer {
|
||||
0% {
|
||||
background-position: 200% 0;
|
||||
}
|
||||
100% {
|
||||
background-position: -200% 0;
|
||||
}
|
||||
}
|
||||
|
||||
@keyframes typewriter {
|
||||
from {
|
||||
width: 0;
|
||||
}
|
||||
to {
|
||||
width: 100%;
|
||||
}
|
||||
}
|
||||
`
|
||||
|
||||
const EditInput = styled(Input)`
|
||||
|
||||
@ -0,0 +1,354 @@
|
||||
import { loggerService } from '@logger'
|
||||
import { TopView } from '@renderer/components/TopView'
|
||||
import type { Provider } from '@renderer/types'
|
||||
import type { FormProps } from 'antd'
|
||||
import { AutoComplete, Button, Flex, Form, Input, Modal, Progress, Select } from 'antd'
|
||||
import { useState } from 'react'
|
||||
import { useTranslation } from 'react-i18next'
|
||||
|
||||
import { useTimer } from '../../../../hooks/useTimer'
|
||||
|
||||
const logger = loggerService.withContext('OVMSClient')
|
||||
|
||||
interface ShowParams {
|
||||
title: string
|
||||
provider: Provider
|
||||
}
|
||||
|
||||
interface Props extends ShowParams {
|
||||
resolve: (data: any) => unknown
|
||||
}
|
||||
|
||||
type FieldType = {
|
||||
modelName: string
|
||||
modelId: string
|
||||
modelSource: string
|
||||
task: string
|
||||
}
|
||||
|
||||
interface PresetModel {
|
||||
modelId: string
|
||||
modelName: string
|
||||
modelSource: string
|
||||
task: string
|
||||
label: string
|
||||
}
|
||||
|
||||
const PRESET_MODELS: PresetModel[] = [
|
||||
{
|
||||
modelId: 'OpenVINO/Qwen3-8B-int4-ov',
|
||||
modelName: 'Qwen3-8B-int4-ov',
|
||||
modelSource: 'https://www.modelscope.cn/models',
|
||||
task: 'text_generation',
|
||||
label: 'Qwen3-8B-int4-ov (Text Generation)'
|
||||
},
|
||||
{
|
||||
modelId: 'OpenVINO/bge-base-en-v1.5-fp16-ov',
|
||||
modelName: 'bge-base-en-v1.5-fp16-ov',
|
||||
modelSource: 'https://www.modelscope.cn/models',
|
||||
task: 'embeddings',
|
||||
label: 'bge-base-en-v1.5-fp16-ov (Embeddings)'
|
||||
},
|
||||
{
|
||||
modelId: 'OpenVINO/bge-reranker-base-fp16-ov',
|
||||
modelName: 'bge-reranker-base-fp16-ov',
|
||||
modelSource: 'https://www.modelscope.cn/models',
|
||||
task: 'rerank',
|
||||
label: 'bge-reranker-base-fp16-ov (Rerank)'
|
||||
},
|
||||
{
|
||||
modelId: 'OpenVINO/DeepSeek-R1-Distill-Qwen-7B-int4-ov',
|
||||
modelName: 'DeepSeek-R1-Distill-Qwen-7B-int4-ov',
|
||||
modelSource: 'https://www.modelscope.cn/models',
|
||||
task: 'text_generation',
|
||||
label: 'DeepSeek-R1-Distill-Qwen-7B-int4-ov (Text Generation)'
|
||||
},
|
||||
{
|
||||
modelId: 'OpenVINO/stable-diffusion-v1-5-int8-ov',
|
||||
modelName: 'stable-diffusion-v1-5-int8-ov',
|
||||
modelSource: 'https://www.modelscope.cn/models',
|
||||
task: 'image_generation',
|
||||
label: 'stable-diffusion-v1-5-int8-ov (Image Generation)'
|
||||
},
|
||||
{
|
||||
modelId: 'OpenVINO/FLUX.1-schnell-int4-ov',
|
||||
modelName: 'FLUX.1-schnell-int4-ov',
|
||||
modelSource: 'https://www.modelscope.cn/models',
|
||||
task: 'image_generation',
|
||||
label: 'FLUX.1-schnell-int4-ov (Image Generation)'
|
||||
}
|
||||
]
|
||||
|
||||
const PopupContainer: React.FC<Props> = ({ title, resolve }) => {
|
||||
const [open, setOpen] = useState(true)
|
||||
const [loading, setLoading] = useState(false)
|
||||
const [progress, setProgress] = useState(0)
|
||||
const [cancelled, setCancelled] = useState(false)
|
||||
const [form] = Form.useForm()
|
||||
const { t } = useTranslation()
|
||||
const { setIntervalTimer, clearIntervalTimer, setTimeoutTimer } = useTimer()
|
||||
|
||||
const startFakeProgress = () => {
|
||||
setProgress(0)
|
||||
setIntervalTimer(
|
||||
'progress',
|
||||
() => {
|
||||
setProgress((prev) => {
|
||||
if (prev >= 95) {
|
||||
return prev // Stop at 95% until actual completion
|
||||
}
|
||||
// Simulate realistic download progress with slowing speed
|
||||
const increment =
|
||||
prev < 30
|
||||
? Math.random() * 1 + 0.25
|
||||
: prev < 60
|
||||
? Math.random() * 0.5 + 0.125
|
||||
: Math.random() * 0.25 + 0.03125
|
||||
|
||||
return Math.min(prev + increment, 95)
|
||||
})
|
||||
},
|
||||
500
|
||||
)
|
||||
}
|
||||
|
||||
const stopFakeProgress = (complete = false) => {
|
||||
clearIntervalTimer('progress')
|
||||
if (complete) {
|
||||
setProgress(100)
|
||||
// Reset progress after a short delay
|
||||
setTimeoutTimer('progress-reset', () => setProgress(0), 1500)
|
||||
} else {
|
||||
setProgress(0)
|
||||
}
|
||||
}
|
||||
|
||||
const handlePresetSelect = (value: string) => {
|
||||
const selectedPreset = PRESET_MODELS.find((model) => model.modelId === value)
|
||||
if (selectedPreset) {
|
||||
form.setFieldsValue({
|
||||
modelId: selectedPreset.modelId,
|
||||
modelName: selectedPreset.modelName,
|
||||
modelSource: selectedPreset.modelSource,
|
||||
task: selectedPreset.task
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
const handleModelIdChange = (value: string) => {
|
||||
if (value) {
|
||||
// Extract model name from model ID (part after last '/')
|
||||
const lastSlashIndex = value.lastIndexOf('/')
|
||||
if (lastSlashIndex !== -1 && lastSlashIndex < value.length - 1) {
|
||||
const modelName = value.substring(lastSlashIndex + 1)
|
||||
form.setFieldValue('modelName', modelName)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
const onCancel = async () => {
|
||||
if (loading) {
|
||||
// Stop the download
|
||||
try {
|
||||
setCancelled(true) // Mark as cancelled by user
|
||||
logger.info('Stopping download...')
|
||||
await window.api.ovms.stopAddModel()
|
||||
stopFakeProgress(false)
|
||||
setLoading(false)
|
||||
} catch (error) {
|
||||
logger.error(`Failed to stop download: ${error}`)
|
||||
}
|
||||
return
|
||||
}
|
||||
setOpen(false)
|
||||
}
|
||||
|
||||
const onClose = () => {
|
||||
resolve({})
|
||||
}
|
||||
|
||||
const onFinish: FormProps<FieldType>['onFinish'] = async (values) => {
|
||||
setLoading(true)
|
||||
setCancelled(false) // Reset cancelled state
|
||||
startFakeProgress()
|
||||
try {
|
||||
const { modelName, modelId, modelSource, task } = values
|
||||
logger.info(`🔄 Downloading model: ${modelName} with ID: ${modelId}, source: ${modelSource}, task: ${task}`)
|
||||
const result = await window.api.ovms.addModel(modelName, modelId, modelSource, task)
|
||||
|
||||
if (result.success) {
|
||||
stopFakeProgress(true) // Complete the progress bar
|
||||
Modal.success({
|
||||
title: t('ovms.download.success'),
|
||||
content: t('ovms.download.success_desc', { modelName: modelName, modelId: modelId }),
|
||||
onOk: () => {
|
||||
setOpen(false)
|
||||
}
|
||||
})
|
||||
} else {
|
||||
stopFakeProgress(false) // Reset progress on error
|
||||
logger.error(`Download failed, is it cancelled? ${cancelled}`)
|
||||
// Only show error if not cancelled by user
|
||||
if (!cancelled) {
|
||||
Modal.error({
|
||||
title: t('ovms.download.error'),
|
||||
content: <div dangerouslySetInnerHTML={{ __html: result.message }}></div>,
|
||||
onOk: () => {
|
||||
// Keep the form open for retry
|
||||
}
|
||||
})
|
||||
}
|
||||
}
|
||||
} catch (error: any) {
|
||||
stopFakeProgress(false) // Reset progress on error
|
||||
logger.error(`Download crashed, is it cancelled? ${cancelled}`)
|
||||
// Only show error if not cancelled by user
|
||||
if (!cancelled) {
|
||||
Modal.error({
|
||||
title: t('ovms.download.error'),
|
||||
content: error.message,
|
||||
onOk: () => {
|
||||
// Keep the form open for retry
|
||||
}
|
||||
})
|
||||
}
|
||||
} finally {
|
||||
setLoading(false)
|
||||
}
|
||||
}
|
||||
|
||||
return (
|
||||
<Modal
|
||||
title={title}
|
||||
open={open}
|
||||
onCancel={onCancel}
|
||||
maskClosable={false}
|
||||
afterClose={onClose}
|
||||
footer={null}
|
||||
transitionName="animation-move-down"
|
||||
centered
|
||||
closeIcon={!loading}>
|
||||
<Form
|
||||
form={form}
|
||||
labelCol={{ flex: '110px' }}
|
||||
labelAlign="left"
|
||||
colon={false}
|
||||
style={{ marginTop: 25 }}
|
||||
onFinish={onFinish}
|
||||
disabled={false}>
|
||||
<Form.Item
|
||||
name="modelId"
|
||||
label={t('ovms.download.model_id.label')}
|
||||
rules={[
|
||||
{ required: true, message: t('ovms.download.model_id.required') },
|
||||
{
|
||||
pattern: /^OpenVINO\/.+/,
|
||||
message: t('ovms.download.model_id.model_id_pattern')
|
||||
}
|
||||
]}>
|
||||
<AutoComplete
|
||||
placeholder={t('ovms.download.model_id.placeholder')}
|
||||
options={PRESET_MODELS.map((model) => ({
|
||||
value: model.modelId,
|
||||
label: model.label
|
||||
}))}
|
||||
onSelect={handlePresetSelect}
|
||||
onChange={handleModelIdChange}
|
||||
disabled={loading}
|
||||
allowClear
|
||||
/>
|
||||
</Form.Item>
|
||||
<Form.Item
|
||||
name="modelName"
|
||||
label={t('ovms.download.model_name.label')}
|
||||
rules={[{ required: true, message: t('ovms.download.model_name.required') }]}>
|
||||
<Input
|
||||
placeholder={t('ovms.download.model_name.placeholder')}
|
||||
spellCheck={false}
|
||||
maxLength={200}
|
||||
disabled={loading}
|
||||
/>
|
||||
</Form.Item>
|
||||
<Form.Item
|
||||
name="modelSource"
|
||||
label={t('ovms.download.model_source')}
|
||||
initialValue="https://www.modelscope.cn/models"
|
||||
rules={[{ required: false }]}>
|
||||
<Select
|
||||
options={[
|
||||
{ value: '', label: 'HuggingFace' },
|
||||
{ value: 'https://hf-mirror.com', label: 'HF-Mirror' },
|
||||
{ value: 'https://www.modelscope.cn/models', label: 'ModelScope' }
|
||||
]}
|
||||
disabled={loading}
|
||||
/>
|
||||
</Form.Item>
|
||||
<Form.Item
|
||||
name="task"
|
||||
label={t('ovms.download.model_task')}
|
||||
initialValue="text_generation"
|
||||
rules={[{ required: false }]}>
|
||||
<Select
|
||||
options={[
|
||||
{ value: 'text_generation', label: 'Text Generation' },
|
||||
{ value: 'embeddings', label: 'Embeddings' },
|
||||
{ value: 'rerank', label: 'Rerank' },
|
||||
{ value: 'image_generation', label: 'Image Generation' }
|
||||
]}
|
||||
disabled={loading}
|
||||
/>
|
||||
</Form.Item>
|
||||
{loading && (
|
||||
<Form.Item style={{ marginBottom: 16 }}>
|
||||
<Progress
|
||||
percent={Math.round(progress)}
|
||||
status={progress === 100 ? 'success' : 'active'}
|
||||
strokeColor={{
|
||||
'0%': '#108ee9',
|
||||
'100%': '#87d068'
|
||||
}}
|
||||
showInfo={true}
|
||||
format={(percent) => `${percent}%`}
|
||||
/>
|
||||
<div style={{ textAlign: 'center', marginTop: 8, color: '#666', fontSize: '14px' }}>
|
||||
{t('ovms.download.tip')}
|
||||
</div>
|
||||
</Form.Item>
|
||||
)}
|
||||
<Form.Item style={{ marginBottom: 8, textAlign: 'center' }}>
|
||||
<Flex justify="end" align="center" style={{ position: 'relative' }}>
|
||||
<Button
|
||||
type="primary"
|
||||
htmlType={loading ? 'button' : 'submit'}
|
||||
size="middle"
|
||||
loading={false}
|
||||
onClick={loading ? onCancel : undefined}>
|
||||
{loading ? t('common.cancel') : t('ovms.download.button')}
|
||||
</Button>
|
||||
</Flex>
|
||||
</Form.Item>
|
||||
</Form>
|
||||
</Modal>
|
||||
)
|
||||
}
|
||||
|
||||
export default class DownloadOVMSModelPopup {
|
||||
static topviewId = 0
|
||||
static hide() {
|
||||
TopView.hide('DownloadOVMSModelPopup')
|
||||
}
|
||||
static show(props: ShowParams) {
|
||||
return new Promise<any>((resolve) => {
|
||||
TopView.show(
|
||||
<PopupContainer
|
||||
{...props}
|
||||
resolve={(v) => {
|
||||
resolve(v)
|
||||
this.hide()
|
||||
}}
|
||||
/>,
|
||||
'DownloadOVMSModelPopup'
|
||||
)
|
||||
})
|
||||
}
|
||||
}
|
||||
@ -8,6 +8,7 @@ import { getProviderLabel } from '@renderer/i18n/label'
|
||||
import { SettingHelpLink, SettingHelpText, SettingHelpTextRow, SettingSubtitle } from '@renderer/pages/settings'
|
||||
import EditModelPopup from '@renderer/pages/settings/ProviderSettings/EditModelPopup/EditModelPopup'
|
||||
import AddModelPopup from '@renderer/pages/settings/ProviderSettings/ModelList/AddModelPopup'
|
||||
import DownloadOVMSModelPopup from '@renderer/pages/settings/ProviderSettings/ModelList/DownloadOVMSModelPopup'
|
||||
import ManageModelsPopup from '@renderer/pages/settings/ProviderSettings/ModelList/ManageModelsPopup'
|
||||
import NewApiAddModelPopup from '@renderer/pages/settings/ProviderSettings/ModelList/NewApiAddModelPopup'
|
||||
import type { Model } from '@renderer/types'
|
||||
@ -93,6 +94,11 @@ const ModelList: React.FC<ModelListProps> = ({ providerId }) => {
|
||||
}
|
||||
}, [provider, t])
|
||||
|
||||
const onDownloadModel = useCallback(
|
||||
() => DownloadOVMSModelPopup.show({ title: t('ovms.download.title'), provider }),
|
||||
[provider, t]
|
||||
)
|
||||
|
||||
const isLoading = useMemo(() => displayedModelGroups === null, [displayedModelGroups])
|
||||
|
||||
return (
|
||||
@ -172,9 +178,19 @@ const ModelList: React.FC<ModelListProps> = ({ providerId }) => {
|
||||
isDisabled={isHealthChecking}>
|
||||
{t('button.manage')}
|
||||
</Button>
|
||||
<Button variant="solid" onPress={onAddModel} startContent={<Plus size={16} />} isDisabled={isHealthChecking}>
|
||||
{t('button.add')}
|
||||
</Button>
|
||||
{provider.id !== 'ovms' ? (
|
||||
<Button variant="solid" onPress={onAddModel} startContent={<Plus size={16} />} isDisabled={isHealthChecking}>
|
||||
{t('button.add')}
|
||||
</Button>
|
||||
) : (
|
||||
<Button
|
||||
variant="solid"
|
||||
onPress={onDownloadModel}
|
||||
startContent={<Plus size={16} />}
|
||||
isDisabled={isHealthChecking}>
|
||||
{t('button.download')}
|
||||
</Button>
|
||||
)}
|
||||
</Flex>
|
||||
</>
|
||||
)
|
||||
|
||||
@ -0,0 +1,171 @@
|
||||
import { VStack } from '@renderer/components/Layout'
|
||||
import { Alert, Button } from 'antd'
|
||||
import type { FC } from 'react'
|
||||
import { useEffect, useState } from 'react'
|
||||
import { useTranslation } from 'react-i18next'
|
||||
|
||||
import { SettingRow, SettingSubtitle } from '..'
|
||||
|
||||
const OVMSSettings: FC = () => {
|
||||
const { t } = useTranslation()
|
||||
|
||||
const [ovmsStatus, setOvmsStatus] = useState<'not-installed' | 'not-running' | 'running'>('not-running')
|
||||
const [isInstallingOvms, setIsInstallingOvms] = useState(false)
|
||||
const [isRunningOvms, setIsRunningOvms] = useState(false)
|
||||
const [isStoppingOvms, setIsStoppingOvms] = useState(false)
|
||||
|
||||
useEffect(() => {
|
||||
const checkStatus = async () => {
|
||||
const status = await window.api.ovms.getStatus()
|
||||
setOvmsStatus(status)
|
||||
}
|
||||
checkStatus()
|
||||
}, [])
|
||||
|
||||
const installOvms = async () => {
|
||||
try {
|
||||
setIsInstallingOvms(true)
|
||||
await window.api.installOvmsBinary()
|
||||
// Re-check status after a successful install
|
||||
const status = await window.api.ovms.getStatus()
|
||||
setOvmsStatus(status)
|
||||
setIsInstallingOvms(false)
|
||||
} catch (error: any) {
|
||||
const errCodeMsg = {
|
||||
'100': t('ovms.failed.install_code_100'),
|
||||
'101': t('ovms.failed.install_code_101'),
|
||||
'102': t('ovms.failed.install_code_102'),
|
||||
'103': t('ovms.failed.install_code_103'),
|
||||
'104': t('ovms.failed.install_code_104'),
|
||||
'105': t('ovms.failed.install_code_105')
|
||||
}
|
||||
const match = error.message.match(/code (\d+)/)
|
||||
const code = match ? match[1] : 'unknown'
|
||||
const errorMsg = errCodeMsg[code as keyof typeof errCodeMsg] || error.message
|
||||
|
||||
window.toast.error(t('ovms.failed.install') + errorMsg)
|
||||
setIsInstallingOvms(false)
|
||||
}
|
||||
}
|
||||
|
||||
const runOvms = async () => {
|
||||
try {
|
||||
setIsRunningOvms(true)
|
||||
await window.api.ovms.runOvms()
|
||||
// Re-check status after a successful start
|
||||
const status = await window.api.ovms.getStatus()
|
||||
setOvmsStatus(status)
|
||||
setIsRunningOvms(false)
|
||||
} catch (error: any) {
|
||||
window.toast.error(t('ovms.failed.run') + error.message)
|
||||
setIsRunningOvms(false)
|
||||
}
|
||||
}
|
||||
|
||||
const stopOvms = async () => {
|
||||
try {
|
||||
setIsStoppingOvms(true)
|
||||
await window.api.ovms.stopOvms()
|
||||
// Re-check status after a successful stop
|
||||
const status = await window.api.ovms.getStatus()
|
||||
setOvmsStatus(status)
|
||||
setIsStoppingOvms(false)
|
||||
} catch (error: any) {
|
||||
window.toast.error(t('ovms.failed.stop') + error.message)
|
||||
setIsStoppingOvms(false)
|
||||
}
|
||||
}
|
||||
|
||||
const getAlertType = () => {
|
||||
switch (ovmsStatus) {
|
||||
case 'running':
|
||||
return 'success'
|
||||
case 'not-running':
|
||||
return 'warning'
|
||||
case 'not-installed':
|
||||
return 'error'
|
||||
default:
|
||||
return 'warning'
|
||||
}
|
||||
}
|
||||
|
||||
const getStatusMessage = () => {
|
||||
switch (ovmsStatus) {
|
||||
case 'running':
|
||||
return t('ovms.status.running')
|
||||
case 'not-running':
|
||||
return t('ovms.status.not_running')
|
||||
case 'not-installed':
|
||||
return t('ovms.status.not_installed')
|
||||
default:
|
||||
return t('ovms.status.unknown')
|
||||
}
|
||||
}
|
||||
|
||||
return (
|
||||
<>
|
||||
<Alert
|
||||
type={getAlertType()}
|
||||
banner
|
||||
style={{ borderRadius: 'var(--list-item-border-radius)' }}
|
||||
description={
|
||||
<VStack>
|
||||
<SettingRow style={{ width: '100%' }}>
|
||||
<SettingSubtitle style={{ margin: 0, fontWeight: 'normal' }}>{getStatusMessage()}</SettingSubtitle>
|
||||
{ovmsStatus === 'not-installed' && (
|
||||
<Button
|
||||
type="primary"
|
||||
onClick={installOvms}
|
||||
loading={isInstallingOvms}
|
||||
disabled={isInstallingOvms}
|
||||
size="small">
|
||||
{isInstallingOvms ? t('ovms.action.installing') : t('ovms.action.install')}
|
||||
</Button>
|
||||
)}
|
||||
{ovmsStatus === 'not-running' && (
|
||||
<div style={{ display: 'flex', gap: '8px' }}>
|
||||
<Button
|
||||
type="primary"
|
||||
onClick={installOvms}
|
||||
loading={isInstallingOvms}
|
||||
disabled={isInstallingOvms || isRunningOvms}
|
||||
size="small">
|
||||
{isInstallingOvms ? t('ovms.action.installing') : t('ovms.action.reinstall')}
|
||||
</Button>
|
||||
<Button
|
||||
type="primary"
|
||||
onClick={runOvms}
|
||||
loading={isRunningOvms}
|
||||
disabled={isRunningOvms}
|
||||
size="small">
|
||||
{isRunningOvms ? t('ovms.action.starting') : t('ovms.action.run')}
|
||||
</Button>
|
||||
</div>
|
||||
)}
|
||||
{ovmsStatus === 'running' && (
|
||||
<Button
|
||||
type="primary"
|
||||
danger
|
||||
onClick={stopOvms}
|
||||
loading={isStoppingOvms}
|
||||
disabled={isStoppingOvms}
|
||||
size="small">
|
||||
{isStoppingOvms ? t('ovms.action.stopping') : t('ovms.action.stop')}
|
||||
</Button>
|
||||
)}
|
||||
</SettingRow>
|
||||
</VStack>
|
||||
}
|
||||
/>
|
||||
<Alert
|
||||
type="info"
|
||||
style={{ marginTop: 5 }}
|
||||
message={'Intel OVMS Guide:'}
|
||||
description={<div dangerouslySetInnerHTML={{ __html: t('ovms.description') }}></div>}
|
||||
showIcon
|
||||
/>
|
||||
</>
|
||||
)
|
||||
}
|
||||
|
||||
export default OVMSSettings
|
||||
@ -31,6 +31,8 @@ import UrlSchemaInfoPopup from './UrlSchemaInfoPopup'
const logger = loggerService.withContext('ProviderList')

const BUTTON_WRAPPER_HEIGHT = 50
const systemType = await window.api.system.getDeviceType()
const cpuName = await window.api.system.getCpuName()

const ProviderList: FC = () => {
const [searchParams, setSearchParams] = useSearchParams()
@ -277,6 +279,10 @@ const ProviderList: FC = () => {
}

const filteredProviders = providers.filter((provider) => {
if (provider.id === 'ovms' && (systemType !== 'windows' || !cpuName.toLowerCase().includes('intel'))) {
return false
}

const keywords = searchText.toLowerCase().split(/\s+/).filter(Boolean)
const isProviderMatch = matchKeywordsInProvider(keywords, provider)
const isModelMatch = provider.models.some((model) => matchKeywordsInModel(keywords, model))

@ -49,6 +49,7 @@ import DMXAPISettings from './DMXAPISettings'
import GithubCopilotSettings from './GithubCopilotSettings'
import GPUStackSettings from './GPUStackSettings'
import LMStudioSettings from './LMStudioSettings'
import OVMSSettings from './OVMSSettings'
import ProviderOAuth from './ProviderOAuth'
import SelectProviderModelPopup from './SelectProviderModelPopup'
import VertexAISettings from './VertexAISettings'
@ -286,6 +287,7 @@ const ProviderSetting: FC<Props> = ({ providerId }) => {
<Divider style={{ width: '100%', margin: '10px 0' }} />
{isProviderSupportAuth(provider) && <ProviderOAuth providerId={provider.id} />}
{provider.id === 'openai' && <OpenAIAlert />}
{provider.id === 'ovms' && <OVMSSettings />}
{isDmxapi && <DMXAPISettings providerId={provider.id} />}
{provider.id === 'anthropic' && (
<>

@ -252,6 +252,68 @@ export async function fetchMessagesSummary({ messages, assistant }: { messages:
|
||||
}
|
||||
}
|
||||
|
||||
export async function fetchNoteSummary({ content, assistant }: { content: string; assistant?: Assistant }) {
|
||||
let prompt = (await preferenceService.get('topic.naming_prompt')) || i18n.t('prompts.title')
|
||||
const resolvedAssistant = assistant || getDefaultAssistant()
|
||||
const model = getQuickModel() || resolvedAssistant.model || getDefaultModel()
|
||||
|
||||
if (prompt && containsSupportedVariables(prompt)) {
|
||||
prompt = await replacePromptVariables(prompt, model.name)
|
||||
}
|
||||
|
||||
const provider = getProviderByModel(model)
|
||||
|
||||
if (!hasApiKey(provider)) {
|
||||
return null
|
||||
}
|
||||
|
||||
const AI = new AiProviderNew(model)
|
||||
|
||||
// keep only the first 2000 characters and strip images
|
||||
const truncatedContent = content.substring(0, 2000)
|
||||
const purifiedContent = purifyMarkdownImages(truncatedContent)
|
||||
|
||||
const summaryAssistant = {
|
||||
...resolvedAssistant,
|
||||
settings: {
|
||||
...resolvedAssistant.settings,
|
||||
reasoning_effort: undefined,
|
||||
qwenThinkMode: false
|
||||
},
|
||||
prompt,
|
||||
model
|
||||
}
|
||||
|
||||
const llmMessages = {
|
||||
system: prompt,
|
||||
prompt: purifiedContent
|
||||
}
|
||||
|
||||
const middlewareConfig: AiSdkMiddlewareConfig = {
|
||||
streamOutput: false,
|
||||
enableReasoning: false,
|
||||
isPromptToolUse: false,
|
||||
isSupportedToolUse: false,
|
||||
isImageGenerationEndpoint: false,
|
||||
enableWebSearch: false,
|
||||
enableGenerateImage: false,
|
||||
enableUrlContext: false,
|
||||
mcpTools: []
|
||||
}
|
||||
|
||||
try {
|
||||
const { getText } = await AI.completions(model.id, llmMessages, {
|
||||
...middlewareConfig,
|
||||
assistant: summaryAssistant,
|
||||
callType: 'summary'
|
||||
})
|
||||
const text = getText()
|
||||
return removeSpecialCharactersForTopicName(text) || null
|
||||
} catch (error: any) {
|
||||
return null
|
||||
}
|
||||
}
|
||||
|
||||
// export async function fetchSearchSummary({ messages, assistant }: { messages: Message[]; assistant: Assistant }) {
|
||||
// const model = getQuickModel() || assistant.model || getDefaultModel()
|
||||
// const provider = getProviderByModel(model)
|
||||
|
||||
@ -27,12 +27,17 @@ export const initialState: CodeToolsState = {
|
||||
[codeTools.qwenCode]: null,
|
||||
[codeTools.claudeCode]: null,
|
||||
[codeTools.geminiCli]: null,
|
||||
[codeTools.openaiCodex]: null
|
||||
[codeTools.openaiCodex]: null,
|
||||
[codeTools.iFlowCli]: null,
|
||||
[codeTools.githubCopilotCli]: null
|
||||
},
|
||||
environmentVariables: {
|
||||
'qwen-code': '',
|
||||
'claude-code': '',
|
||||
'gemini-cli': ''
|
||||
'gemini-cli': '',
|
||||
'openai-codex': '',
|
||||
'iflow-cli': '',
|
||||
'github-copilot-cli': ''
|
||||
},
|
||||
directories: [],
|
||||
currentDirectory: '',
|
||||
@ -64,7 +69,10 @@ const codeToolsSlice = createSlice({
|
||||
state.environmentVariables = {
|
||||
'qwen-code': '',
|
||||
'claude-code': '',
|
||||
'gemini-cli': ''
|
||||
'gemini-cli': '',
|
||||
'openai-codex': '',
|
||||
'iflow-cli': '',
|
||||
'github-copilot-cli': ''
|
||||
}
|
||||
}
|
||||
state.environmentVariables[state.selectedCliTool] = action.payload
|
||||
|
||||
@ -2553,6 +2553,15 @@ const migrateConfig = {
logger.error('migrate 158 error', error as Error)
return state
}
},
'159': (state: RootState) => {
try {
addProvider(state, 'ovms')
return state
} catch (error) {
logger.error('migrate 159 error', error as Error)
return state
}
}
}


@ -76,6 +76,7 @@ const ThinkModelTypes = [
'default',
'o',
'gpt5',
'gpt5_codex',
'grok',
'gemini',
'gemini_pro',
@ -272,6 +273,7 @@ export const SystemProviderIds = {
// cherryin: 'cherryin',
silicon: 'silicon',
aihubmix: 'aihubmix',
ovms: 'ovms',
ocoolai: 'ocoolai',
deepseek: 'deepseek',
ppio: 'ppio',

@ -1105,3 +1105,51 @@ export const exportTopicToNotes = async (topic: Topic, folderPath: string): Prom
|
||||
throw error
|
||||
}
|
||||
}
|
||||
|
||||
const exportNoteAsMarkdown = async (noteName: string, content: string): Promise<void> => {
|
||||
const markdown = `# ${noteName}\n\n${content}`
|
||||
const fileName = removeSpecialCharactersForFileName(noteName) + '.md'
|
||||
const result = await window.api.file.save(fileName, markdown)
|
||||
if (result) {
|
||||
window.toast.success(i18n.t('message.success.markdown.export.specified'))
|
||||
}
|
||||
}
|
||||
|
||||
interface NoteExportOptions {
|
||||
node: { name: string; externalPath: string }
|
||||
platform: 'markdown' | 'docx' | 'notion' | 'yuque' | 'obsidian' | 'joplin' | 'siyuan'
|
||||
}
|
||||
|
||||
export const exportNote = async ({ node, platform }: NoteExportOptions): Promise<void> => {
|
||||
try {
|
||||
const content = await window.api.file.readExternal(node.externalPath)
|
||||
|
||||
switch (platform) {
|
||||
case 'markdown':
|
||||
return await exportNoteAsMarkdown(node.name, content)
|
||||
case 'docx':
|
||||
window.api.export.toWord(`# ${node.name}\n\n${content}`, removeSpecialCharactersForFileName(node.name))
|
||||
return
|
||||
case 'notion':
|
||||
await exportMessageToNotion(node.name, content)
|
||||
return
|
||||
case 'yuque':
|
||||
await exportMarkdownToYuque(node.name, `# ${node.name}\n\n${content}`)
|
||||
return
|
||||
case 'obsidian': {
|
||||
const { default: ObsidianExportPopup } = await import('@renderer/components/Popups/ObsidianExportPopup')
|
||||
await ObsidianExportPopup.show({ title: node.name, processingMethod: '1', rawContent: content })
|
||||
return
|
||||
}
|
||||
case 'joplin':
|
||||
await exportMarkdownToJoplin(node.name, content)
|
||||
return
|
||||
case 'siyuan':
|
||||
await exportMarkdownToSiyuan(node.name, `# ${node.name}\n\n${content}`)
|
||||
return
|
||||
}
|
||||
} catch (error) {
|
||||
logger.error(`Failed to export note to ${platform}:`, error as Error)
|
||||
throw error
|
||||
}
|
||||
}
|
||||
|
||||
226
yarn.lock
@ -74,169 +74,157 @@ __metadata:
|
||||
languageName: node
|
||||
linkType: hard
|
||||
|
||||
"@ai-sdk/amazon-bedrock@npm:^3.0.21":
|
||||
version: 3.0.21
|
||||
resolution: "@ai-sdk/amazon-bedrock@npm:3.0.21"
|
||||
"@ai-sdk/amazon-bedrock@npm:^3.0.29":
|
||||
version: 3.0.29
|
||||
resolution: "@ai-sdk/amazon-bedrock@npm:3.0.29"
|
||||
dependencies:
|
||||
"@ai-sdk/anthropic": "npm:2.0.17"
|
||||
"@ai-sdk/anthropic": "npm:2.0.22"
|
||||
"@ai-sdk/provider": "npm:2.0.0"
|
||||
"@ai-sdk/provider-utils": "npm:3.0.9"
|
||||
"@ai-sdk/provider-utils": "npm:3.0.10"
|
||||
"@smithy/eventstream-codec": "npm:^4.0.1"
|
||||
"@smithy/util-utf8": "npm:^4.0.0"
|
||||
aws4fetch: "npm:^1.0.20"
|
||||
peerDependencies:
|
||||
zod: ^3.25.76 || ^4
|
||||
checksum: 10c0/2d15baaad53e389666cede9673e2b43f5299e2cedb70f5b7afc656b7616e73775a9108c2cc1beee4644ff4c66ad41c8dd0b412373dd05caa4fc3d477c4343ea8
|
||||
zod: ^3.25.76 || ^4.1.8
|
||||
checksum: 10c0/7add02e6c13774943929bb5d568b3110f6badc6d95cb56c6d3011cafc45778e27c0133417dd7fe835e7f0b1ae7767c22a7d5e3d39f725e2aa44e2b6e47d95fb7
|
||||
languageName: node
|
||||
linkType: hard
|
||||
|
||||
"@ai-sdk/anthropic@npm:2.0.17, @ai-sdk/anthropic@npm:^2.0.17":
|
||||
version: 2.0.17
|
||||
resolution: "@ai-sdk/anthropic@npm:2.0.17"
|
||||
"@ai-sdk/anthropic@npm:2.0.22, @ai-sdk/anthropic@npm:^2.0.22":
|
||||
version: 2.0.22
|
||||
resolution: "@ai-sdk/anthropic@npm:2.0.22"
|
||||
dependencies:
|
||||
"@ai-sdk/provider": "npm:2.0.0"
|
||||
"@ai-sdk/provider-utils": "npm:3.0.9"
|
||||
"@ai-sdk/provider-utils": "npm:3.0.10"
|
||||
peerDependencies:
|
||||
zod: ^3.25.76 || ^4
|
||||
checksum: 10c0/783b6a953f3854c4303ad7c30dd56d4706486c7d1151adb17071d87933418c59c26bce53d5c26d34c4d4728eaac4a856ce49a336caed26a7216f982fea562814
|
||||
zod: ^3.25.76 || ^4.1.8
|
||||
checksum: 10c0/d922d2ff606b2429fb14c099628ba6734ef7c9b0e9225635f3faaf2d067362dea6ae0e920a35c05ccf15a01c59fef93ead5f147a9609dd3dd8c3ac18a3123b85
|
||||
languageName: node
|
||||
linkType: hard
|
||||
|
||||
"@ai-sdk/azure@npm:^2.0.30":
|
||||
version: 2.0.30
|
||||
resolution: "@ai-sdk/azure@npm:2.0.30"
|
||||
"@ai-sdk/azure@npm:^2.0.42":
|
||||
version: 2.0.42
|
||||
resolution: "@ai-sdk/azure@npm:2.0.42"
|
||||
dependencies:
|
||||
"@ai-sdk/openai": "npm:2.0.30"
|
||||
"@ai-sdk/openai": "npm:2.0.42"
|
||||
"@ai-sdk/provider": "npm:2.0.0"
|
||||
"@ai-sdk/provider-utils": "npm:3.0.9"
|
||||
"@ai-sdk/provider-utils": "npm:3.0.10"
|
||||
peerDependencies:
|
||||
zod: ^3.25.76 || ^4
|
||||
checksum: 10c0/22af450e28026547badc891a627bcb3cfa2d030864089947172506810f06cfa4c74c453aabd6a0d5c05ede5ffdee381b9278772ce781eca0c7c826c7d7ae3dc3
|
||||
zod: ^3.25.76 || ^4.1.8
|
||||
checksum: 10c0/14d3d6edac691df57879a9a7efc46d5d00b6bde5b64cd62a67a7668455c341171119ae90a431e57ac37009bced19add50b3da26998376b7e56e080bc2c997c00
|
||||
languageName: node
|
||||
linkType: hard
|
||||
|
||||
"@ai-sdk/deepseek@npm:^1.0.17":
|
||||
version: 1.0.17
|
||||
resolution: "@ai-sdk/deepseek@npm:1.0.17"
|
||||
"@ai-sdk/deepseek@npm:^1.0.20":
|
||||
version: 1.0.20
|
||||
resolution: "@ai-sdk/deepseek@npm:1.0.20"
|
||||
dependencies:
|
||||
"@ai-sdk/openai-compatible": "npm:1.0.17"
|
||||
"@ai-sdk/openai-compatible": "npm:1.0.19"
|
||||
"@ai-sdk/provider": "npm:2.0.0"
|
||||
"@ai-sdk/provider-utils": "npm:3.0.9"
|
||||
"@ai-sdk/provider-utils": "npm:3.0.10"
|
||||
peerDependencies:
|
||||
zod: ^3.25.76 || ^4
|
||||
checksum: 10c0/c408701343bb28ed0b3e034b8789e6de1dfd6cfc6a9b53feb68f155889e29a9fbbcf05bd99e63f60809cf05ee4b158abaccdf1cbcd9df92c0987094220a61d08
|
||||
zod: ^3.25.76 || ^4.1.8
|
||||
checksum: 10c0/e66ece8cf6371c2bac5436ed82cd1e2bb5c367fae6df60090f91cff62bf241f4df0abded99c33558013f8dc0bcc7d962f2126086eba8587ba929da50afd3d806
|
||||
languageName: node
|
||||
linkType: hard
|
||||
|
||||
"@ai-sdk/gateway@npm:1.0.23":
|
||||
version: 1.0.23
|
||||
resolution: "@ai-sdk/gateway@npm:1.0.23"
|
||||
"@ai-sdk/gateway@npm:1.0.32":
|
||||
version: 1.0.32
|
||||
resolution: "@ai-sdk/gateway@npm:1.0.32"
|
||||
dependencies:
|
||||
"@ai-sdk/provider": "npm:2.0.0"
|
||||
"@ai-sdk/provider-utils": "npm:3.0.9"
|
||||
"@ai-sdk/provider-utils": "npm:3.0.10"
|
||||
peerDependencies:
|
||||
zod: ^3.25.76 || ^4
|
||||
checksum: 10c0/b1e1a6ab63b9191075eed92c586cd927696f8997ad24f056585aee3f5fffd283d981aa6b071a2560ecda4295445b80a4cfd321fa63c06e7ac54a06bc4c84887f
|
||||
zod: ^3.25.76 || ^4.1.8
|
||||
checksum: 10c0/82c98db6e4e8e235e1ff66410318ebe77cc1518ebf06d8d4757b4f30aaa3bf7075d3028816438551fef2f89e2d4c8c26e4efcd9913a06717aee1308dad3ddc30
|
||||
languageName: node
|
||||
linkType: hard
|
||||
|
||||
"@ai-sdk/google-vertex@npm:^3.0.27":
|
||||
version: 3.0.27
|
||||
resolution: "@ai-sdk/google-vertex@npm:3.0.27"
|
||||
"@ai-sdk/google-vertex@npm:^3.0.33":
|
||||
version: 3.0.33
|
||||
resolution: "@ai-sdk/google-vertex@npm:3.0.33"
|
||||
dependencies:
|
||||
"@ai-sdk/anthropic": "npm:2.0.17"
|
||||
"@ai-sdk/google": "npm:2.0.14"
|
||||
"@ai-sdk/anthropic": "npm:2.0.22"
|
||||
"@ai-sdk/google": "npm:2.0.17"
|
||||
"@ai-sdk/provider": "npm:2.0.0"
|
||||
"@ai-sdk/provider-utils": "npm:3.0.9"
|
||||
"@ai-sdk/provider-utils": "npm:3.0.10"
|
||||
google-auth-library: "npm:^9.15.0"
|
||||
peerDependencies:
|
||||
zod: ^3.25.76 || ^4
|
||||
checksum: 10c0/7017838aef9c04c18ce9acec52eb602ee0a38d68a7496977a3898411f1ac235b2d7776011fa686084b90b0881e65c69596014e5465b8ed0d0e313b5db1f967a7
|
||||
zod: ^3.25.76 || ^4.1.8
|
||||
checksum: 10c0/d440e46f702385985a34f2260074eb41cf2516036598039c8c72d6155825114452942c3c012a181da7661341bee9a38958e5f9a53bba145b9c5dc4446411a651
|
||||
languageName: node
|
||||
linkType: hard
|
||||
|
||||
"@ai-sdk/google@npm:2.0.14":
|
||||
version: 2.0.14
|
||||
resolution: "@ai-sdk/google@npm:2.0.14"
|
||||
"@ai-sdk/google@npm:2.0.17":
|
||||
version: 2.0.17
|
||||
resolution: "@ai-sdk/google@npm:2.0.17"
|
||||
dependencies:
|
||||
"@ai-sdk/provider": "npm:2.0.0"
|
||||
"@ai-sdk/provider-utils": "npm:3.0.9"
|
||||
"@ai-sdk/provider-utils": "npm:3.0.10"
|
||||
peerDependencies:
|
||||
zod: ^3.25.76 || ^4
|
||||
checksum: 10c0/2c04839cf58c33514a54c9de8190c363b5cacfbfc8404fea5d2ec36ad0af5ced4fc571f978e7aa35876bd9afae138f4c700d2bc1f64a78a37d0401f6797bf8f3
|
||||
zod: ^3.25.76 || ^4.1.8
|
||||
checksum: 10c0/174bcde507e5bf4bf95f20dbe4eaba73870715b13779e320f3df44995606e4d7ccd1e1f4b759d224deaf58bdfc6aa2e43a24dcbe5fa335ddfe91df1b06114218
|
||||
languageName: node
|
||||
linkType: hard
|
||||
|
||||
"@ai-sdk/google@patch:@ai-sdk/google@npm%3A2.0.14#~/.yarn/patches/@ai-sdk-google-npm-2.0.14-376d8b03cc.patch":
|
||||
version: 2.0.14
|
||||
resolution: "@ai-sdk/google@patch:@ai-sdk/google@npm%3A2.0.14#~/.yarn/patches/@ai-sdk-google-npm-2.0.14-376d8b03cc.patch::version=2.0.14&hash=351f1a"
|
||||
"@ai-sdk/mistral@npm:^2.0.17":
|
||||
version: 2.0.17
|
||||
resolution: "@ai-sdk/mistral@npm:2.0.17"
|
||||
dependencies:
|
||||
"@ai-sdk/provider": "npm:2.0.0"
|
||||
"@ai-sdk/provider-utils": "npm:3.0.9"
|
||||
"@ai-sdk/provider-utils": "npm:3.0.10"
|
||||
peerDependencies:
|
||||
zod: ^3.25.76 || ^4
|
||||
checksum: 10c0/1ed5a0732a82b981d51f63c6241ed8ee94d5c29a842764db770305cfc2f49ab6e528cac438b5357fc7b02194104c7b76d4390a1dc1d019ace9c174b0849e0da6
|
||||
zod: ^3.25.76 || ^4.1.8
|
||||
checksum: 10c0/58a129357c93cc7f2b15b2ba6ccfb9df3fb72e06163641602ea41c858f835cd76985d66665a56e4ed3fa1eb19ca75a83ae12986d466ec41942e9bf13d558c441
|
||||
languageName: node
|
||||
linkType: hard
|
||||
|
||||
"@ai-sdk/mistral@npm:^2.0.14":
|
||||
version: 2.0.14
|
||||
resolution: "@ai-sdk/mistral@npm:2.0.14"
|
||||
"@ai-sdk/openai-compatible@npm:1.0.19, @ai-sdk/openai-compatible@npm:^1.0.19":
|
||||
version: 1.0.19
|
||||
resolution: "@ai-sdk/openai-compatible@npm:1.0.19"
|
||||
dependencies:
|
||||
"@ai-sdk/provider": "npm:2.0.0"
|
||||
"@ai-sdk/provider-utils": "npm:3.0.9"
|
||||
"@ai-sdk/provider-utils": "npm:3.0.10"
|
||||
peerDependencies:
|
||||
zod: ^3.25.76 || ^4
|
||||
checksum: 10c0/420be3a039095830aaf59b6f82c1f986ff4800ba5b9438e1dd85530026a42c9454a6e632b6a1a1839816609f4752d0a19140d8943ad78bb976fb5d6a37714e16
|
||||
zod: ^3.25.76 || ^4.1.8
|
||||
checksum: 10c0/5b7b21fb515e829c3d8a499a5760ffc035d9b8220695996110e361bd79e9928859da4ecf1ea072735bcbe4977c6dd0661f543871921692e86f8b5bfef14fe0e5
|
||||
languageName: node
|
||||
linkType: hard
|
||||
|
||||
"@ai-sdk/openai-compatible@npm:1.0.17, @ai-sdk/openai-compatible@npm:^1.0.17":
|
||||
version: 1.0.17
|
||||
resolution: "@ai-sdk/openai-compatible@npm:1.0.17"
|
||||
"@ai-sdk/openai@npm:2.0.42, @ai-sdk/openai@npm:^2.0.42":
|
||||
version: 2.0.42
|
||||
resolution: "@ai-sdk/openai@npm:2.0.42"
|
||||
dependencies:
|
||||
"@ai-sdk/provider": "npm:2.0.0"
|
||||
"@ai-sdk/provider-utils": "npm:3.0.9"
|
||||
"@ai-sdk/provider-utils": "npm:3.0.10"
|
||||
peerDependencies:
|
||||
zod: ^3.25.76 || ^4
|
||||
checksum: 10c0/53ab6111e0f44437a2e268a51fb747600844d85b0cd0d170fb87a7b68af3eb21d7728d7bbf14d71c9fcf36e7a0f94ad75f0ad6b1070e473c867ab08ef84f6564
|
||||
zod: ^3.25.76 || ^4.1.8
|
||||
checksum: 10c0/b1ab158aafc86735e53c4621ffe125d469bc1732c533193652768a9f66ecd4d169303ce7ca59069b7baf725da49e55bcf81210848f09f66deaf2a8335399e6d7
|
||||
languageName: node
|
||||
linkType: hard
|
||||
|
||||
"@ai-sdk/openai@npm:2.0.30, @ai-sdk/openai@npm:^2.0.30":
|
||||
version: 2.0.30
|
||||
resolution: "@ai-sdk/openai@npm:2.0.30"
|
||||
"@ai-sdk/perplexity@npm:^2.0.11":
|
||||
version: 2.0.11
|
||||
resolution: "@ai-sdk/perplexity@npm:2.0.11"
|
||||
dependencies:
|
||||
"@ai-sdk/provider": "npm:2.0.0"
|
||||
"@ai-sdk/provider-utils": "npm:3.0.9"
|
||||
"@ai-sdk/provider-utils": "npm:3.0.10"
|
||||
peerDependencies:
|
||||
zod: ^3.25.76 || ^4
|
||||
checksum: 10c0/90a57c1b10dac46c0bbe7e16cf9202557fb250d9f0e94a2a5fb7d95b5ea77815a56add78b00238d3823f0313c9b2c42abe865478d28a6196f72b341d32dd40af
|
||||
zod: ^3.25.76 || ^4.1.8
|
||||
checksum: 10c0/a8722b68f529b3d1baaa1ba4624c61efe732f22b24dfc20e27afae07bb25d72532bcb62d022191ab5e49df24496af619eabc092a4e6ad293b3fe231ef61b6467
|
||||
languageName: node
|
||||
linkType: hard
|
||||
|
||||
"@ai-sdk/perplexity@npm:^2.0.9":
|
||||
version: 2.0.9
|
||||
resolution: "@ai-sdk/perplexity@npm:2.0.9"
|
||||
dependencies:
|
||||
"@ai-sdk/provider": "npm:2.0.0"
|
||||
"@ai-sdk/provider-utils": "npm:3.0.9"
|
||||
peerDependencies:
|
||||
zod: ^3.25.76 || ^4
|
||||
checksum: 10c0/2023aadc26c41430571c4897df79074e7a95a12f2238ad57081355484066bcf9e8dfde1da60fa6af12fc9fb2a195899326f753c69f4913dc005a33367f150349
|
||||
languageName: node
|
||||
linkType: hard
|
||||
|
||||
"@ai-sdk/provider-utils@npm:3.0.9, @ai-sdk/provider-utils@npm:^3.0.9":
|
||||
version: 3.0.9
|
||||
resolution: "@ai-sdk/provider-utils@npm:3.0.9"
|
||||
"@ai-sdk/provider-utils@npm:3.0.10, @ai-sdk/provider-utils@npm:^3.0.10":
|
||||
version: 3.0.10
|
||||
resolution: "@ai-sdk/provider-utils@npm:3.0.10"
|
||||
dependencies:
|
||||
"@ai-sdk/provider": "npm:2.0.0"
|
||||
"@standard-schema/spec": "npm:^1.0.0"
|
||||
eventsource-parser: "npm:^3.0.5"
|
||||
peerDependencies:
|
||||
zod: ^3.25.76 || ^4
|
||||
checksum: 10c0/f8b659343d7e22ae099f7b6fc514591c0408012eb0aa00f7a912798b6d7d7305cafa8f18a07c7adec0bb5d39d9b6256b76d65c5393c3fc843d1361c52f1f8080
|
||||
zod: ^3.25.76 || ^4.1.8
|
||||
checksum: 10c0/d2c16abdb84ba4ef48c9f56190b5ffde224b9e6ae5147c5c713d2623627732d34b96aa9aef2a2ea4b0c49e1b863cc963c7d7ff964a1dc95f0f036097aaaaaa98
|
||||
languageName: node
|
||||
linkType: hard
|
||||
|
||||
@ -249,16 +237,16 @@ __metadata:
languageName: node
linkType: hard

"@ai-sdk/xai@npm:^2.0.18":
version: 2.0.18
resolution: "@ai-sdk/xai@npm:2.0.18"
"@ai-sdk/xai@npm:^2.0.23":
version: 2.0.23
resolution: "@ai-sdk/xai@npm:2.0.23"
dependencies:
"@ai-sdk/openai-compatible": "npm:1.0.17"
"@ai-sdk/openai-compatible": "npm:1.0.19"
"@ai-sdk/provider": "npm:2.0.0"
"@ai-sdk/provider-utils": "npm:3.0.9"
"@ai-sdk/provider-utils": "npm:3.0.10"
peerDependencies:
zod: ^3.25.76 || ^4
checksum: 10c0/7134501a2d315ec13605558aa24d7f5662885fe8b0491a634abefeb0c5c88517149677d1beff0c8abeec78a6dcd14573a2f57d96fa54a1d63d03820ac7ff827a
zod: ^3.25.76 || ^4.1.8
checksum: 10c0/4cf6b3bc71024797d1b2e37b57fb746f7387f9a7c1da530fd040aad1a840603a1a86fb7df7e428c723eba9b1547f89063d68f84e6e08444d2d4f152dee321dc3
languageName: node
linkType: hard

@ -2420,14 +2408,14 @@ __metadata:
version: 0.0.0-use.local
resolution: "@cherrystudio/ai-core@workspace:packages/aiCore"
dependencies:
"@ai-sdk/anthropic": "npm:^2.0.17"
"@ai-sdk/azure": "npm:^2.0.30"
"@ai-sdk/deepseek": "npm:^1.0.17"
"@ai-sdk/openai": "npm:^2.0.30"
"@ai-sdk/openai-compatible": "npm:^1.0.17"
"@ai-sdk/anthropic": "npm:^2.0.22"
"@ai-sdk/azure": "npm:^2.0.42"
"@ai-sdk/deepseek": "npm:^1.0.20"
"@ai-sdk/openai": "npm:^2.0.42"
"@ai-sdk/openai-compatible": "npm:^1.0.19"
"@ai-sdk/provider": "npm:^2.0.0"
"@ai-sdk/provider-utils": "npm:^3.0.9"
"@ai-sdk/xai": "npm:^2.0.18"
"@ai-sdk/provider-utils": "npm:^3.0.10"
"@ai-sdk/xai": "npm:^2.0.23"
tsdown: "npm:^0.12.9"
typescript: "npm:^5.0.0"
vitest: "npm:^3.2.4"
@ -15077,10 +15065,10 @@ __metadata:
"@agentic/exa": "npm:^7.3.3"
"@agentic/searxng": "npm:^7.3.3"
"@agentic/tavily": "npm:^7.3.3"
"@ai-sdk/amazon-bedrock": "npm:^3.0.21"
"@ai-sdk/google-vertex": "npm:^3.0.27"
"@ai-sdk/mistral": "npm:^2.0.14"
"@ai-sdk/perplexity": "npm:^2.0.9"
"@ai-sdk/amazon-bedrock": "npm:^3.0.29"
"@ai-sdk/google-vertex": "npm:^3.0.33"
"@ai-sdk/mistral": "npm:^2.0.17"
"@ai-sdk/perplexity": "npm:^2.0.11"
"@ant-design/v5-patch-for-react-19": "npm:^1.0.3"
"@anthropic-ai/sdk": "npm:^0.41.0"
"@anthropic-ai/vertex-sdk": "patch:@anthropic-ai/vertex-sdk@npm%3A0.11.4#~/.yarn/patches/@anthropic-ai-vertex-sdk-npm-0.11.4-c19cb41edb.patch"
@ -15199,7 +15187,7 @@ __metadata:
"@viz-js/lang-dot": "npm:^1.0.5"
"@viz-js/viz": "npm:^3.14.0"
"@xyflow/react": "npm:^12.4.4"
ai: "npm:^5.0.44"
ai: "npm:^5.0.59"
antd: "patch:antd@npm%3A5.27.0#~/.yarn/patches/antd-npm-5.27.0-aa91c36546.patch"
archiver: "npm:^7.0.1"
async-mutex: "npm:^0.5.0"
@ -15224,7 +15212,7 @@ __metadata:
dotenv-cli: "npm:^7.4.2"
drizzle-kit: "npm:^0.31.4"
drizzle-orm: "npm:^0.44.2"
electron: "npm:37.4.0"
electron: "npm:37.6.0"
electron-builder: "npm:26.0.15"
electron-devtools-installer: "npm:^3.2.0"
electron-store: "npm:^8.2.0"
@ -15460,17 +15448,17 @@ __metadata:
languageName: node
linkType: hard

"ai@npm:^5.0.44":
version: 5.0.44
resolution: "ai@npm:5.0.44"
"ai@npm:^5.0.59":
version: 5.0.59
resolution: "ai@npm:5.0.59"
dependencies:
"@ai-sdk/gateway": "npm:1.0.23"
"@ai-sdk/gateway": "npm:1.0.32"
"@ai-sdk/provider": "npm:2.0.0"
"@ai-sdk/provider-utils": "npm:3.0.9"
"@ai-sdk/provider-utils": "npm:3.0.10"
"@opentelemetry/api": "npm:1.9.0"
peerDependencies:
zod: ^3.25.76 || ^4
checksum: 10c0/528c7e165f75715194204051ce0aa341d8dca7d5536c2abcf3df83ccda7399ed5d91deaa45a81340f93d2461b1c2fc5f740f7804dfd396927c71b0667403569b
zod: ^3.25.76 || ^4.1.8
checksum: 10c0/daa956e753b93fbc30afbfba5be2ebb73e3c280dae3064e13949f04d5a22c0f4ea5698cc87e24a23ed6585d9cf7febee61b915292dbbd4286dc40c449cf2b845
languageName: node
linkType: hard

@ -18934,16 +18922,16 @@ __metadata:
languageName: node
linkType: hard

"electron@npm:37.4.0":
|
||||
version: 37.4.0
|
||||
resolution: "electron@npm:37.4.0"
|
||||
"electron@npm:37.6.0":
|
||||
version: 37.6.0
|
||||
resolution: "electron@npm:37.6.0"
|
||||
dependencies:
|
||||
"@electron/get": "npm:^2.0.0"
|
||||
"@types/node": "npm:^22.7.7"
|
||||
extract-zip: "npm:^2.0.1"
|
||||
bin:
|
||||
electron: cli.js
|
||||
checksum: 10c0/92a0c41190e234d302bc612af6cce9af08cd07f6699c1ff21a9365297e73dc9d88c6c4c25ddabf352447e3e555878d2ab0f2f31a14e210dda6de74d2787ff323
|
||||
checksum: 10c0/d67b7f0ff902f9184c2a7445507746343f8b39f3616d9d26128e7515e0184252cfc8ac97a3f1458f9ea9b4af6ab5b3208282014e8d91c0e1505ff21f5fa57ce6
|
||||
languageName: node
|
||||
linkType: hard
|
||||
|
||||
|
||||