Mirror of https://github.com/CherryHQ/cherry-studio.git (synced 2025-12-28 05:11:24 +08:00)

Commit 7419cadd80: Merge branch 'main' into v2
@ -1,26 +0,0 @@
diff --git a/dist/index.js b/dist/index.js
index ff305b112779b718f21a636a27b1196125a332d9..cf32ff5086d4d9e56f8fe90c98724559083bafc3 100644
--- a/dist/index.js
+++ b/dist/index.js
@@ -471,7 +471,7 @@ function convertToGoogleGenerativeAIMessages(prompt, options) {

// src/get-model-path.ts
function getModelPath(modelId) {
- return modelId.includes("/") ? modelId : `models/${modelId}`;
+ return modelId.includes("models/") ? modelId : `models/${modelId}`;
}

// src/google-generative-ai-options.ts
diff --git a/dist/index.mjs b/dist/index.mjs
index 57659290f1cec74878a385626ad75b2a4d5cd3fc..d04e5927ec3725b6ffdb80868bfa1b5a48849537 100644
--- a/dist/index.mjs
+++ b/dist/index.mjs
@@ -477,7 +477,7 @@ function convertToGoogleGenerativeAIMessages(prompt, options) {

// src/get-model-path.ts
function getModelPath(modelId) {
- return modelId.includes("/") ? modelId : `models/${modelId}`;
+ return modelId.includes("models/") ? modelId : `models/${modelId}`;
}

// src/google-generative-ai-options.ts

152 .yarn/patches/@ai-sdk-google-npm-2.0.36-6f3cc06026.patch vendored Normal file
@ -0,0 +1,152 @@
diff --git a/dist/index.js b/dist/index.js
index c2ef089c42e13a8ee4a833899a415564130e5d79..75efa7baafb0f019fb44dd50dec1641eee8879e7 100644
--- a/dist/index.js
+++ b/dist/index.js
@@ -471,7 +471,7 @@ function convertToGoogleGenerativeAIMessages(prompt, options) {

// src/get-model-path.ts
function getModelPath(modelId) {
- return modelId.includes("/") ? modelId : `models/${modelId}`;
+ return modelId.includes("models/") ? modelId : `models/${modelId}`;
}

// src/google-generative-ai-options.ts
diff --git a/dist/index.mjs b/dist/index.mjs
index d75c0cc13c41192408c1f3f2d29d76a7bffa6268..ada730b8cb97d9b7d4cb32883a1d1ff416404d9b 100644
--- a/dist/index.mjs
+++ b/dist/index.mjs
@@ -477,7 +477,7 @@ function convertToGoogleGenerativeAIMessages(prompt, options) {

// src/get-model-path.ts
function getModelPath(modelId) {
- return modelId.includes("/") ? modelId : `models/${modelId}`;
+ return modelId.includes("models/") ? modelId : `models/${modelId}`;
}

// src/google-generative-ai-options.ts
diff --git a/dist/internal/index.js b/dist/internal/index.js
index 277cac8dc734bea2fb4f3e9a225986b402b24f48..bb704cd79e602eb8b0cee1889e42497d59ccdb7a 100644
--- a/dist/internal/index.js
+++ b/dist/internal/index.js
@@ -432,7 +432,15 @@ function prepareTools({
var _a;
tools = (tools == null ? void 0 : tools.length) ? tools : void 0;
const toolWarnings = [];
- const isGemini2 = modelId.includes("gemini-2");
+ // These changes could be safely removed when @ai-sdk/google v3 released.
+ const isLatest = (
+ [
+ 'gemini-flash-latest',
+ 'gemini-flash-lite-latest',
+ 'gemini-pro-latest',
+ ]
+ ).some(id => id === modelId);
+ const isGemini2OrNewer = modelId.includes("gemini-2") || modelId.includes("gemini-3") || isLatest;
const supportsDynamicRetrieval = modelId.includes("gemini-1.5-flash") && !modelId.includes("-8b");
const supportsFileSearch = modelId.includes("gemini-2.5");
if (tools == null) {
@@ -458,7 +466,7 @@ function prepareTools({
providerDefinedTools.forEach((tool) => {
switch (tool.id) {
case "google.google_search":
- if (isGemini2) {
+ if (isGemini2OrNewer) {
googleTools2.push({ googleSearch: {} });
} else if (supportsDynamicRetrieval) {
googleTools2.push({
@@ -474,7 +482,7 @@ function prepareTools({
}
break;
case "google.url_context":
- if (isGemini2) {
+ if (isGemini2OrNewer) {
googleTools2.push({ urlContext: {} });
} else {
toolWarnings.push({
@@ -485,7 +493,7 @@ function prepareTools({
}
break;
case "google.code_execution":
- if (isGemini2) {
+ if (isGemini2OrNewer) {
googleTools2.push({ codeExecution: {} });
} else {
toolWarnings.push({
@@ -507,7 +515,7 @@ function prepareTools({
}
break;
case "google.vertex_rag_store":
- if (isGemini2) {
+ if (isGemini2OrNewer) {
googleTools2.push({
retrieval: {
vertex_rag_store: {
diff --git a/dist/internal/index.mjs b/dist/internal/index.mjs
index 03b7cc591be9b58bcc2e775a96740d9f98862a10..347d2c12e1cee79f0f8bb258f3844fb0522a6485 100644
--- a/dist/internal/index.mjs
+++ b/dist/internal/index.mjs
@@ -424,7 +424,15 @@ function prepareTools({
var _a;
tools = (tools == null ? void 0 : tools.length) ? tools : void 0;
const toolWarnings = [];
- const isGemini2 = modelId.includes("gemini-2");
+ // These changes could be safely removed when @ai-sdk/google v3 released.
+ const isLatest = (
+ [
+ 'gemini-flash-latest',
+ 'gemini-flash-lite-latest',
+ 'gemini-pro-latest',
+ ]
+ ).some(id => id === modelId);
+ const isGemini2OrNewer = modelId.includes("gemini-2") || modelId.includes("gemini-3") || isLatest;
const supportsDynamicRetrieval = modelId.includes("gemini-1.5-flash") && !modelId.includes("-8b");
const supportsFileSearch = modelId.includes("gemini-2.5");
if (tools == null) {
@@ -450,7 +458,7 @@ function prepareTools({
providerDefinedTools.forEach((tool) => {
switch (tool.id) {
case "google.google_search":
- if (isGemini2) {
+ if (isGemini2OrNewer) {
googleTools2.push({ googleSearch: {} });
} else if (supportsDynamicRetrieval) {
googleTools2.push({
@@ -466,7 +474,7 @@ function prepareTools({
}
break;
case "google.url_context":
- if (isGemini2) {
+ if (isGemini2OrNewer) {
googleTools2.push({ urlContext: {} });
} else {
toolWarnings.push({
@@ -477,7 +485,7 @@ function prepareTools({
}
break;
case "google.code_execution":
- if (isGemini2) {
+ if (isGemini2OrNewer) {
googleTools2.push({ codeExecution: {} });
} else {
toolWarnings.push({
@@ -499,7 +507,7 @@ function prepareTools({
}
break;
case "google.vertex_rag_store":
- if (isGemini2) {
+ if (isGemini2OrNewer) {
googleTools2.push({
retrieval: {
vertex_rag_store: {
@@ -1434,9 +1442,7 @@ var googleTools = {
vertexRagStore
};
export {
- GoogleGenerativeAILanguageModel,
getGroundingMetadataSchema,
- getUrlContextMetadataSchema,
- googleTools
+ getUrlContextMetadataSchema, GoogleGenerativeAILanguageModel, googleTools
};
//# sourceMappingURL=index.mjs.map
\ No newline at end of file
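Taken together, the vendored 2.0.36 patch makes two behavioural changes: getModelPath now only skips the `models/` prefix when the id already contains `models/` (previously any id containing a `/` was passed through untouched), and the provider-defined tool gating treats `gemini-3-*` ids and the `-latest` aliases the same as `gemini-2-*`. A standalone TypeScript sketch of the patched checks, for illustration only and not the library source:

```typescript
// Sketch of the two checks introduced by the vendored @ai-sdk/google patch.
function getModelPath(modelId: string): string {
  // Old behaviour used modelId.includes('/'), so any id with a slash
  // (for example 'google/gemini-3-pro') never received the 'models/' prefix.
  return modelId.includes('models/') ? modelId : `models/${modelId}`
}

function isGemini2OrNewer(modelId: string): boolean {
  const latestAliases = ['gemini-flash-latest', 'gemini-flash-lite-latest', 'gemini-pro-latest']
  return modelId.includes('gemini-2') || modelId.includes('gemini-3') || latestAliases.includes(modelId)
}

// getModelPath('gemini-3-pro-preview')     -> 'models/gemini-3-pro-preview'
// getModelPath('models/gemini-2.5-flash')  -> 'models/gemini-2.5-flash'
// isGemini2OrNewer('gemini-flash-latest')  -> true  (googleSearch / urlContext / codeExecution enabled)
// isGemini2OrNewer('gemini-1.5-flash')     -> false (dynamic retrieval or tool warnings instead)
```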
17 package.json
@ -77,9 +77,10 @@
|
||||
"prepare": "git config blame.ignoreRevsFile .git-blame-ignore-revs && husky",
|
||||
"claude": "dotenv -e .env -- claude",
|
||||
"migrations:generate": "drizzle-kit generate --config ./migrations/sqlite-drizzle.config.ts",
|
||||
"release:aicore:alpha": "yarn workspace @cherrystudio/ai-core version prerelease --immediate && yarn workspace @cherrystudio/ai-core npm publish --tag alpha --access public",
|
||||
"release:aicore:beta": "yarn workspace @cherrystudio/ai-core version prerelease --immediate && yarn workspace @cherrystudio/ai-core npm publish --tag beta --access public",
|
||||
"release:aicore": "yarn workspace @cherrystudio/ai-core version patch --immediate && yarn workspace @cherrystudio/ai-core npm publish --access public"
|
||||
"release:aicore:alpha": "yarn workspace @cherrystudio/ai-core version prerelease --preid alpha --immediate && yarn workspace @cherrystudio/ai-core build && yarn workspace @cherrystudio/ai-core npm publish --tag alpha --access public",
|
||||
"release:aicore:beta": "yarn workspace @cherrystudio/ai-core version prerelease --preid beta --immediate && yarn workspace @cherrystudio/ai-core build && yarn workspace @cherrystudio/ai-core npm publish --tag beta --access public",
|
||||
"release:aicore": "yarn workspace @cherrystudio/ai-core version patch --immediate && yarn workspace @cherrystudio/ai-core build && yarn workspace @cherrystudio/ai-core npm publish --access public",
|
||||
"release:ai-sdk-provider": "yarn workspace @cherrystudio/ai-sdk-provider version patch --immediate && yarn workspace @cherrystudio/ai-sdk-provider build && yarn workspace @cherrystudio/ai-sdk-provider npm publish --access public"
|
||||
},
|
||||
"dependencies": {
|
||||
"@anthropic-ai/claude-agent-sdk": "patch:@anthropic-ai/claude-agent-sdk@npm%3A0.1.30#~/.yarn/patches/@anthropic-ai-claude-agent-sdk-npm-0.1.30-b50a299674.patch",
|
||||
@ -88,6 +89,7 @@
|
||||
"@napi-rs/system-ocr": "patch:@napi-rs/system-ocr@npm%3A1.0.2#~/.yarn/patches/@napi-rs-system-ocr-npm-1.0.2-59e7a78e8b.patch",
|
||||
"@paymoapp/electron-shutdown-handler": "^1.1.2",
|
||||
"@strongtz/win32-arm64-msvc": "^0.4.7",
|
||||
"emoji-picker-element-data": "^1",
|
||||
"express": "^5.1.0",
|
||||
"font-list": "^2.0.0",
|
||||
"graceful-fs": "^4.2.11",
|
||||
@ -115,10 +117,11 @@
|
||||
"@ai-sdk/anthropic": "^2.0.44",
|
||||
"@ai-sdk/cerebras": "^1.0.31",
|
||||
"@ai-sdk/gateway": "^2.0.9",
|
||||
"@ai-sdk/google": "^2.0.32",
|
||||
"@ai-sdk/google-vertex": "^3.0.62",
|
||||
"@ai-sdk/google": "patch:@ai-sdk/google@npm%3A2.0.36#~/.yarn/patches/@ai-sdk-google-npm-2.0.36-6f3cc06026.patch",
|
||||
"@ai-sdk/google-vertex": "^3.0.68",
|
||||
"@ai-sdk/huggingface": "patch:@ai-sdk/huggingface@npm%3A0.0.8#~/.yarn/patches/@ai-sdk-huggingface-npm-0.0.8-d4d0aaac93.patch",
|
||||
"@ai-sdk/mistral": "^2.0.23",
|
||||
"@ai-sdk/openai": "patch:@ai-sdk/openai@npm%3A2.0.64#~/.yarn/patches/@ai-sdk-openai-npm-2.0.64-48f99f5bf3.patch",
|
||||
"@ai-sdk/perplexity": "^2.0.17",
|
||||
"@ant-design/v5-patch-for-react-19": "^1.0.3",
|
||||
"@anthropic-ai/sdk": "^0.41.0",
|
||||
@ -127,7 +130,7 @@
|
||||
"@aws-sdk/client-bedrock-runtime": "^3.910.0",
|
||||
"@aws-sdk/client-s3": "^3.910.0",
|
||||
"@biomejs/biome": "2.2.4",
|
||||
"@cherrystudio/ai-core": "workspace:^1.0.0-alpha.18",
|
||||
"@cherrystudio/ai-core": "workspace:^1.0.9",
|
||||
"@cherrystudio/embedjs": "^0.1.31",
|
||||
"@cherrystudio/embedjs-libsql": "^0.1.31",
|
||||
"@cherrystudio/embedjs-loader-csv": "^0.1.31",
|
||||
@ -415,7 +418,7 @@
|
||||
"@langchain/openai@npm:>=0.2.0 <0.7.0": "patch:@langchain/openai@npm%3A1.0.0#~/.yarn/patches/@langchain-openai-npm-1.0.0-474d0ad9d4.patch",
|
||||
"@ai-sdk/openai@npm:2.0.64": "patch:@ai-sdk/openai@npm%3A2.0.64#~/.yarn/patches/@ai-sdk-openai-npm-2.0.64-48f99f5bf3.patch",
|
||||
"@ai-sdk/openai@npm:^2.0.42": "patch:@ai-sdk/openai@npm%3A2.0.64#~/.yarn/patches/@ai-sdk-openai-npm-2.0.64-48f99f5bf3.patch",
|
||||
"@ai-sdk/google@npm:2.0.31": "patch:@ai-sdk/google@npm%3A2.0.31#~/.yarn/patches/@ai-sdk-google-npm-2.0.31-b0de047210.patch"
|
||||
"@ai-sdk/google@npm:2.0.36": "patch:@ai-sdk/google@npm%3A2.0.36#~/.yarn/patches/@ai-sdk-google-npm-2.0.36-6f3cc06026.patch"
|
||||
},
|
||||
"packageManager": "yarn@4.9.1",
|
||||
"lint-staged": {
|
||||
|
||||
@ -1,6 +1,6 @@
|
||||
{
|
||||
"name": "@cherrystudio/ai-sdk-provider",
|
||||
"version": "0.1.0",
|
||||
"version": "0.1.2",
|
||||
"description": "Cherry Studio AI SDK provider bundle with CherryIN routing.",
|
||||
"keywords": [
|
||||
"ai-sdk",
|
||||
|
||||
@ -71,7 +71,7 @@ Cherry Studio AI Core 是一个基于 Vercel AI SDK 的统一 AI Provider 接口
|
||||
## 安装
|
||||
|
||||
```bash
|
||||
npm install @cherrystudio/ai-core ai
|
||||
npm install @cherrystudio/ai-core ai @ai-sdk/google @ai-sdk/openai
|
||||
```
|
||||
|
||||
### React Native
|
||||
|
||||
@ -1,6 +1,6 @@
|
||||
{
|
||||
"name": "@cherrystudio/ai-core",
|
||||
"version": "1.0.1",
|
||||
"version": "1.0.9",
|
||||
"description": "Cherry Studio AI Core - Unified AI Provider Interface Based on Vercel AI SDK",
|
||||
"main": "dist/index.js",
|
||||
"module": "dist/index.mjs",
|
||||
@ -33,19 +33,19 @@
|
||||
},
|
||||
"homepage": "https://github.com/CherryHQ/cherry-studio#readme",
|
||||
"peerDependencies": {
|
||||
"@ai-sdk/google": "^2.0.36",
|
||||
"@ai-sdk/openai": "^2.0.64",
|
||||
"@cherrystudio/ai-sdk-provider": "^0.1.2",
|
||||
"ai": "^5.0.26"
|
||||
},
|
||||
"dependencies": {
|
||||
"@ai-sdk/anthropic": "^2.0.43",
|
||||
"@ai-sdk/azure": "^2.0.66",
|
||||
"@ai-sdk/deepseek": "^1.0.27",
|
||||
"@ai-sdk/google": "patch:@ai-sdk/google@npm%3A2.0.31#~/.yarn/patches/@ai-sdk-google-npm-2.0.31-b0de047210.patch",
|
||||
"@ai-sdk/openai": "patch:@ai-sdk/openai@npm%3A2.0.64#~/.yarn/patches/@ai-sdk-openai-npm-2.0.64-48f99f5bf3.patch",
|
||||
"@ai-sdk/openai-compatible": "^1.0.26",
|
||||
"@ai-sdk/provider": "^2.0.0",
|
||||
"@ai-sdk/provider-utils": "^3.0.16",
|
||||
"@ai-sdk/xai": "^2.0.31",
|
||||
"@cherrystudio/ai-sdk-provider": "workspace:*",
|
||||
"zod": "^4.1.5"
|
||||
},
|
||||
"devDependencies": {
|
||||
|
||||
@ -4,12 +4,7 @@
|
||||
*/
|
||||
export const BUILT_IN_PLUGIN_PREFIX = 'built-in:'
|
||||
|
||||
export { googleToolsPlugin } from './googleToolsPlugin'
|
||||
export { createLoggingPlugin } from './logging'
|
||||
export { createPromptToolUsePlugin } from './toolUsePlugin/promptToolUsePlugin'
|
||||
export type {
|
||||
PromptToolUseConfig,
|
||||
ToolUseRequestContext,
|
||||
ToolUseResult
|
||||
} from './toolUsePlugin/type'
|
||||
export { webSearchPlugin, type WebSearchPluginConfig } from './webSearchPlugin'
|
||||
export * from './googleToolsPlugin'
|
||||
export * from './toolUsePlugin/promptToolUsePlugin'
|
||||
export * from './toolUsePlugin/type'
|
||||
export * from './webSearchPlugin'
|
||||
|
||||
@ -32,7 +32,7 @@ export const webSearchPlugin = (config: WebSearchPluginConfig = DEFAULT_WEB_SEAR
|
||||
})
|
||||
|
||||
// 导出类型定义供开发者使用
|
||||
export type { WebSearchPluginConfig, WebSearchToolOutputSchema } from './helper'
|
||||
export * from './helper'
|
||||
|
||||
// 默认导出
|
||||
export default webSearchPlugin
|
||||
|
||||
@ -44,7 +44,7 @@ export {
|
||||
// ==================== 基础数据和类型 ====================
|
||||
|
||||
// 基础Provider数据源
|
||||
export { baseProviderIds, baseProviders } from './schemas'
|
||||
export { baseProviderIds, baseProviders, isBaseProvider } from './schemas'
|
||||
|
||||
// 类型定义和Schema
|
||||
export type {
|
||||
|
||||
@ -7,7 +7,6 @@ import { createAzure } from '@ai-sdk/azure'
|
||||
import { type AzureOpenAIProviderSettings } from '@ai-sdk/azure'
|
||||
import { createDeepSeek } from '@ai-sdk/deepseek'
|
||||
import { createGoogleGenerativeAI } from '@ai-sdk/google'
|
||||
import { createHuggingFace } from '@ai-sdk/huggingface'
|
||||
import { createOpenAI, type OpenAIProviderSettings } from '@ai-sdk/openai'
|
||||
import { createOpenAICompatible } from '@ai-sdk/openai-compatible'
|
||||
import type { LanguageModelV2 } from '@ai-sdk/provider'
|
||||
@ -33,8 +32,7 @@ export const baseProviderIds = [
|
||||
'deepseek',
|
||||
'openrouter',
|
||||
'cherryin',
|
||||
'cherryin-chat',
|
||||
'huggingface'
|
||||
'cherryin-chat'
|
||||
] as const
|
||||
|
||||
/**
|
||||
@ -158,12 +156,6 @@ export const baseProviders = [
|
||||
})
|
||||
},
|
||||
supportsImageGeneration: true
|
||||
},
|
||||
{
|
||||
id: 'huggingface',
|
||||
name: 'HuggingFace',
|
||||
creator: createHuggingFace,
|
||||
supportsImageGeneration: true
|
||||
}
|
||||
] as const satisfies BaseProvider[]
|
||||
|
||||
|
||||
@ -41,6 +41,7 @@ export enum IpcChannel {
|
||||
App_SetFullScreen = 'app:set-full-screen',
|
||||
App_IsFullScreen = 'app:is-full-screen',
|
||||
App_GetSystemFonts = 'app:get-system-fonts',
|
||||
APP_CrashRenderProcess = 'app:crash-render-process',
|
||||
|
||||
App_MacIsProcessTrusted = 'app:mac-is-process-trusted',
|
||||
App_MacRequestProcessTrust = 'app:mac-request-process-trust',
|
||||
|
||||
@ -10,7 +10,7 @@ import { electronApp, optimizer } from '@electron-toolkit/utils'
|
||||
import { dbService } from '@data/db/DbService'
|
||||
import { preferenceService } from '@data/PreferenceService'
|
||||
import { replaceDevtoolsFont } from '@main/utils/windowUtil'
|
||||
import { app, dialog } from 'electron'
|
||||
import { app, dialog, crashReporter } from 'electron'
|
||||
import installExtension, { REACT_DEVELOPER_TOOLS, REDUX_DEVTOOLS } from 'electron-devtools-installer'
|
||||
import { isDev, isLinux, isWin } from './constant'
|
||||
|
||||
@ -47,6 +47,14 @@ import { initWebviewHotkeys } from './services/WebviewService'
|
||||
|
||||
const logger = loggerService.withContext('MainEntry')
|
||||
|
||||
// enable local crash reports
|
||||
crashReporter.start({
|
||||
companyName: 'CherryHQ',
|
||||
productName: 'CherryStudio',
|
||||
submitURL: '',
|
||||
uploadToServer: false
|
||||
})
|
||||
|
||||
/**
|
||||
* Disable hardware acceleration if setting is enabled
|
||||
*/
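One practical note, based on Electron's documented behaviour rather than anything shown in this diff: with `uploadToServer: false` the minidumps are only kept locally, under the app's `crashDumps` path.

```typescript
import { app } from 'electron'

// Directory where the locally kept dumps from crashReporter.start({ uploadToServer: false }) are written.
const crashDumpsDir = app.getPath('crashDumps')
console.log('crash dumps are written to:', crashDumpsDir)
```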
@ -1040,6 +1040,10 @@ export function registerIpc(mainWindow: BrowserWindow, app: Electron.App) {
|
||||
ipcMain.handle(IpcChannel.WebSocket_SendFile, WebSocketService.sendFile)
|
||||
ipcMain.handle(IpcChannel.WebSocket_GetAllCandidates, WebSocketService.getAllCandidates)
|
||||
|
||||
ipcMain.handle(IpcChannel.APP_CrashRenderProcess, () => {
|
||||
mainWindow.webContents.forcefullyCrashRenderer()
|
||||
})
|
||||
|
||||
// Preference handlers
|
||||
PreferenceService.registerIpcHandler()
|
||||
}
|
||||
|
||||
@ -25,7 +25,7 @@ describe('stripLocalCommandTags', () => {
|
||||
|
||||
describe('Claude → AiSDK transform', () => {
|
||||
it('handles tool call streaming lifecycle', () => {
|
||||
const state = new ClaudeStreamState()
|
||||
const state = new ClaudeStreamState({ agentSessionId: baseStreamMetadata.session_id })
|
||||
const parts: ReturnType<typeof transformSDKMessageToStreamParts>[number][] = []
|
||||
|
||||
const messages: SDKMessage[] = [
|
||||
@ -182,14 +182,14 @@ describe('Claude → AiSDK transform', () => {
|
||||
(typeof parts)[number],
|
||||
{ type: 'tool-result' }
|
||||
>
|
||||
expect(toolResult.toolCallId).toBe('tool-1')
|
||||
expect(toolResult.toolCallId).toBe('session-123:tool-1')
|
||||
expect(toolResult.toolName).toBe('Bash')
|
||||
expect(toolResult.input).toEqual({ command: 'ls' })
|
||||
expect(toolResult.output).toBe('ok')
|
||||
})
|
||||
|
||||
it('handles streaming text completion', () => {
|
||||
const state = new ClaudeStreamState()
|
||||
const state = new ClaudeStreamState({ agentSessionId: baseStreamMetadata.session_id })
|
||||
const parts: ReturnType<typeof transformSDKMessageToStreamParts>[number][] = []
|
||||
|
||||
const messages: SDKMessage[] = [
|
||||
|
||||
@ -10,8 +10,21 @@
|
||||
* Every Claude turn gets its own instance. `resetStep` should be invoked once the finish event has
|
||||
* been emitted to avoid leaking state into the next turn.
|
||||
*/
|
||||
import { loggerService } from '@logger'
|
||||
import type { FinishReason, LanguageModelUsage, ProviderMetadata } from 'ai'
|
||||
|
||||
/**
|
||||
* Builds a namespaced tool call ID by combining session ID with raw tool call ID.
|
||||
* This ensures tool calls from different sessions don't conflict even if they have
|
||||
* the same raw ID from the SDK.
|
||||
*
|
||||
* @param sessionId - The agent session ID
|
||||
* @param rawToolCallId - The raw tool call ID from SDK (e.g., "WebFetch_0")
|
||||
*/
|
||||
export function buildNamespacedToolCallId(sessionId: string, rawToolCallId: string): string {
|
||||
return `${sessionId}:${rawToolCallId}`
|
||||
}
|
||||
|
||||
/**
|
||||
* Shared fields for every block that Claude can stream (text, reasoning, tool).
|
||||
*/
|
||||
@ -34,6 +47,7 @@ type ReasoningBlockState = BaseBlockState & {
|
||||
type ToolBlockState = BaseBlockState & {
|
||||
kind: 'tool'
|
||||
toolCallId: string
|
||||
rawToolCallId: string
|
||||
toolName: string
|
||||
inputBuffer: string
|
||||
providerMetadata?: ProviderMetadata
|
||||
@ -48,12 +62,17 @@ type PendingUsageState = {
|
||||
}
|
||||
|
||||
type PendingToolCall = {
|
||||
rawToolCallId: string
|
||||
toolCallId: string
|
||||
toolName: string
|
||||
input: unknown
|
||||
providerMetadata?: ProviderMetadata
|
||||
}
|
||||
|
||||
type ClaudeStreamStateOptions = {
|
||||
agentSessionId: string
|
||||
}
|
||||
|
||||
/**
|
||||
* Tracks the lifecycle of Claude streaming blocks (text, thinking, tool calls)
|
||||
* across individual websocket events. The transformer relies on this class to
|
||||
@ -61,12 +80,20 @@ type PendingToolCall = {
|
||||
* usage/finish metadata once Anthropic closes a message.
|
||||
*/
|
||||
export class ClaudeStreamState {
|
||||
private logger
|
||||
private readonly agentSessionId: string
|
||||
private blocksByIndex = new Map<number, BlockState>()
|
||||
private toolIndexById = new Map<string, number>()
|
||||
private toolIndexByNamespacedId = new Map<string, number>()
|
||||
private pendingUsage: PendingUsageState = {}
|
||||
private pendingToolCalls = new Map<string, PendingToolCall>()
|
||||
private stepActive = false
|
||||
|
||||
constructor(options: ClaudeStreamStateOptions) {
|
||||
this.logger = loggerService.withContext('ClaudeStreamState')
|
||||
this.agentSessionId = options.agentSessionId
|
||||
this.logger.silly('ClaudeStreamState', options)
|
||||
}
|
||||
|
||||
/** Marks the beginning of a new AiSDK step. */
|
||||
beginStep(): void {
|
||||
this.stepActive = true
|
||||
@ -104,19 +131,21 @@ export class ClaudeStreamState {
|
||||
/** Caches tool metadata so subsequent input deltas and results can find it. */
|
||||
openToolBlock(
|
||||
index: number,
|
||||
params: { toolCallId: string; toolName: string; providerMetadata?: ProviderMetadata }
|
||||
params: { rawToolCallId: string; toolName: string; providerMetadata?: ProviderMetadata }
|
||||
): ToolBlockState {
|
||||
const toolCallId = buildNamespacedToolCallId(this.agentSessionId, params.rawToolCallId)
|
||||
const block: ToolBlockState = {
|
||||
kind: 'tool',
|
||||
id: params.toolCallId,
|
||||
id: toolCallId,
|
||||
index,
|
||||
toolCallId: params.toolCallId,
|
||||
toolCallId,
|
||||
rawToolCallId: params.rawToolCallId,
|
||||
toolName: params.toolName,
|
||||
inputBuffer: '',
|
||||
providerMetadata: params.providerMetadata
|
||||
}
|
||||
this.blocksByIndex.set(index, block)
|
||||
this.toolIndexById.set(params.toolCallId, index)
|
||||
this.toolIndexByNamespacedId.set(toolCallId, index)
|
||||
return block
|
||||
}
|
||||
|
||||
@ -125,13 +154,17 @@ export class ClaudeStreamState {
|
||||
}
|
||||
|
||||
getToolBlockById(toolCallId: string): ToolBlockState | undefined {
|
||||
const index = this.toolIndexById.get(toolCallId)
|
||||
const index = this.toolIndexByNamespacedId.get(toolCallId)
|
||||
if (index === undefined) return undefined
|
||||
const block = this.blocksByIndex.get(index)
|
||||
if (!block || block.kind !== 'tool') return undefined
|
||||
return block
|
||||
}
|
||||
|
||||
getToolBlockByRawId(rawToolCallId: string): ToolBlockState | undefined {
|
||||
return this.getToolBlockById(buildNamespacedToolCallId(this.agentSessionId, rawToolCallId))
|
||||
}
|
||||
|
||||
/** Appends streamed text to a text block, returning the updated state when present. */
|
||||
appendTextDelta(index: number, text: string): TextBlockState | undefined {
|
||||
const block = this.blocksByIndex.get(index)
|
||||
@ -158,10 +191,12 @@ export class ClaudeStreamState {
|
||||
|
||||
/** Records a tool call to be consumed once its result arrives from the user. */
|
||||
registerToolCall(
|
||||
toolCallId: string,
|
||||
rawToolCallId: string,
|
||||
payload: { toolName: string; input: unknown; providerMetadata?: ProviderMetadata }
|
||||
): void {
|
||||
this.pendingToolCalls.set(toolCallId, {
|
||||
const toolCallId = buildNamespacedToolCallId(this.agentSessionId, rawToolCallId)
|
||||
this.pendingToolCalls.set(rawToolCallId, {
|
||||
rawToolCallId,
|
||||
toolCallId,
|
||||
toolName: payload.toolName,
|
||||
input: payload.input,
|
||||
@ -170,10 +205,10 @@ export class ClaudeStreamState {
|
||||
}
|
||||
|
||||
/** Retrieves and clears the buffered tool call metadata for the given id. */
|
||||
consumePendingToolCall(toolCallId: string): PendingToolCall | undefined {
|
||||
const entry = this.pendingToolCalls.get(toolCallId)
|
||||
consumePendingToolCall(rawToolCallId: string): PendingToolCall | undefined {
|
||||
const entry = this.pendingToolCalls.get(rawToolCallId)
|
||||
if (entry) {
|
||||
this.pendingToolCalls.delete(toolCallId)
|
||||
this.pendingToolCalls.delete(rawToolCallId)
|
||||
}
|
||||
return entry
|
||||
}
|
||||
@ -183,12 +218,12 @@ export class ClaudeStreamState {
|
||||
* completion so that downstream tool results can reference the original call.
|
||||
*/
|
||||
completeToolBlock(toolCallId: string, input: unknown, providerMetadata?: ProviderMetadata): void {
|
||||
const block = this.getToolBlockByRawId(toolCallId)
|
||||
this.registerToolCall(toolCallId, {
|
||||
toolName: this.getToolBlockById(toolCallId)?.toolName ?? 'unknown',
|
||||
toolName: block?.toolName ?? 'unknown',
|
||||
input,
|
||||
providerMetadata
|
||||
})
|
||||
const block = this.getToolBlockById(toolCallId)
|
||||
if (block) {
|
||||
block.resolvedInput = input
|
||||
}
|
||||
@ -200,7 +235,7 @@ export class ClaudeStreamState {
|
||||
if (!block) return undefined
|
||||
this.blocksByIndex.delete(index)
|
||||
if (block.kind === 'tool') {
|
||||
this.toolIndexById.delete(block.toolCallId)
|
||||
this.toolIndexByNamespacedId.delete(block.toolCallId)
|
||||
}
|
||||
return block
|
||||
}
|
||||
@ -227,7 +262,7 @@ export class ClaudeStreamState {
|
||||
/** Drops cached block metadata for the currently active message. */
|
||||
resetBlocks(): void {
|
||||
this.blocksByIndex.clear()
|
||||
this.toolIndexById.clear()
|
||||
this.toolIndexByNamespacedId.clear()
|
||||
}
|
||||
|
||||
/** Resets the entire step lifecycle after emitting a terminal frame. */
|
||||
@ -236,6 +271,10 @@ export class ClaudeStreamState {
|
||||
this.resetPendingUsage()
|
||||
this.stepActive = false
|
||||
}
|
||||
|
||||
getNamespacedToolCallId(rawToolCallId: string): string {
|
||||
return buildNamespacedToolCallId(this.agentSessionId, rawToolCallId)
|
||||
}
|
||||
}
|
||||
|
||||
export type { PendingToolCall }
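A small usage sketch of the namespacing introduced above; the session and tool-call ids are hypothetical, only the `${sessionId}:${rawToolCallId}` format comes from buildNamespacedToolCallId:

```typescript
// Hypothetical ids, matching the format noted in tool-permissions below (`session_123:WebFetch_0`).
const sessionId = 'session_123'
const rawToolCallId = 'WebFetch_0' // raw id as emitted by the Claude Agent SDK

const namespaced = buildNamespacedToolCallId(sessionId, rawToolCallId)
// -> 'session_123:WebFetch_0'
//
// Stream parts (tool-call / tool-result / tool-error) and permission prompts now carry the
// namespaced id, while pendingToolCalls and consumePendingToolCall still key off the raw SDK id.
```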
@ -13,6 +13,7 @@ import { app } from 'electron'
|
||||
import type { GetAgentSessionResponse } from '../..'
|
||||
import type { AgentServiceInterface, AgentStream, AgentStreamEvent } from '../../interfaces/AgentStreamInterface'
|
||||
import { sessionService } from '../SessionService'
|
||||
import { buildNamespacedToolCallId } from './claude-stream-state'
|
||||
import { promptForToolApproval } from './tool-permissions'
|
||||
import { ClaudeStreamState, transformSDKMessageToStreamParts } from './transform'
|
||||
|
||||
@ -150,7 +151,10 @@ class ClaudeCodeService implements AgentServiceInterface {
|
||||
return { behavior: 'allow', updatedInput: input }
|
||||
}
|
||||
|
||||
return promptForToolApproval(toolName, input, options)
|
||||
return promptForToolApproval(toolName, input, {
|
||||
...options,
|
||||
toolCallId: buildNamespacedToolCallId(session.id, options.toolUseID)
|
||||
})
|
||||
}
|
||||
|
||||
// Build SDK options from parameters
|
||||
@ -346,7 +350,7 @@ class ClaudeCodeService implements AgentServiceInterface {
|
||||
const jsonOutput: SDKMessage[] = []
|
||||
let hasCompleted = false
|
||||
const startTime = Date.now()
|
||||
const streamState = new ClaudeStreamState()
|
||||
const streamState = new ClaudeStreamState({ agentSessionId: sessionId })
|
||||
|
||||
try {
|
||||
for await (const message of query({ prompt: promptStream, options })) {
|
||||
|
||||
@ -37,6 +37,7 @@ type RendererPermissionRequestPayload = {
|
||||
requestId: string
|
||||
toolName: string
|
||||
toolId: string
|
||||
toolCallId: string
|
||||
description?: string
|
||||
requiresPermissions: boolean
|
||||
input: Record<string, unknown>
|
||||
@ -206,10 +207,19 @@ const ensureIpcHandlersRegistered = () => {
|
||||
})
|
||||
}
|
||||
|
||||
type PromptForToolApprovalOptions = {
|
||||
signal: AbortSignal
|
||||
suggestions?: PermissionUpdate[]
|
||||
|
||||
// NOTICE: This ID is namespaced with session ID, not the raw SDK tool call ID.
|
||||
// Format: `${sessionId}:${rawToolCallId}`, e.g., `session_123:WebFetch_0`
|
||||
toolCallId: string
|
||||
}
|
||||
|
||||
export async function promptForToolApproval(
|
||||
toolName: string,
|
||||
input: Record<string, unknown>,
|
||||
options?: { signal: AbortSignal; suggestions?: PermissionUpdate[] }
|
||||
options: PromptForToolApprovalOptions
|
||||
): Promise<PermissionResult> {
|
||||
if (shouldAutoApproveTools) {
|
||||
logger.debug('promptForToolApproval auto-approving tool for test', {
|
||||
@ -245,6 +255,7 @@ export async function promptForToolApproval(
|
||||
logger.info('Requesting user approval for tool usage', {
|
||||
requestId,
|
||||
toolName,
|
||||
toolCallId: options.toolCallId,
|
||||
description: toolMetadata?.description
|
||||
})
|
||||
|
||||
@ -252,6 +263,7 @@ export async function promptForToolApproval(
|
||||
requestId,
|
||||
toolName,
|
||||
toolId: toolMetadata?.id ?? toolName,
|
||||
toolCallId: options.toolCallId,
|
||||
description: toolMetadata?.description,
|
||||
requiresPermissions: toolMetadata?.requirePermissions ?? false,
|
||||
input: sanitizedInput,
|
||||
@ -266,6 +278,7 @@ export async function promptForToolApproval(
|
||||
logger.debug('Registering tool permission request', {
|
||||
requestId,
|
||||
toolName,
|
||||
toolCallId: options.toolCallId,
|
||||
requiresPermissions: requestPayload.requiresPermissions,
|
||||
timeoutMs: TOOL_APPROVAL_TIMEOUT_MS,
|
||||
suggestionCount: sanitizedSuggestions.length
|
||||
@ -273,7 +286,11 @@ export async function promptForToolApproval(
|
||||
|
||||
return new Promise<PermissionResult>((resolve) => {
|
||||
const timeout = setTimeout(() => {
|
||||
logger.info('User tool permission request timed out', { requestId, toolName })
|
||||
logger.info('User tool permission request timed out', {
|
||||
requestId,
|
||||
toolName,
|
||||
toolCallId: options.toolCallId
|
||||
})
|
||||
finalizeRequest(requestId, { behavior: 'deny', message: 'Timed out waiting for approval' }, 'timeout')
|
||||
}, TOOL_APPROVAL_TIMEOUT_MS)
|
||||
|
||||
@ -287,7 +304,11 @@ export async function promptForToolApproval(
|
||||
|
||||
if (options?.signal) {
|
||||
const abortListener = () => {
|
||||
logger.info('Tool permission request aborted before user responded', { requestId, toolName })
|
||||
logger.info('Tool permission request aborted before user responded', {
|
||||
requestId,
|
||||
toolName,
|
||||
toolCallId: options.toolCallId
|
||||
})
|
||||
finalizeRequest(requestId, defaultDenyUpdate, 'aborted')
|
||||
}
|
||||
|
||||
|
||||
@ -243,9 +243,10 @@ function handleAssistantToolUse(
|
||||
state: ClaudeStreamState,
|
||||
chunks: AgentStreamPart[]
|
||||
): void {
|
||||
const toolCallId = state.getNamespacedToolCallId(block.id)
|
||||
chunks.push({
|
||||
type: 'tool-call',
|
||||
toolCallId: block.id,
|
||||
toolCallId,
|
||||
toolName: block.name,
|
||||
input: block.input,
|
||||
providerExecuted: true,
|
||||
@ -331,10 +332,11 @@ function handleUserMessage(
|
||||
if (block.type === 'tool_result') {
|
||||
const toolResult = block as ToolResultContent
|
||||
const pendingCall = state.consumePendingToolCall(toolResult.tool_use_id)
|
||||
const toolCallId = pendingCall?.toolCallId ?? state.getNamespacedToolCallId(toolResult.tool_use_id)
|
||||
if (toolResult.is_error) {
|
||||
chunks.push({
|
||||
type: 'tool-error',
|
||||
toolCallId: toolResult.tool_use_id,
|
||||
toolCallId,
|
||||
toolName: pendingCall?.toolName ?? 'unknown',
|
||||
input: pendingCall?.input,
|
||||
error: toolResult.content,
|
||||
@ -343,7 +345,7 @@ function handleUserMessage(
|
||||
} else {
|
||||
chunks.push({
|
||||
type: 'tool-result',
|
||||
toolCallId: toolResult.tool_use_id,
|
||||
toolCallId,
|
||||
toolName: pendingCall?.toolName ?? 'unknown',
|
||||
input: pendingCall?.input,
|
||||
output: toolResult.content,
|
||||
@ -514,7 +516,7 @@ function handleContentBlockStart(
|
||||
}
|
||||
case 'tool_use': {
|
||||
const block = state.openToolBlock(index, {
|
||||
toolCallId: contentBlock.id,
|
||||
rawToolCallId: contentBlock.id,
|
||||
toolName: contentBlock.name,
|
||||
providerMetadata
|
||||
})
|
||||
|
||||
@ -116,6 +116,7 @@ const api = {
|
||||
setFullScreen: (value: boolean): Promise<void> => ipcRenderer.invoke(IpcChannel.App_SetFullScreen, value),
|
||||
isFullScreen: (): Promise<boolean> => ipcRenderer.invoke(IpcChannel.App_IsFullScreen),
|
||||
getSystemFonts: (): Promise<string[]> => ipcRenderer.invoke(IpcChannel.App_GetSystemFonts),
|
||||
mockCrashRenderProcess: () => ipcRenderer.invoke(IpcChannel.APP_CrashRenderProcess),
|
||||
mac: {
|
||||
isProcessTrusted: (): Promise<boolean> => ipcRenderer.invoke(IpcChannel.App_MacIsProcessTrusted),
|
||||
requestProcessTrust: (): Promise<boolean> => ipcRenderer.invoke(IpcChannel.App_MacRequestProcessTrust)
|
||||
|
||||
@ -99,9 +99,6 @@ export function buildProviderOptions(
|
||||
serviceTier: serviceTierSetting
|
||||
}
|
||||
break
|
||||
case 'huggingface':
|
||||
providerSpecificOptions = buildOpenAIProviderOptions(assistant, model, capabilities)
|
||||
break
|
||||
case 'anthropic':
|
||||
providerSpecificOptions = buildAnthropicProviderOptions(assistant, model, capabilities)
|
||||
break
|
||||
@ -144,6 +141,9 @@ export function buildProviderOptions(
|
||||
case 'bedrock':
|
||||
providerSpecificOptions = buildBedrockProviderOptions(assistant, model, capabilities)
|
||||
break
|
||||
case 'huggingface':
|
||||
providerSpecificOptions = buildOpenAIProviderOptions(assistant, model, capabilities)
|
||||
break
|
||||
default:
|
||||
// 对于其他 provider,使用通用的构建逻辑
|
||||
providerSpecificOptions = {
|
||||
@ -162,13 +162,17 @@ export function buildProviderOptions(
|
||||
...getCustomParameters(assistant)
|
||||
}
|
||||
|
||||
const rawProviderKey =
|
||||
let rawProviderKey =
|
||||
{
|
||||
'google-vertex': 'google',
|
||||
'google-vertex-anthropic': 'anthropic',
|
||||
'ai-gateway': 'gateway'
|
||||
}[rawProviderId] || rawProviderId
|
||||
|
||||
if (rawProviderKey === 'cherryin') {
|
||||
rawProviderKey = { gemini: 'google' }[actualProvider.type] || actualProvider.type
|
||||
}
|
||||
|
||||
// 返回 AI Core SDK 要求的格式:{ 'providerId': providerOptions }
|
||||
return {
|
||||
[rawProviderKey]: providerSpecificOptions
|
||||
|
||||
@ -1,35 +1,120 @@
|
||||
import 'emoji-picker-element'
|
||||
|
||||
import TwemojiCountryFlagsWoff2 from '@renderer/assets/fonts/country-flag-fonts/TwemojiCountryFlags.woff2?url'
|
||||
import { useTheme } from '@renderer/context/ThemeProvider'
|
||||
import type { LanguageVarious } from '@shared/data/preference/preferenceTypes'
|
||||
import { polyfillCountryFlagEmojis } from 'country-flag-emoji-polyfill'
|
||||
// i18n translations from emoji-picker-element
|
||||
import de from 'emoji-picker-element/i18n/de'
|
||||
import en from 'emoji-picker-element/i18n/en'
|
||||
import es from 'emoji-picker-element/i18n/es'
|
||||
import fr from 'emoji-picker-element/i18n/fr'
|
||||
import ja from 'emoji-picker-element/i18n/ja'
|
||||
import pt_PT from 'emoji-picker-element/i18n/pt_PT'
|
||||
import ru_RU from 'emoji-picker-element/i18n/ru_RU'
|
||||
import zh_CN from 'emoji-picker-element/i18n/zh_CN'
|
||||
import type Picker from 'emoji-picker-element/picker'
|
||||
import type { EmojiClickEvent, NativeEmoji } from 'emoji-picker-element/shared'
|
||||
// Emoji data from emoji-picker-element-data (local, no CDN)
|
||||
// Using CLDR format for full multi-language search support (28 languages)
|
||||
import dataDE from 'emoji-picker-element-data/de/cldr/data.json?url'
|
||||
import dataEN from 'emoji-picker-element-data/en/cldr/data.json?url'
|
||||
import dataES from 'emoji-picker-element-data/es/cldr/data.json?url'
|
||||
import dataFR from 'emoji-picker-element-data/fr/cldr/data.json?url'
|
||||
import dataJA from 'emoji-picker-element-data/ja/cldr/data.json?url'
|
||||
import dataPT from 'emoji-picker-element-data/pt/cldr/data.json?url'
|
||||
import dataRU from 'emoji-picker-element-data/ru/cldr/data.json?url'
|
||||
import dataZH from 'emoji-picker-element-data/zh/cldr/data.json?url'
|
||||
import dataZH_HANT from 'emoji-picker-element-data/zh-hant/cldr/data.json?url'
|
||||
import type { FC } from 'react'
|
||||
import { useEffect, useRef } from 'react'
|
||||
import { useTranslation } from 'react-i18next'
|
||||
|
||||
interface Props {
|
||||
onEmojiClick: (emoji: string) => void
|
||||
}
|
||||
|
||||
// Mapping from app locale to emoji-picker-element i18n
|
||||
const i18nMap: Record<LanguageVarious, typeof en> = {
|
||||
'en-US': en,
|
||||
'zh-CN': zh_CN,
|
||||
'zh-TW': zh_CN, // Closest available
|
||||
'de-DE': de,
|
||||
'el-GR': en, // No Greek available, fallback to English
|
||||
'es-ES': es,
|
||||
'fr-FR': fr,
|
||||
'ja-JP': ja,
|
||||
'pt-PT': pt_PT,
|
||||
'ru-RU': ru_RU
|
||||
}
|
||||
|
||||
// Mapping from app locale to emoji data URL
|
||||
// Using CLDR format provides native language search support for all locales
|
||||
const dataSourceMap: Record<LanguageVarious, string> = {
|
||||
'en-US': dataEN,
|
||||
'zh-CN': dataZH,
|
||||
'zh-TW': dataZH_HANT,
|
||||
'de-DE': dataDE,
|
||||
'el-GR': dataEN, // No Greek CLDR available, fallback to English
|
||||
'es-ES': dataES,
|
||||
'fr-FR': dataFR,
|
||||
'ja-JP': dataJA,
|
||||
'pt-PT': dataPT,
|
||||
'ru-RU': dataRU
|
||||
}
|
||||
|
||||
// Mapping from app locale to emoji-picker-element locale string
|
||||
// Must match the data source locale for proper IndexedDB caching
|
||||
const localeMap: Record<LanguageVarious, string> = {
|
||||
'en-US': 'en',
|
||||
'zh-CN': 'zh',
|
||||
'zh-TW': 'zh-hant',
|
||||
'de-DE': 'de',
|
||||
'el-GR': 'en',
|
||||
'es-ES': 'es',
|
||||
'fr-FR': 'fr',
|
||||
'ja-JP': 'ja',
|
||||
'pt-PT': 'pt',
|
||||
'ru-RU': 'ru'
|
||||
}
|
||||
|
||||
const EmojiPicker: FC<Props> = ({ onEmojiClick }) => {
|
||||
const { theme } = useTheme()
|
||||
const ref = useRef<HTMLDivElement>(null)
|
||||
const { i18n } = useTranslation()
|
||||
const ref = useRef<Picker>(null)
|
||||
const currentLocale = i18n.language as LanguageVarious
|
||||
|
||||
useEffect(() => {
|
||||
polyfillCountryFlagEmojis('Twemoji Mozilla', TwemojiCountryFlagsWoff2)
|
||||
}, [])
|
||||
|
||||
// Configure picker with i18n and dataSource
|
||||
useEffect(() => {
|
||||
const refValue = ref.current
|
||||
const picker = ref.current
|
||||
if (picker) {
|
||||
picker.i18n = i18nMap[currentLocale] || en
|
||||
picker.dataSource = dataSourceMap[currentLocale] || dataEN
|
||||
picker.locale = localeMap[currentLocale] || 'en'
|
||||
}
|
||||
}, [currentLocale])
|
||||
|
||||
if (refValue) {
|
||||
const handleEmojiClick = (event: any) => {
|
||||
useEffect(() => {
|
||||
const picker = ref.current
|
||||
|
||||
if (picker) {
|
||||
const handleEmojiClick = (event: EmojiClickEvent) => {
|
||||
event.stopPropagation()
|
||||
onEmojiClick(event.detail.unicode || event.detail.emoji.unicode)
|
||||
const { detail } = event
|
||||
// Use detail.unicode (processed with skin tone) or fallback to emoji's unicode for native emoji
|
||||
const unicode = detail.unicode || ('unicode' in detail.emoji ? (detail.emoji as NativeEmoji).unicode : '')
|
||||
onEmojiClick(unicode)
|
||||
}
|
||||
// 添加事件监听器
|
||||
refValue.addEventListener('emoji-click', handleEmojiClick)
|
||||
picker.addEventListener('emoji-click', handleEmojiClick)
|
||||
|
||||
// 清理事件监听器
|
||||
return () => {
|
||||
refValue.removeEventListener('emoji-click', handleEmojiClick)
|
||||
picker.removeEventListener('emoji-click', handleEmojiClick)
|
||||
}
|
||||
}
|
||||
return
|
||||
|
||||
@ -1,5 +1,4 @@
|
||||
import { loggerService } from '@logger'
|
||||
import ClaudeIcon from '@renderer/assets/images/models/claude.png'
|
||||
import { ErrorBoundary } from '@renderer/components/ErrorBoundary'
|
||||
import { TopView } from '@renderer/components/TopView'
|
||||
import { permissionModeCards } from '@renderer/config/agent'
|
||||
@ -9,7 +8,6 @@ import SelectAgentBaseModelButton from '@renderer/pages/home/components/SelectAg
|
||||
import type {
|
||||
AddAgentForm,
|
||||
AgentEntity,
|
||||
AgentType,
|
||||
ApiModel,
|
||||
BaseAgentForm,
|
||||
PermissionMode,
|
||||
@ -17,30 +15,22 @@ import type {
|
||||
UpdateAgentForm
|
||||
} from '@renderer/types'
|
||||
import { AgentConfigurationSchema, isAgentType } from '@renderer/types'
|
||||
import { Avatar, Button, Input, Modal, Select } from 'antd'
|
||||
import { Button, Input, Modal, Select } from 'antd'
|
||||
import { AlertTriangleIcon } from 'lucide-react'
|
||||
import type { ChangeEvent, FormEvent } from 'react'
|
||||
import { useCallback, useEffect, useMemo, useRef, useState } from 'react'
|
||||
import { useTranslation } from 'react-i18next'
|
||||
import styled from 'styled-components'
|
||||
|
||||
import type { BaseOption } from './shared'
|
||||
|
||||
const { TextArea } = Input
|
||||
|
||||
const logger = loggerService.withContext('AddAgentPopup')
|
||||
|
||||
interface AgentTypeOption extends BaseOption {
|
||||
type: 'type'
|
||||
key: AgentEntity['type']
|
||||
name: AgentEntity['name']
|
||||
}
|
||||
|
||||
type AgentWithTools = AgentEntity & { tools?: Tool[] }
|
||||
|
||||
const buildAgentForm = (existing?: AgentWithTools): BaseAgentForm => ({
|
||||
type: existing?.type ?? 'claude-code',
|
||||
name: existing?.name ?? 'Claude Code',
|
||||
name: existing?.name ?? 'Agent',
|
||||
description: existing?.description,
|
||||
instructions: existing?.instructions,
|
||||
model: existing?.model ?? '',
|
||||
@ -100,54 +90,6 @@ const PopupContainer: React.FC<Props> = ({ agent, afterSubmit, resolve }) => {
|
||||
})
|
||||
}, [])
|
||||
|
||||
// add supported agents type here.
|
||||
const agentConfig = useMemo(
|
||||
() =>
|
||||
[
|
||||
{
|
||||
type: 'type',
|
||||
key: 'claude-code',
|
||||
label: 'Claude Code',
|
||||
name: 'Claude Code',
|
||||
avatar: ClaudeIcon
|
||||
}
|
||||
] as const satisfies AgentTypeOption[],
|
||||
[]
|
||||
)
|
||||
|
||||
const agentOptions = useMemo(
|
||||
() =>
|
||||
agentConfig.map((option) => ({
|
||||
value: option.key,
|
||||
label: (
|
||||
<OptionWrapper>
|
||||
<Avatar src={option.avatar} size={24} />
|
||||
<span>{option.label}</span>
|
||||
</OptionWrapper>
|
||||
)
|
||||
})),
|
||||
[agentConfig]
|
||||
)
|
||||
|
||||
const onAgentTypeChange = useCallback(
|
||||
(value: AgentType) => {
|
||||
const prevConfig = agentConfig.find((config) => config.key === form.type)
|
||||
let newName: string | undefined = form.name
|
||||
if (prevConfig && prevConfig.name === form.name) {
|
||||
const newConfig = agentConfig.find((config) => config.key === value)
|
||||
if (newConfig) {
|
||||
newName = newConfig.name
|
||||
}
|
||||
}
|
||||
setForm((prev) => ({
|
||||
...prev,
|
||||
type: value,
|
||||
name: newName
|
||||
}))
|
||||
},
|
||||
[agentConfig, form.name, form.type]
|
||||
)
|
||||
|
||||
const onNameChange = useCallback((e: ChangeEvent<HTMLInputElement>) => {
|
||||
setForm((prev) => ({
|
||||
...prev,
|
||||
@ -155,12 +97,12 @@ const PopupContainer: React.FC<Props> = ({ agent, afterSubmit, resolve }) => {
|
||||
}))
|
||||
}, [])
|
||||
|
||||
const onDescChange = useCallback((e: ChangeEvent<HTMLTextAreaElement>) => {
|
||||
setForm((prev) => ({
|
||||
...prev,
|
||||
description: e.target.value
|
||||
}))
|
||||
}, [])
|
||||
// const onDescChange = useCallback((e: ChangeEvent<HTMLTextAreaElement>) => {
|
||||
// setForm((prev) => ({
|
||||
// ...prev,
|
||||
// description: e.target.value
|
||||
// }))
|
||||
// }, [])
|
||||
|
||||
const onInstChange = useCallback((e: ChangeEvent<HTMLTextAreaElement>) => {
|
||||
setForm((prev) => ({
|
||||
@ -334,16 +276,6 @@ const PopupContainer: React.FC<Props> = ({ agent, afterSubmit, resolve }) => {
|
||||
<StyledForm onSubmit={onSubmit}>
|
||||
<FormContent>
|
||||
<FormRow>
|
||||
<FormItem style={{ flex: 1 }}>
|
||||
<Label>{t('agent.type.label')}</Label>
|
||||
<Select
|
||||
value={form.type}
|
||||
onChange={onAgentTypeChange}
|
||||
options={agentOptions}
|
||||
disabled={isEditing(agent)}
|
||||
style={{ width: '100%' }}
|
||||
/>
|
||||
</FormItem>
|
||||
<FormItem style={{ flex: 1 }}>
|
||||
<Label>
|
||||
{t('common.name')} <RequiredMark>*</RequiredMark>
|
||||
@ -363,7 +295,7 @@ const PopupContainer: React.FC<Props> = ({ agent, afterSubmit, resolve }) => {
|
||||
avatarSize={24}
|
||||
iconSize={16}
|
||||
buttonStyle={{
|
||||
padding: '8px 12px',
|
||||
padding: '3px 8px',
|
||||
width: '100%',
|
||||
border: '1px solid var(--color-border)',
|
||||
borderRadius: 6,
|
||||
@ -382,7 +314,6 @@ const PopupContainer: React.FC<Props> = ({ agent, afterSubmit, resolve }) => {
|
||||
onChange={onPermissionModeChange}
|
||||
style={{ width: '100%' }}
|
||||
placeholder={t('agent.settings.tooling.permissionMode.placeholder', 'Select permission mode')}
|
||||
dropdownStyle={{ minWidth: '500px' }}
|
||||
optionLabelProp="label">
|
||||
{permissionModeCards.map((item) => (
|
||||
<Select.Option key={item.mode} value={item.mode} label={t(item.titleKey, item.titleFallback)}>
|
||||
@ -438,10 +369,10 @@ const PopupContainer: React.FC<Props> = ({ agent, afterSubmit, resolve }) => {
|
||||
<TextArea rows={3} value={form.instructions ?? ''} onChange={onInstChange} />
|
||||
</FormItem>
|
||||
|
||||
<FormItem>
|
||||
{/* <FormItem>
|
||||
<Label>{t('common.description')}</Label>
|
||||
<TextArea rows={2} value={form.description ?? ''} onChange={onDescChange} />
|
||||
</FormItem>
|
||||
<TextArea rows={1} value={form.description ?? ''} onChange={onDescChange} />
|
||||
</FormItem> */}
|
||||
</FormContent>
|
||||
|
||||
<FormFooter>
|
||||
@ -575,14 +506,7 @@ const FormFooter = styled.div`
|
||||
display: flex;
|
||||
justify-content: flex-end;
|
||||
gap: 8px;
|
||||
padding-top: 16px;
|
||||
border-top: 1px solid var(--color-border);
|
||||
`
|
||||
|
||||
const OptionWrapper = styled.div`
|
||||
display: flex;
|
||||
align-items: center;
|
||||
gap: 8px;
|
||||
padding: 10px;
|
||||
`
|
||||
|
||||
const PermissionOptionWrapper = styled.div`
|
||||
|
||||
@ -1,6 +1,12 @@
|
||||
import { describe, expect, it, vi } from 'vitest'
|
||||
|
||||
import { isDoubaoSeedAfter251015, isDoubaoThinkingAutoModel, isLingReasoningModel } from '../models/reasoning'
|
||||
import {
|
||||
isDoubaoSeedAfter251015,
|
||||
isDoubaoThinkingAutoModel,
|
||||
isGeminiReasoningModel,
|
||||
isLingReasoningModel,
|
||||
isSupportedThinkingTokenGeminiModel
|
||||
} from '../models/reasoning'
|
||||
|
||||
vi.mock('@renderer/store', () => ({
|
||||
default: {
|
||||
@ -231,3 +237,284 @@ describe('Ling Models', () => {
|
||||
})
|
||||
})
|
||||
})
|
||||
|
||||
describe('Gemini Models', () => {
|
||||
describe('isSupportedThinkingTokenGeminiModel', () => {
|
||||
it('should return true for gemini 2.5 models', () => {
|
||||
expect(
|
||||
isSupportedThinkingTokenGeminiModel({
|
||||
id: 'gemini-2.5-flash',
|
||||
name: '',
|
||||
provider: '',
|
||||
group: ''
|
||||
})
|
||||
).toBe(true)
|
||||
expect(
|
||||
isSupportedThinkingTokenGeminiModel({
|
||||
id: 'gemini-2.5-pro',
|
||||
name: '',
|
||||
provider: '',
|
||||
group: ''
|
||||
})
|
||||
).toBe(true)
|
||||
expect(
|
||||
isSupportedThinkingTokenGeminiModel({
|
||||
id: 'gemini-2.5-flash-latest',
|
||||
name: '',
|
||||
provider: '',
|
||||
group: ''
|
||||
})
|
||||
).toBe(true)
|
||||
expect(
|
||||
isSupportedThinkingTokenGeminiModel({
|
||||
id: 'gemini-2.5-pro-latest',
|
||||
name: '',
|
||||
provider: '',
|
||||
group: ''
|
||||
})
|
||||
).toBe(true)
|
||||
})
|
||||
|
||||
it('should return true for gemini latest models', () => {
|
||||
expect(
|
||||
isSupportedThinkingTokenGeminiModel({
|
||||
id: 'gemini-flash-latest',
|
||||
name: '',
|
||||
provider: '',
|
||||
group: ''
|
||||
})
|
||||
).toBe(true)
|
||||
expect(
|
||||
isSupportedThinkingTokenGeminiModel({
|
||||
id: 'gemini-pro-latest',
|
||||
name: '',
|
||||
provider: '',
|
||||
group: ''
|
||||
})
|
||||
).toBe(true)
|
||||
expect(
|
||||
isSupportedThinkingTokenGeminiModel({
|
||||
id: 'gemini-flash-lite-latest',
|
||||
name: '',
|
||||
provider: '',
|
||||
group: ''
|
||||
})
|
||||
).toBe(true)
|
||||
})
|
||||
|
||||
it('should return true for gemini 3 models', () => {
|
||||
// Preview versions
|
||||
expect(
|
||||
isSupportedThinkingTokenGeminiModel({
|
||||
id: 'gemini-3-pro-preview',
|
||||
name: '',
|
||||
provider: '',
|
||||
group: ''
|
||||
})
|
||||
).toBe(true)
|
||||
expect(
|
||||
isSupportedThinkingTokenGeminiModel({
|
||||
id: 'google/gemini-3-pro-preview',
|
||||
name: '',
|
||||
provider: '',
|
||||
group: ''
|
||||
})
|
||||
).toBe(true)
|
||||
// Future stable versions
|
||||
expect(
|
||||
isSupportedThinkingTokenGeminiModel({
|
||||
id: 'gemini-3-flash',
|
||||
name: '',
|
||||
provider: '',
|
||||
group: ''
|
||||
})
|
||||
).toBe(true)
|
||||
expect(
|
||||
isSupportedThinkingTokenGeminiModel({
|
||||
id: 'gemini-3-pro',
|
||||
name: '',
|
||||
provider: '',
|
||||
group: ''
|
||||
})
|
||||
).toBe(true)
|
||||
expect(
|
||||
isSupportedThinkingTokenGeminiModel({
|
||||
id: 'google/gemini-3-flash',
|
||||
name: '',
|
||||
provider: '',
|
||||
group: ''
|
||||
})
|
||||
).toBe(true)
|
||||
expect(
|
||||
isSupportedThinkingTokenGeminiModel({
|
||||
id: 'google/gemini-3-pro',
|
||||
name: '',
|
||||
provider: '',
|
||||
group: ''
|
||||
})
|
||||
).toBe(true)
|
||||
})
|
||||
|
||||
it('should return false for image and tts models', () => {
|
||||
expect(
|
||||
isSupportedThinkingTokenGeminiModel({
|
||||
id: 'gemini-2.5-flash-image',
|
||||
name: '',
|
||||
provider: '',
|
||||
group: ''
|
||||
})
|
||||
).toBe(false)
|
||||
expect(
|
||||
isSupportedThinkingTokenGeminiModel({
|
||||
id: 'gemini-2.5-flash-preview-tts',
|
||||
name: '',
|
||||
provider: '',
|
||||
group: ''
|
||||
})
|
||||
).toBe(false)
|
||||
})
|
||||
|
||||
it('should return false for older gemini models', () => {
|
||||
expect(
|
||||
isSupportedThinkingTokenGeminiModel({
|
||||
id: 'gemini-1.5-flash',
|
||||
name: '',
|
||||
provider: '',
|
||||
group: ''
|
||||
})
|
||||
).toBe(false)
|
||||
expect(
|
||||
isSupportedThinkingTokenGeminiModel({
|
||||
id: 'gemini-1.5-pro',
|
||||
name: '',
|
||||
provider: '',
|
||||
group: ''
|
||||
})
|
||||
).toBe(false)
|
||||
expect(
|
||||
isSupportedThinkingTokenGeminiModel({
|
||||
id: 'gemini-1.0-pro',
|
||||
name: '',
|
||||
provider: '',
|
||||
group: ''
|
||||
})
|
||||
).toBe(false)
|
||||
})
|
||||
})
|
||||
|
||||
describe('isGeminiReasoningModel', () => {
|
||||
it('should return true for gemini thinking models', () => {
|
||||
expect(
|
||||
isGeminiReasoningModel({
|
||||
id: 'gemini-2.0-flash-thinking',
|
||||
name: '',
|
||||
provider: '',
|
||||
group: ''
|
||||
})
|
||||
).toBe(true)
|
||||
expect(
|
||||
isGeminiReasoningModel({
|
||||
id: 'gemini-thinking-exp',
|
||||
name: '',
|
||||
provider: '',
|
||||
group: ''
|
||||
})
|
||||
).toBe(true)
|
||||
})
|
||||
|
||||
it('should return true for supported thinking token gemini models', () => {
|
||||
expect(
|
||||
isGeminiReasoningModel({
|
||||
id: 'gemini-2.5-flash',
|
||||
name: '',
|
||||
provider: '',
|
||||
group: ''
|
||||
})
|
||||
).toBe(true)
|
||||
expect(
|
||||
isGeminiReasoningModel({
|
||||
id: 'gemini-2.5-pro',
|
||||
name: '',
|
||||
provider: '',
|
||||
group: ''
|
||||
})
|
||||
).toBe(true)
|
||||
})
|
||||
|
||||
it('should return true for gemini-3 models', () => {
|
||||
// Preview versions
|
||||
expect(
|
||||
isGeminiReasoningModel({
|
||||
id: 'gemini-3-pro-preview',
|
||||
name: '',
|
||||
provider: '',
|
||||
group: ''
|
||||
})
|
||||
).toBe(true)
|
||||
expect(
|
||||
isGeminiReasoningModel({
|
||||
id: 'google/gemini-3-pro-preview',
|
||||
name: '',
|
||||
provider: '',
|
||||
group: ''
|
||||
})
|
||||
).toBe(true)
|
||||
// Future stable versions
|
||||
expect(
|
||||
isGeminiReasoningModel({
|
||||
id: 'gemini-3-flash',
|
||||
name: '',
|
||||
provider: '',
|
||||
group: ''
|
||||
})
|
||||
).toBe(true)
|
||||
expect(
|
||||
isGeminiReasoningModel({
|
||||
id: 'gemini-3-pro',
|
||||
name: '',
|
||||
provider: '',
|
||||
group: ''
|
||||
})
|
||||
).toBe(true)
|
||||
expect(
|
||||
isGeminiReasoningModel({
|
||||
id: 'google/gemini-3-flash',
|
||||
name: '',
|
||||
provider: '',
|
||||
group: ''
|
||||
})
|
||||
).toBe(true)
|
||||
expect(
|
||||
isGeminiReasoningModel({
|
||||
id: 'google/gemini-3-pro',
|
||||
name: '',
|
||||
provider: '',
|
||||
group: ''
|
||||
})
|
||||
).toBe(true)
|
||||
})
|
||||
|
||||
it('should return false for older gemini models without thinking', () => {
|
||||
expect(
|
||||
isGeminiReasoningModel({
|
||||
id: 'gemini-1.5-flash',
|
||||
name: '',
|
||||
provider: '',
|
||||
group: ''
|
||||
})
|
||||
).toBe(false)
|
||||
expect(
|
||||
isGeminiReasoningModel({
|
||||
id: 'gemini-1.5-pro',
|
||||
name: '',
|
||||
provider: '',
|
||||
group: ''
|
||||
})
|
||||
).toBe(false)
|
||||
})
|
||||
|
||||
it('should return false for undefined model', () => {
|
||||
expect(isGeminiReasoningModel(undefined)).toBe(false)
|
||||
})
|
||||
})
|
||||
})
|
||||
|
||||
167 src/renderer/src/config/__test__/vision.test.ts Normal file
@ -0,0 +1,167 @@
|
||||
import { describe, expect, it, vi } from 'vitest'

import { isVisionModel } from '../models/vision'

vi.mock('@renderer/store', () => ({
  default: {
    getState: () => ({
      llm: {
        settings: {}
      }
    })
  }
}))

// FIXME: Idk why it's imported. Maybe circular dependency somewhere
vi.mock('@renderer/services/AssistantService.ts', () => ({
  getDefaultAssistant: () => {
    return {
      id: 'default',
      name: 'default',
      emoji: '😀',
      prompt: '',
      topics: [],
      messages: [],
      type: 'assistant',
      regularPhrases: [],
      settings: {}
    }
  },
  getProviderByModel: () => null
}))

describe('isVisionModel', () => {
  describe('Gemini Models', () => {
    it('should return true for gemini 1.5 models', () => {
      expect(
        isVisionModel({
          id: 'gemini-1.5-flash',
          name: '',
          provider: '',
          group: ''
        })
      ).toBe(true)
      expect(
        isVisionModel({
          id: 'gemini-1.5-pro',
          name: '',
          provider: '',
          group: ''
        })
      ).toBe(true)
    })

    it('should return true for gemini 2.x models', () => {
      expect(
        isVisionModel({
          id: 'gemini-2.0-flash',
          name: '',
          provider: '',
          group: ''
        })
      ).toBe(true)
      expect(
        isVisionModel({
          id: 'gemini-2.0-pro',
          name: '',
          provider: '',
          group: ''
        })
      ).toBe(true)
      expect(
        isVisionModel({
          id: 'gemini-2.5-flash',
          name: '',
          provider: '',
          group: ''
        })
      ).toBe(true)
      expect(
        isVisionModel({
          id: 'gemini-2.5-pro',
          name: '',
          provider: '',
          group: ''
        })
      ).toBe(true)
    })

    it('should return true for gemini latest models', () => {
      expect(
        isVisionModel({
          id: 'gemini-flash-latest',
          name: '',
          provider: '',
          group: ''
        })
      ).toBe(true)
      expect(
        isVisionModel({
          id: 'gemini-pro-latest',
          name: '',
          provider: '',
          group: ''
        })
      ).toBe(true)
      expect(
        isVisionModel({
          id: 'gemini-flash-lite-latest',
          name: '',
          provider: '',
          group: ''
        })
      ).toBe(true)
    })

    it('should return true for gemini 3 models', () => {
      // Preview versions
      expect(
        isVisionModel({
          id: 'gemini-3-pro-preview',
          name: '',
          provider: '',
          group: ''
        })
      ).toBe(true)
      // Future stable versions
      expect(
        isVisionModel({
          id: 'gemini-3-flash',
          name: '',
          provider: '',
          group: ''
        })
      ).toBe(true)
      expect(
        isVisionModel({
          id: 'gemini-3-pro',
          name: '',
          provider: '',
          group: ''
        })
      ).toBe(true)
    })

    it('should return true for gemini exp models', () => {
      expect(
        isVisionModel({
          id: 'gemini-exp-1206',
          name: '',
          provider: '',
          group: ''
        })
      ).toBe(true)
    })

    it('should return false for gemini 1.0 models', () => {
      expect(
        isVisionModel({
          id: 'gemini-1.0-pro',
          name: '',
          provider: '',
          group: ''
        })
      ).toBe(false)
    })
  })
})

64 src/renderer/src/config/__test__/websearch.test.ts Normal file
@ -0,0 +1,64 @@
import { describe, expect, it, vi } from 'vitest'

import { GEMINI_SEARCH_REGEX } from '../models/websearch'

vi.mock('@renderer/store', () => ({
  default: {
    getState: () => ({
      llm: {
        settings: {}
      }
    })
  }
}))

// FIXME: Idk why it's imported. Maybe circular dependency somewhere
vi.mock('@renderer/services/AssistantService.ts', () => ({
  getDefaultAssistant: () => {
    return {
      id: 'default',
      name: 'default',
      emoji: '😀',
      prompt: '',
      topics: [],
      messages: [],
      type: 'assistant',
      regularPhrases: [],
      settings: {}
    }
  },
  getProviderByModel: () => null
}))

describe('Gemini Search Models', () => {
  describe('GEMINI_SEARCH_REGEX', () => {
    it('should match gemini 2.x models', () => {
      expect(GEMINI_SEARCH_REGEX.test('gemini-2.0-flash')).toBe(true)
      expect(GEMINI_SEARCH_REGEX.test('gemini-2.0-pro')).toBe(true)
      expect(GEMINI_SEARCH_REGEX.test('gemini-2.5-flash')).toBe(true)
      expect(GEMINI_SEARCH_REGEX.test('gemini-2.5-pro')).toBe(true)
      expect(GEMINI_SEARCH_REGEX.test('gemini-2.5-flash-latest')).toBe(true)
      expect(GEMINI_SEARCH_REGEX.test('gemini-2.5-pro-latest')).toBe(true)
    })

    it('should match gemini latest models', () => {
      expect(GEMINI_SEARCH_REGEX.test('gemini-flash-latest')).toBe(true)
      expect(GEMINI_SEARCH_REGEX.test('gemini-pro-latest')).toBe(true)
      expect(GEMINI_SEARCH_REGEX.test('gemini-flash-lite-latest')).toBe(true)
    })

    it('should match gemini 3 models', () => {
      // Preview versions
      expect(GEMINI_SEARCH_REGEX.test('gemini-3-pro-preview')).toBe(true)
      // Future stable versions
      expect(GEMINI_SEARCH_REGEX.test('gemini-3-flash')).toBe(true)
      expect(GEMINI_SEARCH_REGEX.test('gemini-3-pro')).toBe(true)
    })

    it('should not match older gemini models', () => {
      expect(GEMINI_SEARCH_REGEX.test('gemini-1.5-flash')).toBe(false)
      expect(GEMINI_SEARCH_REGEX.test('gemini-1.5-pro')).toBe(false)
      expect(GEMINI_SEARCH_REGEX.test('gemini-1.0-pro')).toBe(false)
    })
  })
})

@ -254,7 +254,7 @@ export function isGeminiReasoningModel(model?: Model): boolean {

// Gemini 支持思考模式的模型正则
export const GEMINI_THINKING_MODEL_REGEX =
  /gemini-(?:2\.5.*(?:-latest)?|flash-latest|pro-latest|flash-lite-latest)(?:-[\w-]+)*$/i
  /gemini-(?:2\.5.*(?:-latest)?|3-(?:flash|pro)(?:-preview)?|flash-latest|pro-latest|flash-lite-latest)(?:-[\w-]+)*$/i

export const isSupportedThinkingTokenGeminiModel = (model: Model): boolean => {
  const modelId = getLowerBaseModelName(model.id, '/')

@ -12,6 +12,7 @@ const visionAllowedModels = [
  'gemini-1\\.5',
  'gemini-2\\.0',
  'gemini-2\\.5',
  'gemini-3-(?:flash|pro)(?:-preview)?',
  'gemini-(flash|pro|flash-lite)-latest',
  'gemini-exp',
  'claude-3',
@ -64,13 +65,13 @@ const visionExcludedModels = [
  'o1-preview',
  'AIDC-AI/Marco-o1'
]
export const VISION_REGEX = new RegExp(
const VISION_REGEX = new RegExp(
  `\\b(?!(?:${visionExcludedModels.join('|')})\\b)(${visionAllowedModels.join('|')})\\b`,
  'i'
)

// For middleware to identify models that must use the dedicated Image API
export const DEDICATED_IMAGE_MODELS = [
const DEDICATED_IMAGE_MODELS = [
  'grok-2-image',
  'grok-2-image-1212',
  'grok-2-image-latest',
@ -79,7 +80,7 @@ export const DEDICATED_IMAGE_MODELS = [
  'gpt-image-1'
]

export const IMAGE_ENHANCEMENT_MODELS = [
const IMAGE_ENHANCEMENT_MODELS = [
  'grok-2-image(?:-[\\w-]+)?',
  'qwen-image-edit',
  'gpt-image-1',
@ -90,9 +91,9 @@ export const IMAGE_ENHANCEMENT_MODELS = [
const IMAGE_ENHANCEMENT_MODELS_REGEX = new RegExp(IMAGE_ENHANCEMENT_MODELS.join('|'), 'i')

// Models that should auto-enable image generation button when selected
export const AUTO_ENABLE_IMAGE_MODELS = ['gemini-2.5-flash-image', ...DEDICATED_IMAGE_MODELS]
const AUTO_ENABLE_IMAGE_MODELS = ['gemini-2.5-flash-image', ...DEDICATED_IMAGE_MODELS]

export const OPENAI_TOOL_USE_IMAGE_GENERATION_MODELS = [
const OPENAI_TOOL_USE_IMAGE_GENERATION_MODELS = [
  'o3',
  'gpt-4o',
  'gpt-4o-mini',
@ -102,9 +103,9 @@ export const OPENAI_TOOL_USE_IMAGE_GENERATION_MODELS = [
  'gpt-5'
]

export const OPENAI_IMAGE_GENERATION_MODELS = [...OPENAI_TOOL_USE_IMAGE_GENERATION_MODELS, 'gpt-image-1']
const OPENAI_IMAGE_GENERATION_MODELS = [...OPENAI_TOOL_USE_IMAGE_GENERATION_MODELS, 'gpt-image-1']

export const GENERATE_IMAGE_MODELS = [
const GENERATE_IMAGE_MODELS = [
  'gemini-2.0-flash-exp',
  'gemini-2.0-flash-exp-image-generation',
  'gemini-2.0-flash-preview-image-generation',
@ -169,22 +170,23 @@ export function isPureGenerateImageModel(model: Model): boolean {
}

// Text to image models
export const TEXT_TO_IMAGE_REGEX = /flux|diffusion|stabilityai|sd-|dall|cogview|janus|midjourney|mj-|image|gpt-image/i
const TEXT_TO_IMAGE_REGEX = /flux|diffusion|stabilityai|sd-|dall|cogview|janus|midjourney|mj-|image|gpt-image/i

export function isTextToImageModel(model: Model): boolean {
  const modelId = getLowerBaseModelName(model.id)
  return TEXT_TO_IMAGE_REGEX.test(modelId)
}

export function isNotSupportedImageSizeModel(model?: Model): boolean {
  if (!model) {
    return false
  }
// It's not used now
// export function isNotSupportedImageSizeModel(model?: Model): boolean {
// if (!model) {
// return false
// }

  const baseName = getLowerBaseModelName(model.id, '/')
// const baseName = getLowerBaseModelName(model.id, '/')

  return baseName.includes('grok-2-image')
}
// return baseName.includes('grok-2-image')
// }

/**
 * 判断模型是否支持图片增强(包括编辑、增强、修复等)

@ -3,7 +3,13 @@ import type { Model } from '@renderer/types'
import { SystemProviderIds } from '@renderer/types'
import { getLowerBaseModelName, isUserSelectedModelType } from '@renderer/utils'

import { isGeminiProvider, isNewApiProvider, isOpenAICompatibleProvider, isOpenAIProvider } from '../providers'
import {
  isGeminiProvider,
  isNewApiProvider,
  isOpenAICompatibleProvider,
  isOpenAIProvider,
  isVertexAiProvider
} from '../providers'
import { isEmbeddingModel, isRerankModel } from './embedding'
import { isAnthropicModel } from './utils'
import { isPureGenerateImageModel, isTextToImageModel } from './vision'
@ -16,7 +22,7 @@ export const CLAUDE_SUPPORTED_WEBSEARCH_REGEX = new RegExp(
export const GEMINI_FLASH_MODEL_REGEX = new RegExp('gemini.*-flash.*$')

export const GEMINI_SEARCH_REGEX = new RegExp(
  'gemini-(?:2.*(?:-latest)?|flash-latest|pro-latest|flash-lite-latest)(?:-[\\w-]+)*$',
  'gemini-(?:2.*(?:-latest)?|3-(?:flash|pro)(?:-preview)?|flash-latest|pro-latest|flash-lite-latest)(?:-[\\w-]+)*$',
  'i'
)

@ -107,7 +113,7 @@ export function isWebSearchModel(model: Model): boolean {
    }
  }

  if (isGeminiProvider(provider) || provider.id === SystemProviderIds.vertexai) {
  if (isGeminiProvider(provider) || isVertexAiProvider(provider)) {
    return GEMINI_SEARCH_REGEX.test(modelId)
  }

@ -275,6 +275,7 @@ export const SYSTEM_PROVIDERS_CONFIG: Record<SystemProviderId, SystemProvider> =
    type: 'openai',
    apiKey: '',
    apiHost: 'https://api.qnaigc.com',
    anthropicApiHost: 'https://api.qnaigc.com',
    models: SYSTEM_MODELS.qiniu,
    isSystem: true,
    enabled: false
@ -665,6 +666,7 @@ export const SYSTEM_PROVIDERS_CONFIG: Record<SystemProviderId, SystemProvider> =
    type: 'openai',
    apiKey: '',
    apiHost: 'https://api.longcat.chat/openai',
    anthropicApiHost: 'https://api.longcat.chat/anthropic',
    models: SYSTEM_MODELS.longcat,
    isSystem: true,
    enabled: false
@ -684,7 +686,7 @@ export const SYSTEM_PROVIDERS_CONFIG: Record<SystemProviderId, SystemProvider> =
    name: 'AI Gateway',
    type: 'ai-gateway',
    apiKey: '',
    apiHost: 'https://ai-gateway.vercel.sh/v1',
    apiHost: 'https://ai-gateway.vercel.sh/v1/ai',
    models: [],
    isSystem: true,
    enabled: false
@ -1569,6 +1571,10 @@ export function isGeminiProvider(provider: Provider): boolean {
  return provider.type === 'gemini'
}

export function isVertexAiProvider(provider: Provider): boolean {
  return provider.type === 'vertexai'
}

export function isAIGatewayProvider(provider: Provider): boolean {
  return provider.type === 'ai-gateway'
}

@ -308,7 +308,7 @@ export const InputbarCore: FC<InputbarCoreProps> = ({

      const isEnterPressed = event.key === 'Enter' && !event.nativeEvent.isComposing
      if (isEnterPressed) {
        if (isSendMessageKeyPressed(event, sendMessageShortcut)) {
        if (isSendMessageKeyPressed(event, sendMessageShortcut) && !cannotSend) {
          handleSendMessage()
          event.preventDefault()
          return
@ -354,6 +354,7 @@ export const InputbarCore: FC<InputbarCoreProps> = ({
      translate,
      handleToggleExpanded,
      sendMessageShortcut,
      cannotSend,
      handleSendMessage,
      setText,
      setTimeoutTimer,

@ -1,7 +1,7 @@
import type { PermissionUpdate } from '@anthropic-ai/claude-agent-sdk'
import { loggerService } from '@logger'
import { useAppDispatch, useAppSelector } from '@renderer/store'
import { selectPendingPermissionByToolName, toolPermissionsActions } from '@renderer/store/toolPermissions'
import { selectPendingPermission, toolPermissionsActions } from '@renderer/store/toolPermissions'
import type { NormalToolResponse } from '@renderer/types'
import { Button } from 'antd'
import { ChevronDown, CirclePlay, CircleX } from 'lucide-react'
@ -17,9 +17,7 @@ interface Props {
export function ToolPermissionRequestCard({ toolResponse }: Props) {
  const { t } = useTranslation()
  const dispatch = useAppDispatch()
  const request = useAppSelector((state) =>
    selectPendingPermissionByToolName(state.toolPermissions, toolResponse.tool.name)
  )
  const request = useAppSelector((state) => selectPendingPermission(state.toolPermissions, toolResponse.toolCallId))
  const [now, setNow] = useState(() => Date.now())
  const [showDetails, setShowDetails] = useState(false)

@ -1,21 +1,13 @@
import { getAgentTypeAvatar } from '@renderer/config/agent'
import type { useUpdateAgent } from '@renderer/hooks/agents/useUpdateAgent'
import type { useUpdateSession } from '@renderer/hooks/agents/useUpdateSession'
import { getAgentTypeLabel } from '@renderer/i18n/label'
import type { GetAgentResponse, GetAgentSessionResponse } from '@renderer/types'
import { isAgentEntity } from '@renderer/types'
import { Avatar } from 'antd'
import type { FC } from 'react'
import { useTranslation } from 'react-i18next'

import { AccessibleDirsSetting } from './AccessibleDirsSetting'
import { AvatarSetting } from './AvatarSetting'
import { DescriptionSetting } from './DescriptionSetting'
import { ModelSetting } from './ModelSetting'
import { NameSetting } from './NameSetting'
import { SettingsContainer, SettingsItem, SettingsTitle } from './shared'

// const logger = loggerService.withContext('AgentEssentialSettings')
import { SettingsContainer } from './shared'

type EssentialSettingsProps =
  | {
@ -30,26 +22,10 @@ type EssentialSettingsProps =
    }

const EssentialSettings: FC<EssentialSettingsProps> = ({ agentBase, update, showModelSetting = true }) => {
  const { t } = useTranslation()

  if (!agentBase) return null

  const isAgent = isAgentEntity(agentBase)

  return (
    <SettingsContainer>
      {isAgent && (
        <SettingsItem inline>
          <SettingsTitle>{t('agent.type.label')}</SettingsTitle>
          <div className="flex items-center gap-2">
            <Avatar size={24} src={getAgentTypeAvatar(agentBase.type)} className="h-6 w-6 text-lg" />
            <span>{(agentBase?.name ?? agentBase?.type) ? getAgentTypeLabel(agentBase.type) : ''}</span>
          </div>
        </SettingsItem>
      )}
      {isAgent && (
        <AvatarSetting agent={agentBase} update={update as ReturnType<typeof useUpdateAgent>['updateAgent']} />
      )}
      <NameSetting base={agentBase} update={update} />
      {showModelSetting && <ModelSetting base={agentBase} update={update} />}
      <AccessibleDirsSetting base={agentBase} update={update} />

@ -1,6 +1,8 @@
import { EmojiAvatarWithPicker } from '@renderer/components/Avatar/EmojiAvatarWithPicker'
import type { AgentBaseWithId, UpdateAgentBaseForm, UpdateAgentFunctionUnion } from '@renderer/types'
import { AgentConfigurationSchema, isAgentEntity, isAgentType } from '@renderer/types'
import { Input } from 'antd'
import { useState } from 'react'
import { useCallback, useState } from 'react'
import { useTranslation } from 'react-i18next'

import { SettingsItem, SettingsTitle } from './shared'
@ -13,26 +15,61 @@ export interface NameSettingsProps {
export const NameSetting = ({ base, update }: NameSettingsProps) => {
  const { t } = useTranslation()
  const [name, setName] = useState<string | undefined>(base?.name?.trim())

  const updateName = async (name: UpdateAgentBaseForm['name']) => {
    if (!base) return
    return update({ id: base.id, name: name?.trim() })
  }

  // Avatar logic
  const isAgent = isAgentEntity(base)
  const isDefault = isAgent ? isAgentType(base.configuration?.avatar) : false
  const [emoji, setEmoji] = useState(isAgent && !isDefault ? (base.configuration?.avatar ?? '⭐️') : '⭐️')

  const updateAvatar = useCallback(
    (avatar: string) => {
      if (!isAgent || !base) return
      const parsedConfiguration = AgentConfigurationSchema.parse(base.configuration ?? {})
      const payload = {
        id: base.id,
        configuration: {
          ...parsedConfiguration,
          avatar
        }
      }
      update(payload)
    },
    [base, update, isAgent]
  )

  if (!base) return null

  return (
    <SettingsItem inline>
      <SettingsTitle>{t('common.name')}</SettingsTitle>
      <Input
        placeholder={t('common.agent_one') + t('common.name')}
        value={name}
        onChange={(e) => setName(e.target.value)}
        onBlur={() => {
          if (name !== base.name) {
            updateName(name)
          }
        }}
        className="max-w-70 flex-1"
      />
      <div className="flex max-w-70 flex-1 items-center gap-1">
        {isAgent && (
          <EmojiAvatarWithPicker
            emoji={emoji}
            onPick={(emoji: string) => {
              setEmoji(emoji)
              if (isAgent && emoji === base?.configuration?.avatar) return
              updateAvatar(emoji)
            }}
          />
        )}
        <Input
          placeholder={t('common.agent_one') + t('common.name')}
          value={name}
          onChange={(e) => setName(e.target.value)}
          onBlur={() => {
            if (name !== base.name) {
              updateName(name)
            }
          }}
          className="flex-1"
        />
      </div>
    </SettingsItem>
  )
}

@ -8,6 +8,7 @@ import { DEFAULT_CONTEXTCOUNT, DEFAULT_TEMPERATURE, MAX_CONTEXT_COUNT } from '@r
import { isEmbeddingModel, isRerankModel } from '@renderer/config/models'
import { useTimer } from '@renderer/hooks/useTimer'
import { SettingRow } from '@renderer/pages/settings'
import { DEFAULT_ASSISTANT_SETTINGS } from '@renderer/services/AssistantService'
import type { Assistant, AssistantSettingCustomParameters, AssistantSettings, Model } from '@renderer/types'
import { modalConfirm } from '@renderer/utils'
import { Col, Divider, Input, InputNumber, Row, Select, Slider } from 'antd'
@ -30,7 +31,9 @@ const AssistantModelSettings: FC<Props> = ({ assistant, updateAssistant, updateA
  const [enableMaxTokens, setEnableMaxTokens] = useState(assistant?.settings?.enableMaxTokens ?? false)
  const [maxTokens, setMaxTokens] = useState(assistant?.settings?.maxTokens ?? 0)
  const [streamOutput, setStreamOutput] = useState(assistant?.settings?.streamOutput)
  const [toolUseMode, setToolUseMode] = useState(assistant?.settings?.toolUseMode ?? 'prompt')
  const [toolUseMode, setToolUseMode] = useState<AssistantSettings['toolUseMode']>(
    assistant?.settings?.toolUseMode ?? 'function'
  )
  const [defaultModel, setDefaultModel] = useState(assistant?.defaultModel)
  const [topP, setTopP] = useState(assistant?.settings?.topP ?? 1)
  const [enableTopP, setEnableTopP] = useState(assistant?.settings?.enableTopP ?? false)
@ -157,28 +160,17 @@ const AssistantModelSettings: FC<Props> = ({ assistant, updateAssistant, updateA
  }

  const onReset = () => {
    setTemperature(DEFAULT_TEMPERATURE)
    setEnableTemperature(true)
    setContextCount(DEFAULT_CONTEXTCOUNT)
    setEnableMaxTokens(false)
    setMaxTokens(0)
    setStreamOutput(true)
    setTopP(1)
    setEnableTopP(false)
    setCustomParameters([])
    setToolUseMode('prompt')
    updateAssistantSettings({
      temperature: DEFAULT_TEMPERATURE,
      enableTemperature: true,
      contextCount: DEFAULT_CONTEXTCOUNT,
      enableMaxTokens: false,
      maxTokens: 0,
      streamOutput: true,
      topP: 1,
      enableTopP: false,
      customParameters: [],
      toolUseMode: 'prompt'
    })
    setTemperature(DEFAULT_ASSISTANT_SETTINGS.temperature)
    setEnableTemperature(DEFAULT_ASSISTANT_SETTINGS.enableTemperature ?? true)
    setContextCount(DEFAULT_ASSISTANT_SETTINGS.contextCount)
    setEnableMaxTokens(DEFAULT_ASSISTANT_SETTINGS.enableMaxTokens ?? false)
    setMaxTokens(DEFAULT_ASSISTANT_SETTINGS.maxTokens ?? 0)
    setStreamOutput(DEFAULT_ASSISTANT_SETTINGS.streamOutput)
    setTopP(DEFAULT_ASSISTANT_SETTINGS.topP)
    setEnableTopP(DEFAULT_ASSISTANT_SETTINGS.enableTopP ?? false)
    setCustomParameters(DEFAULT_ASSISTANT_SETTINGS.customParameters ?? [])
    setToolUseMode(DEFAULT_ASSISTANT_SETTINGS.toolUseMode)
    updateAssistantSettings(DEFAULT_ASSISTANT_SETTINGS)
  }
  const modelFilter = (model: Model) => !isEmbeddingModel(model) && !isRerankModel(model)

@ -108,7 +108,6 @@ const InstallNpxUv: FC<Props> = ({ mini = false }) => {
    <Container>
      <Alert
        type={isUvInstalled ? 'success' : 'warning'}
        banner
        style={{ borderRadius: 'var(--list-item-border-radius)' }}
        description={
          <ColFlex>
@ -134,7 +133,6 @@ const InstallNpxUv: FC<Props> = ({ mini = false }) => {
      />
      <Alert
        type={isBunInstalled ? 'success' : 'warning'}
        banner
        style={{ borderRadius: 'var(--list-item-border-radius)' }}
        description={
          <ColFlex>

@ -140,7 +140,7 @@ const MCPSettings: FC = () => {
        <Route
          path="mcp-install"
          element={
            <SettingContainer theme={theme}>
            <SettingContainer style={{ backgroundColor: 'inherit' }}>
              <InstallNpxUv />
            </SettingContainer>
          }

@ -1,5 +1,3 @@
import 'emoji-picker-element'

import { CheckOutlined, LoadingOutlined, RollbackOutlined, ThunderboltOutlined } from '@ant-design/icons'
import { Button } from '@cherrystudio/ui'
import { loggerService } from '@logger'

@ -7,7 +7,7 @@ import AiProvider from '@renderer/aiCore'
import type { CompletionsParams } from '@renderer/aiCore/legacy/middleware/schemas'
import type { AiSdkMiddlewareConfig } from '@renderer/aiCore/middleware/AiSdkMiddlewareBuilder'
import { buildStreamTextParams } from '@renderer/aiCore/prepareParams'
import { isDedicatedImageGenerationModel, isEmbeddingModel } from '@renderer/config/models'
import { isDedicatedImageGenerationModel, isEmbeddingModel, isFunctionCallingModel } from '@renderer/config/models'
import i18n from '@renderer/i18n'
import store from '@renderer/store'
import type { FetchChatCompletionParams } from '@renderer/types'
@ -18,6 +18,7 @@ import type { Message } from '@renderer/types/newMessage'
import type { SdkModel } from '@renderer/types/sdk'
import { removeSpecialCharactersForTopicName, uuid } from '@renderer/utils'
import { abortCompletion, readyToAbort } from '@renderer/utils/abortController'
import { isToolUseModeFunction } from '@renderer/utils/assistant'
import { isAbortError } from '@renderer/utils/error'
import { purifyMarkdownImages } from '@renderer/utils/markdown'
import { isPromptToolUse, isSupportedToolUse } from '@renderer/utils/mcp-tools'
@ -126,12 +127,16 @@ export async function fetchChatCompletion({
    requestOptions: options
  })

  // Safely fallback to prompt tool use when function calling is not supported by model.
  const usePromptToolUse =
    isPromptToolUse(assistant) || (isToolUseModeFunction(assistant) && !isFunctionCallingModel(assistant.model))

  const middlewareConfig: AiSdkMiddlewareConfig = {
    streamOutput: assistant.settings?.streamOutput ?? true,
    onChunk: onChunkReceived,
    model: assistant.model,
    enableReasoning: capabilities.enableReasoning,
    isPromptToolUse: isPromptToolUse(assistant),
    isPromptToolUse: usePromptToolUse,
    isSupportedToolUse: isSupportedToolUse(assistant),
    isImageGenerationEndpoint: isDedicatedImageGenerationModel(assistant.model || getDefaultModel()),
    webSearchPluginConfig: webSearchPluginConfig,

@ -37,9 +37,10 @@ export const DEFAULT_ASSISTANT_SETTINGS: AssistantSettings = {
  streamOutput: true,
  topP: 1,
  enableTopP: false,
  toolUseMode: 'prompt',
  // It would gracefully fallback to prompt if not supported by model.
  toolUseMode: 'function',
  customParameters: []
}
} as const

export function getDefaultAssistant(): Assistant {
  return {
@ -182,7 +183,7 @@ export const getAssistantSettings = (assistant: Assistant): AssistantSettings =>
    enableMaxTokens: assistant?.settings?.enableMaxTokens ?? false,
    maxTokens: getAssistantMaxTokens(),
    streamOutput: assistant?.settings?.streamOutput ?? true,
    toolUseMode: assistant?.settings?.toolUseMode ?? 'prompt',
    toolUseMode: assistant?.settings?.toolUseMode ?? 'function',
    defaultModel: assistant?.defaultModel ?? undefined,
    reasoning_effort: assistant?.settings?.reasoning_effort ?? undefined,
    customParameters: assistant?.settings?.customParameters ?? []

@ -71,7 +71,7 @@ const persistedReducer = persistReducer(
  {
    key: 'cherry-studio',
    storage,
    version: 175,
    version: 176,
    blacklist: ['runtime', 'messages', 'messageBlocks', 'tabs', 'toolPermissions'],
    migrate
  },

@ -2828,12 +2828,14 @@ const migrateConfig = {
  '175': (state: RootState) => {
    try {
      state.assistants.assistants.forEach((assistant) => {
        // @ts-expect-error removed type 'off'
        // @ts-ignore
        if (assistant.settings?.reasoning_effort === 'off') {
          // @ts-ignore
          assistant.settings.reasoning_effort = 'none'
        }
        // @ts-expect-error removed type 'off'
        // @ts-ignore
        if (assistant.settings?.reasoning_effort_cache === 'off') {
          // @ts-ignore
          assistant.settings.reasoning_effort_cache = 'none'
        }
      })
@ -2843,6 +2845,22 @@ const migrateConfig = {
      logger.error('migrate 175 error', error as Error)
      return state
    }
  },
  '176': (state: RootState) => {
    try {
      state.llm.providers.forEach((provider) => {
        if (provider.id === SystemProviderIds.qiniu) {
          provider.anthropicApiHost = 'https://api.qnaigc.com'
        }
        if (provider.id === SystemProviderIds.longcat) {
          provider.anthropicApiHost = 'https://api.longcat.chat/anthropic'
        }
      })
      return state
    } catch (error) {
      logger.error('migrate 176 error', error as Error)
      return state
    }
  }
}

@ -6,6 +6,7 @@ export type ToolPermissionRequestPayload = {
  requestId: string
  toolName: string
  toolId: string
  toolCallId: string
  description?: string
  requiresPermissions: boolean
  input: Record<string, unknown>
@ -82,12 +83,12 @@ export const selectActiveToolPermission = (state: ToolPermissionsState): ToolPer
  return activeEntries[0]
}

export const selectPendingPermissionByToolName = (
export const selectPendingPermission = (
  state: ToolPermissionsState,
  toolName: string
  toolCallId: string
): ToolPermissionEntry | undefined => {
  const activeEntries = Object.values(state.requests)
    .filter((entry) => entry.toolName === toolName)
    .filter((entry) => entry.toolCallId === toolCallId)
    .filter(
      (entry) => entry.status === 'pending' || entry.status === 'submitting-allow' || entry.status === 'submitting-deny'
    )

79 yarn.lock
@ -102,7 +102,19 @@ __metadata:
  languageName: node
  linkType: hard

"@ai-sdk/anthropic@npm:2.0.44, @ai-sdk/anthropic@npm:^2.0.44":
"@ai-sdk/anthropic@npm:2.0.45":
  version: 2.0.45
  resolution: "@ai-sdk/anthropic@npm:2.0.45"
  dependencies:
    "@ai-sdk/provider": "npm:2.0.0"
    "@ai-sdk/provider-utils": "npm:3.0.17"
  peerDependencies:
    zod: ^3.25.76 || ^4.1.8
  checksum: 10c0/ef0e54f032e3b8324c278f3b25d9b388308204d753404c49fd880709a796c2343aee36d335c99f50e683edd39d5b8b6f42b2e9034e1725d8e0db514e2233d104
  languageName: node
  linkType: hard

"@ai-sdk/anthropic@npm:^2.0.44":
  version: 2.0.44
  resolution: "@ai-sdk/anthropic@npm:2.0.44"
  dependencies:
@ -179,54 +191,42 @@ __metadata:
  languageName: node
  linkType: hard

"@ai-sdk/google-vertex@npm:^3.0.62":
  version: 3.0.62
  resolution: "@ai-sdk/google-vertex@npm:3.0.62"
"@ai-sdk/google-vertex@npm:^3.0.68":
  version: 3.0.68
  resolution: "@ai-sdk/google-vertex@npm:3.0.68"
  dependencies:
    "@ai-sdk/anthropic": "npm:2.0.44"
    "@ai-sdk/google": "npm:2.0.31"
    "@ai-sdk/anthropic": "npm:2.0.45"
    "@ai-sdk/google": "npm:2.0.36"
    "@ai-sdk/provider": "npm:2.0.0"
    "@ai-sdk/provider-utils": "npm:3.0.17"
    google-auth-library: "npm:^9.15.0"
  peerDependencies:
    zod: ^3.25.76 || ^4.1.8
  checksum: 10c0/673bb51e3e0cbe5235ad5e65379b1cb8f099dbc690ab8552e208553a9f1cc6026d2588e956e73468bc6d267066be276e7a9aba98e32e905809dfbeab4ac0e352
  checksum: 10c0/6a3f4cb1e649313b46a0c349c717757071f8b012b0a28e59ab7a55fd35d9600f0043f0a4f57417c4cc49e0d3734e89a1e4fb248fc88795b5286c83395d3f617a
  languageName: node
  linkType: hard

"@ai-sdk/google@npm:2.0.31":
  version: 2.0.31
  resolution: "@ai-sdk/google@npm:2.0.31"
"@ai-sdk/google@npm:2.0.36":
  version: 2.0.36
  resolution: "@ai-sdk/google@npm:2.0.36"
  dependencies:
    "@ai-sdk/provider": "npm:2.0.0"
    "@ai-sdk/provider-utils": "npm:3.0.17"
  peerDependencies:
    zod: ^3.25.76 || ^4.1.8
  checksum: 10c0/d8f143f058fb62e6e67e30564ec92530d7389c22ad91b1e4bbe781c8570bf718cd417e44dcd4855e347e85c4174538a9a884eac666109e17f20d21467ab3e749
  checksum: 10c0/2c6de5e1cf0703b6b932a3f313bf4bc9439897af39c805169ab04bba397185d99b2b1306f3b817f991ca41fdced0365b072ee39e76382c045930256bce47e0e4
  languageName: node
  linkType: hard

"@ai-sdk/google@npm:^2.0.32":
  version: 2.0.32
  resolution: "@ai-sdk/google@npm:2.0.32"
"@ai-sdk/google@patch:@ai-sdk/google@npm%3A2.0.36#~/.yarn/patches/@ai-sdk-google-npm-2.0.36-6f3cc06026.patch":
  version: 2.0.36
  resolution: "@ai-sdk/google@patch:@ai-sdk/google@npm%3A2.0.36#~/.yarn/patches/@ai-sdk-google-npm-2.0.36-6f3cc06026.patch::version=2.0.36&hash=2da8c3"
  dependencies:
    "@ai-sdk/provider": "npm:2.0.0"
    "@ai-sdk/provider-utils": "npm:3.0.17"
  peerDependencies:
    zod: ^3.25.76 || ^4.1.8
  checksum: 10c0/052de16f1f66188e126168c8a9cc903448104528c7e44d6867bbf555c9067b9d6d44a4c4e0e014838156ba39095cb417f1b76363eb65212ca4d005f3651e58d2
  languageName: node
  linkType: hard

"@ai-sdk/google@patch:@ai-sdk/google@npm%3A2.0.31#~/.yarn/patches/@ai-sdk-google-npm-2.0.31-b0de047210.patch":
  version: 2.0.31
  resolution: "@ai-sdk/google@patch:@ai-sdk/google@npm%3A2.0.31#~/.yarn/patches/@ai-sdk-google-npm-2.0.31-b0de047210.patch::version=2.0.31&hash=9f3835"
  dependencies:
    "@ai-sdk/provider": "npm:2.0.0"
    "@ai-sdk/provider-utils": "npm:3.0.17"
  peerDependencies:
    zod: ^3.25.76 || ^4.1.8
  checksum: 10c0/dd37dfb7abf402caaae3edb2f1a8dab018fddad6ba3190376723e03a2a0c352329c8e41e60df3fb8436b717d9c2ee4b82dff091848f50d026f62565cbdb158f8
  checksum: 10c0/ce99a497360377d2917cf3a48278eb6f4337623ce3738ba743cf048c8c2a7731ec4fc27605a50e461e716ed49b3690206ca8e4078f27cb7be162b684bfc2fc22
  languageName: node
  linkType: hard

@ -1964,30 +1964,30 @@ __metadata:
  languageName: node
  linkType: hard

"@cherrystudio/ai-core@workspace:^1.0.0-alpha.18, @cherrystudio/ai-core@workspace:packages/aiCore":
"@cherrystudio/ai-core@workspace:^1.0.9, @cherrystudio/ai-core@workspace:packages/aiCore":
  version: 0.0.0-use.local
  resolution: "@cherrystudio/ai-core@workspace:packages/aiCore"
  dependencies:
    "@ai-sdk/anthropic": "npm:^2.0.43"
    "@ai-sdk/azure": "npm:^2.0.66"
    "@ai-sdk/deepseek": "npm:^1.0.27"
    "@ai-sdk/google": "patch:@ai-sdk/google@npm%3A2.0.31#~/.yarn/patches/@ai-sdk-google-npm-2.0.31-b0de047210.patch"
    "@ai-sdk/openai": "patch:@ai-sdk/openai@npm%3A2.0.64#~/.yarn/patches/@ai-sdk-openai-npm-2.0.64-48f99f5bf3.patch"
    "@ai-sdk/openai-compatible": "npm:^1.0.26"
    "@ai-sdk/provider": "npm:^2.0.0"
    "@ai-sdk/provider-utils": "npm:^3.0.16"
    "@ai-sdk/xai": "npm:^2.0.31"
    "@cherrystudio/ai-sdk-provider": "workspace:*"
    tsdown: "npm:^0.12.9"
    typescript: "npm:^5.0.0"
    vitest: "npm:^3.2.4"
    zod: "npm:^4.1.5"
  peerDependencies:
    "@ai-sdk/google": ^2.0.36
    "@ai-sdk/openai": ^2.0.64
    "@cherrystudio/ai-sdk-provider": ^0.1.2
    ai: ^5.0.26
  languageName: unknown
  linkType: soft

"@cherrystudio/ai-sdk-provider@workspace:*, @cherrystudio/ai-sdk-provider@workspace:packages/ai-sdk-provider":
"@cherrystudio/ai-sdk-provider@workspace:packages/ai-sdk-provider":
  version: 0.0.0-use.local
  resolution: "@cherrystudio/ai-sdk-provider@workspace:packages/ai-sdk-provider"
  dependencies:
@ -13640,10 +13640,11 @@ __metadata:
    "@ai-sdk/anthropic": "npm:^2.0.44"
    "@ai-sdk/cerebras": "npm:^1.0.31"
    "@ai-sdk/gateway": "npm:^2.0.9"
    "@ai-sdk/google": "npm:^2.0.32"
    "@ai-sdk/google-vertex": "npm:^3.0.62"
    "@ai-sdk/google": "patch:@ai-sdk/google@npm%3A2.0.36#~/.yarn/patches/@ai-sdk-google-npm-2.0.36-6f3cc06026.patch"
    "@ai-sdk/google-vertex": "npm:^3.0.68"
    "@ai-sdk/huggingface": "patch:@ai-sdk/huggingface@npm%3A0.0.8#~/.yarn/patches/@ai-sdk-huggingface-npm-0.0.8-d4d0aaac93.patch"
    "@ai-sdk/mistral": "npm:^2.0.23"
    "@ai-sdk/openai": "patch:@ai-sdk/openai@npm%3A2.0.64#~/.yarn/patches/@ai-sdk-openai-npm-2.0.64-48f99f5bf3.patch"
    "@ai-sdk/perplexity": "npm:^2.0.17"
    "@ant-design/v5-patch-for-react-19": "npm:^1.0.3"
    "@anthropic-ai/claude-agent-sdk": "patch:@anthropic-ai/claude-agent-sdk@npm%3A0.1.30#~/.yarn/patches/@anthropic-ai-claude-agent-sdk-npm-0.1.30-b50a299674.patch"
@ -13653,7 +13654,7 @@ __metadata:
    "@aws-sdk/client-bedrock-runtime": "npm:^3.910.0"
    "@aws-sdk/client-s3": "npm:^3.910.0"
    "@biomejs/biome": "npm:2.2.4"
    "@cherrystudio/ai-core": "workspace:^1.0.0-alpha.18"
    "@cherrystudio/ai-core": "workspace:^1.0.9"
    "@cherrystudio/embedjs": "npm:^0.1.31"
    "@cherrystudio/embedjs-libsql": "npm:^0.1.31"
    "@cherrystudio/embedjs-loader-csv": "npm:^0.1.31"
@ -13808,6 +13809,7 @@ __metadata:
    electron-window-state: "npm:^5.0.3"
    emittery: "npm:^1.0.3"
    emoji-picker-element: "npm:^1.22.1"
    emoji-picker-element-data: "npm:^1"
    epub: "patch:epub@npm%3A1.3.0#~/.yarn/patches/epub-npm-1.3.0-8325494ffe.patch"
    eslint: "npm:^9.22.0"
    eslint-plugin-import-zod: "npm:^1.2.0"
@ -17558,6 +17560,13 @@ __metadata:
  languageName: node
  linkType: hard

"emoji-picker-element-data@npm:^1":
  version: 1.8.0
  resolution: "emoji-picker-element-data@npm:1.8.0"
  checksum: 10c0/c8976b636205a0cc90d2690859a1193add71a948dadf743962b47c338a4c3715768404d0ccbc02156608b44abf41f3e1d51756e06f1bbed9d164dd4cb1752103
  languageName: node
  linkType: hard

"emoji-picker-element@npm:^1.22.1":
  version: 1.26.3
  resolution: "emoji-picker-element@npm:1.26.3"