diff --git a/.yarn/patches/@ai-sdk-google-npm-2.0.31-b0de047210.patch b/.yarn/patches/@ai-sdk-google-npm-2.0.31-b0de047210.patch
deleted file mode 100644
index 75c418e591..0000000000
--- a/.yarn/patches/@ai-sdk-google-npm-2.0.31-b0de047210.patch
+++ /dev/null
@@ -1,26 +0,0 @@
-diff --git a/dist/index.js b/dist/index.js
-index ff305b112779b718f21a636a27b1196125a332d9..cf32ff5086d4d9e56f8fe90c98724559083bafc3 100644
---- a/dist/index.js
-+++ b/dist/index.js
-@@ -471,7 +471,7 @@ function convertToGoogleGenerativeAIMessages(prompt, options) {
-
- // src/get-model-path.ts
- function getModelPath(modelId) {
--  return modelId.includes("/") ? modelId : `models/${modelId}`;
-+  return modelId.includes("models/") ? modelId : `models/${modelId}`;
- }
-
- // src/google-generative-ai-options.ts
-diff --git a/dist/index.mjs b/dist/index.mjs
-index 57659290f1cec74878a385626ad75b2a4d5cd3fc..d04e5927ec3725b6ffdb80868bfa1b5a48849537 100644
---- a/dist/index.mjs
-+++ b/dist/index.mjs
-@@ -477,7 +477,7 @@ function convertToGoogleGenerativeAIMessages(prompt, options) {
-
- // src/get-model-path.ts
- function getModelPath(modelId) {
--  return modelId.includes("/") ? modelId : `models/${modelId}`;
-+  return modelId.includes("models/") ? modelId : `models/${modelId}`;
- }
-
- // src/google-generative-ai-options.ts
diff --git a/.yarn/patches/@ai-sdk-google-npm-2.0.36-6f3cc06026.patch b/.yarn/patches/@ai-sdk-google-npm-2.0.36-6f3cc06026.patch
new file mode 100644
index 0000000000..18570d5ced
--- /dev/null
+++ b/.yarn/patches/@ai-sdk-google-npm-2.0.36-6f3cc06026.patch
@@ -0,0 +1,152 @@
+diff --git a/dist/index.js b/dist/index.js
+index c2ef089c42e13a8ee4a833899a415564130e5d79..75efa7baafb0f019fb44dd50dec1641eee8879e7 100644
+--- a/dist/index.js
++++ b/dist/index.js
+@@ -471,7 +471,7 @@ function convertToGoogleGenerativeAIMessages(prompt, options) {
+
+ // src/get-model-path.ts
+ function getModelPath(modelId) {
+-  return modelId.includes("/") ? modelId : `models/${modelId}`;
++  return modelId.includes("models/") ? modelId : `models/${modelId}`;
+ }
+
+ // src/google-generative-ai-options.ts
+diff --git a/dist/index.mjs b/dist/index.mjs
+index d75c0cc13c41192408c1f3f2d29d76a7bffa6268..ada730b8cb97d9b7d4cb32883a1d1ff416404d9b 100644
+--- a/dist/index.mjs
++++ b/dist/index.mjs
+@@ -477,7 +477,7 @@ function convertToGoogleGenerativeAIMessages(prompt, options) {
+
+ // src/get-model-path.ts
+ function getModelPath(modelId) {
+-  return modelId.includes("/") ? modelId : `models/${modelId}`;
++  return modelId.includes("models/") ? modelId : `models/${modelId}`;
+ }
+
+ // src/google-generative-ai-options.ts
+diff --git a/dist/internal/index.js b/dist/internal/index.js
+index 277cac8dc734bea2fb4f3e9a225986b402b24f48..bb704cd79e602eb8b0cee1889e42497d59ccdb7a 100644
+--- a/dist/internal/index.js
++++ b/dist/internal/index.js
+@@ -432,7 +432,15 @@ function prepareTools({
+   var _a;
+   tools = (tools == null ? void 0 : tools.length) ? tools : void 0;
+   const toolWarnings = [];
+-  const isGemini2 = modelId.includes("gemini-2");
++  // These changes could be safely removed when @ai-sdk/google v3 released.
++  const isLatest = (
++    [
++      'gemini-flash-latest',
++      'gemini-flash-lite-latest',
++      'gemini-pro-latest',
++    ]
++  ).some(id => id === modelId);
++  const isGemini2OrNewer = modelId.includes("gemini-2") || modelId.includes("gemini-3") || isLatest;
+   const supportsDynamicRetrieval = modelId.includes("gemini-1.5-flash") && !modelId.includes("-8b");
+   const supportsFileSearch = modelId.includes("gemini-2.5");
+   if (tools == null) {
+@@ -458,7 +466,7 @@ function prepareTools({
+   providerDefinedTools.forEach((tool) => {
+     switch (tool.id) {
+       case "google.google_search":
+-        if (isGemini2) {
++        if (isGemini2OrNewer) {
+           googleTools2.push({ googleSearch: {} });
+         } else if (supportsDynamicRetrieval) {
+           googleTools2.push({
+@@ -474,7 +482,7 @@ function prepareTools({
+         }
+         break;
+       case "google.url_context":
+-        if (isGemini2) {
++        if (isGemini2OrNewer) {
+           googleTools2.push({ urlContext: {} });
+         } else {
+           toolWarnings.push({
+@@ -485,7 +493,7 @@ function prepareTools({
+         }
+         break;
+       case "google.code_execution":
+-        if (isGemini2) {
++        if (isGemini2OrNewer) {
+           googleTools2.push({ codeExecution: {} });
+         } else {
+           toolWarnings.push({
+@@ -507,7 +515,7 @@ function prepareTools({
+         }
+         break;
+       case "google.vertex_rag_store":
+-        if (isGemini2) {
++        if (isGemini2OrNewer) {
+           googleTools2.push({
+             retrieval: {
+               vertex_rag_store: {
+diff --git a/dist/internal/index.mjs b/dist/internal/index.mjs
+index 03b7cc591be9b58bcc2e775a96740d9f98862a10..347d2c12e1cee79f0f8bb258f3844fb0522a6485 100644
+--- a/dist/internal/index.mjs
++++ b/dist/internal/index.mjs
+@@ -424,7 +424,15 @@ function prepareTools({
+   var _a;
+   tools = (tools == null ? void 0 : tools.length) ? tools : void 0;
+   const toolWarnings = [];
+-  const isGemini2 = modelId.includes("gemini-2");
++  // These changes could be safely removed when @ai-sdk/google v3 released.
++  const isLatest = (
++    [
++      'gemini-flash-latest',
++      'gemini-flash-lite-latest',
++      'gemini-pro-latest',
++    ]
++  ).some(id => id === modelId);
++  const isGemini2OrNewer = modelId.includes("gemini-2") || modelId.includes("gemini-3") || isLatest;
+   const supportsDynamicRetrieval = modelId.includes("gemini-1.5-flash") && !modelId.includes("-8b");
+   const supportsFileSearch = modelId.includes("gemini-2.5");
+   if (tools == null) {
+@@ -450,7 +458,7 @@ function prepareTools({
+   providerDefinedTools.forEach((tool) => {
+     switch (tool.id) {
+       case "google.google_search":
+-        if (isGemini2) {
++        if (isGemini2OrNewer) {
+           googleTools2.push({ googleSearch: {} });
+         } else if (supportsDynamicRetrieval) {
+           googleTools2.push({
+@@ -466,7 +474,7 @@ function prepareTools({
+         }
+         break;
+       case "google.url_context":
+-        if (isGemini2) {
++        if (isGemini2OrNewer) {
+           googleTools2.push({ urlContext: {} });
+         } else {
+           toolWarnings.push({
+@@ -477,7 +485,7 @@ function prepareTools({
+         }
+         break;
+       case "google.code_execution":
+-        if (isGemini2) {
++        if (isGemini2OrNewer) {
+           googleTools2.push({ codeExecution: {} });
+         } else {
+           toolWarnings.push({
+@@ -499,7 +507,7 @@ function prepareTools({
+         }
+         break;
+       case "google.vertex_rag_store":
+-        if (isGemini2) {
++        if (isGemini2OrNewer) {
+           googleTools2.push({
+             retrieval: {
+               vertex_rag_store: {
+@@ -1434,9 +1442,7 @@ var googleTools = {
+   vertexRagStore
+ };
+ export {
+-  GoogleGenerativeAILanguageModel,
+   getGroundingMetadataSchema,
+-  getUrlContextMetadataSchema,
+-  googleTools
++  getUrlContextMetadataSchema, GoogleGenerativeAILanguageModel, googleTools
+ };
+ //# sourceMappingURL=index.mjs.map
+\ No newline at end of file
diff --git a/package.json b/package.json
index ea14d7d5f5..ceb0cbf3ac 100644
--- a/package.json
+++ b/package.json
@@ -74,9 +74,10 @@
     "format:check": "biome format && biome lint",
     "prepare": "git config blame.ignoreRevsFile .git-blame-ignore-revs && husky",
     "claude": "dotenv -e .env -- claude",
-    "release:aicore:alpha": "yarn workspace @cherrystudio/ai-core version prerelease --immediate && yarn workspace @cherrystudio/ai-core npm publish --tag alpha --access public",
-    "release:aicore:beta": "yarn workspace @cherrystudio/ai-core version prerelease --immediate && yarn workspace @cherrystudio/ai-core npm publish --tag beta --access public",
-    "release:aicore": "yarn workspace @cherrystudio/ai-core version patch --immediate && yarn workspace @cherrystudio/ai-core npm publish --access public"
+    "release:aicore:alpha": "yarn workspace @cherrystudio/ai-core version prerelease --preid alpha --immediate && yarn workspace @cherrystudio/ai-core build && yarn workspace @cherrystudio/ai-core npm publish --tag alpha --access public",
+    "release:aicore:beta": "yarn workspace @cherrystudio/ai-core version prerelease --preid beta --immediate && yarn workspace @cherrystudio/ai-core build && yarn workspace @cherrystudio/ai-core npm publish --tag beta --access public",
+    "release:aicore": "yarn workspace @cherrystudio/ai-core version patch --immediate && yarn workspace @cherrystudio/ai-core build && yarn workspace @cherrystudio/ai-core npm publish --access public",
+    "release:ai-sdk-provider": "yarn workspace @cherrystudio/ai-sdk-provider version patch --immediate && yarn workspace @cherrystudio/ai-sdk-provider build && yarn workspace @cherrystudio/ai-sdk-provider npm publish --access public"
   },
   "dependencies": {
     "@anthropic-ai/claude-agent-sdk": "patch:@anthropic-ai/claude-agent-sdk@npm%3A0.1.30#~/.yarn/patches/@anthropic-ai-claude-agent-sdk-npm-0.1.30-b50a299674.patch",
@@ -85,6 +86,7 @@
     "@napi-rs/system-ocr": "patch:@napi-rs/system-ocr@npm%3A1.0.2#~/.yarn/patches/@napi-rs-system-ocr-npm-1.0.2-59e7a78e8b.patch",
     "@paymoapp/electron-shutdown-handler": "^1.1.2",
     "@strongtz/win32-arm64-msvc": "^0.4.7",
+    "emoji-picker-element-data": "^1",
     "express": "^5.1.0",
     "font-list": "^2.0.0",
     "graceful-fs": "^4.2.11",
@@ -111,10 +113,11 @@
     "@ai-sdk/anthropic": "^2.0.44",
     "@ai-sdk/cerebras": "^1.0.31",
     "@ai-sdk/gateway": "^2.0.9",
-    "@ai-sdk/google": "^2.0.32",
-    "@ai-sdk/google-vertex": "^3.0.62",
+    "@ai-sdk/google": "patch:@ai-sdk/google@npm%3A2.0.36#~/.yarn/patches/@ai-sdk-google-npm-2.0.36-6f3cc06026.patch",
+    "@ai-sdk/google-vertex": "^3.0.68",
     "@ai-sdk/huggingface": "patch:@ai-sdk/huggingface@npm%3A0.0.8#~/.yarn/patches/@ai-sdk-huggingface-npm-0.0.8-d4d0aaac93.patch",
     "@ai-sdk/mistral": "^2.0.23",
+    "@ai-sdk/openai": "patch:@ai-sdk/openai@npm%3A2.0.64#~/.yarn/patches/@ai-sdk-openai-npm-2.0.64-48f99f5bf3.patch",
     "@ai-sdk/perplexity": "^2.0.17",
     "@ant-design/v5-patch-for-react-19": "^1.0.3",
     "@anthropic-ai/sdk": "^0.41.0",
@@ -123,7 +126,7 @@
     "@aws-sdk/client-bedrock-runtime": "^3.910.0",
     "@aws-sdk/client-s3": "^3.910.0",
     "@biomejs/biome": "2.2.4",
-    "@cherrystudio/ai-core": "workspace:^1.0.0-alpha.18",
+    "@cherrystudio/ai-core": "workspace:^1.0.9",
     "@cherrystudio/embedjs": "^0.1.31",
     "@cherrystudio/embedjs-libsql": "^0.1.31",
     "@cherrystudio/embedjs-loader-csv": "^0.1.31",
@@ -410,7 +413,7 @@
     "@langchain/openai@npm:>=0.2.0 <0.7.0": "patch:@langchain/openai@npm%3A1.0.0#~/.yarn/patches/@langchain-openai-npm-1.0.0-474d0ad9d4.patch",
     "@ai-sdk/openai@npm:2.0.64": "patch:@ai-sdk/openai@npm%3A2.0.64#~/.yarn/patches/@ai-sdk-openai-npm-2.0.64-48f99f5bf3.patch",
     "@ai-sdk/openai@npm:^2.0.42": "patch:@ai-sdk/openai@npm%3A2.0.64#~/.yarn/patches/@ai-sdk-openai-npm-2.0.64-48f99f5bf3.patch",
-    "@ai-sdk/google@npm:2.0.31": "patch:@ai-sdk/google@npm%3A2.0.31#~/.yarn/patches/@ai-sdk-google-npm-2.0.31-b0de047210.patch"
+    "@ai-sdk/google@npm:2.0.36": "patch:@ai-sdk/google@npm%3A2.0.36#~/.yarn/patches/@ai-sdk-google-npm-2.0.36-6f3cc06026.patch"
   },
   "packageManager": "yarn@4.9.1",
   "lint-staged": {
diff --git a/packages/ai-sdk-provider/package.json b/packages/ai-sdk-provider/package.json
index fd0aac2643..bf509ee963 100644
--- a/packages/ai-sdk-provider/package.json
+++ b/packages/ai-sdk-provider/package.json
@@ -1,6 +1,6 @@
 {
   "name": "@cherrystudio/ai-sdk-provider",
-  "version": "0.1.0",
+  "version": "0.1.2",
   "description": "Cherry Studio AI SDK provider bundle with CherryIN routing.",
   "keywords": [
     "ai-sdk",
diff --git a/packages/aiCore/README.md b/packages/aiCore/README.md
index 4ca5ea6640..1380019094 100644
--- a/packages/aiCore/README.md
+++ b/packages/aiCore/README.md
@@ -71,7 +71,7 @@ Cherry Studio AI Core 是一个基于 Vercel AI SDK 的统一 AI Provider 接口
 ## 安装
 
 ```bash
-npm install @cherrystudio/ai-core ai
+npm install @cherrystudio/ai-core ai @ai-sdk/google @ai-sdk/openai
 ```
 
 ### React Native
diff --git a/packages/aiCore/package.json b/packages/aiCore/package.json
index bb673392a2..fbbea52d40 100644
--- a/packages/aiCore/package.json
+++ b/packages/aiCore/package.json
@@ -1,6 +1,6 @@
 {
   "name": "@cherrystudio/ai-core",
-  "version": "1.0.1",
+  "version": "1.0.9",
   "description": "Cherry Studio AI Core - Unified AI Provider Interface Based on Vercel AI SDK",
   "main": "dist/index.js",
   "module": "dist/index.mjs",
@@ -33,19 +33,19 @@
   },
   "homepage": "https://github.com/CherryHQ/cherry-studio#readme",
   "peerDependencies": {
+    "@ai-sdk/google": "^2.0.36",
+    "@ai-sdk/openai": "^2.0.64",
+    "@cherrystudio/ai-sdk-provider": "^0.1.2",
     "ai": "^5.0.26"
   },
   "dependencies": {
     "@ai-sdk/anthropic": "^2.0.43",
     "@ai-sdk/azure": "^2.0.66",
     "@ai-sdk/deepseek": "^1.0.27",
-    "@ai-sdk/google": "patch:@ai-sdk/google@npm%3A2.0.31#~/.yarn/patches/@ai-sdk-google-npm-2.0.31-b0de047210.patch",
-    "@ai-sdk/openai": "patch:@ai-sdk/openai@npm%3A2.0.64#~/.yarn/patches/@ai-sdk-openai-npm-2.0.64-48f99f5bf3.patch",
     "@ai-sdk/openai-compatible": "^1.0.26",
     "@ai-sdk/provider": "^2.0.0",
     "@ai-sdk/provider-utils": "^3.0.16",
     "@ai-sdk/xai": "^2.0.31",
-    "@cherrystudio/ai-sdk-provider": "workspace:*",
     "zod": "^4.1.5"
   },
   "devDependencies": {
diff --git a/packages/aiCore/src/core/plugins/built-in/index.ts b/packages/aiCore/src/core/plugins/built-in/index.ts
index 1f8916b09a..d7f35d0cd1 100644
--- a/packages/aiCore/src/core/plugins/built-in/index.ts
+++ b/packages/aiCore/src/core/plugins/built-in/index.ts
@@ -4,12 +4,7 @@
  */
 export const BUILT_IN_PLUGIN_PREFIX = 'built-in:'
 
-export { googleToolsPlugin } from './googleToolsPlugin'
-export { createLoggingPlugin } from './logging'
-export { createPromptToolUsePlugin } from './toolUsePlugin/promptToolUsePlugin'
-export type {
-  PromptToolUseConfig,
-  ToolUseRequestContext,
-  ToolUseResult
-} from './toolUsePlugin/type'
-export { webSearchPlugin, type WebSearchPluginConfig } from './webSearchPlugin'
+export * from './googleToolsPlugin'
+export * from './toolUsePlugin/promptToolUsePlugin'
+export * from './toolUsePlugin/type'
+export * from './webSearchPlugin'
diff --git a/packages/aiCore/src/core/plugins/built-in/webSearchPlugin/index.ts b/packages/aiCore/src/core/plugins/built-in/webSearchPlugin/index.ts
index 23ea952323..75692cdf36 100644
--- a/packages/aiCore/src/core/plugins/built-in/webSearchPlugin/index.ts
+++ b/packages/aiCore/src/core/plugins/built-in/webSearchPlugin/index.ts
@@ -32,7 +32,7 @@ export const webSearchPlugin = (config: WebSearchPluginConfig = DEFAULT_WEB_SEAR
 })
 
 // 导出类型定义供开发者使用
-export type { WebSearchPluginConfig, WebSearchToolOutputSchema } from './helper'
+export * from './helper'
 
 // 默认导出
 export default webSearchPlugin
diff --git a/packages/aiCore/src/core/providers/index.ts b/packages/aiCore/src/core/providers/index.ts
index 3ac445cb22..b9ebd6f682 100644
--- a/packages/aiCore/src/core/providers/index.ts
+++ b/packages/aiCore/src/core/providers/index.ts
@@ -44,7 +44,7 @@ export {
 // ==================== 基础数据和类型 ====================
 
 // 基础Provider数据源
-export { baseProviderIds, baseProviders } from './schemas'
+export { baseProviderIds, baseProviders, isBaseProvider } from './schemas'
 
 // 类型定义和Schema
 export type {
diff --git a/packages/aiCore/src/core/providers/schemas.ts b/packages/aiCore/src/core/providers/schemas.ts
index 778b1b705a..43a370af9b 100644
--- a/packages/aiCore/src/core/providers/schemas.ts
+++ b/packages/aiCore/src/core/providers/schemas.ts
@@ -7,7 +7,6 @@ import { createAzure } from '@ai-sdk/azure'
 import { type AzureOpenAIProviderSettings } from '@ai-sdk/azure'
 import { createDeepSeek } from '@ai-sdk/deepseek'
 import { createGoogleGenerativeAI } from '@ai-sdk/google'
-import { createHuggingFace } from '@ai-sdk/huggingface'
 import { createOpenAI, type OpenAIProviderSettings } from '@ai-sdk/openai'
 import { createOpenAICompatible } from '@ai-sdk/openai-compatible'
 import type { LanguageModelV2 } from '@ai-sdk/provider'
@@ -33,8 +32,7 @@ export const baseProviderIds = [
   'deepseek',
   'openrouter',
   'cherryin',
-  'cherryin-chat',
-  'huggingface'
+  'cherryin-chat'
 ] as const
 
 /**
@@ -158,12 +156,6 @@ export const baseProviders = [
       })
     },
     supportsImageGeneration: true
-  },
-  {
-    id: 'huggingface',
-    name: 'HuggingFace',
-    creator: createHuggingFace,
-    supportsImageGeneration: true
   }
 ] as const satisfies BaseProvider[]
 
diff --git a/src/main/apiServer/routes/models.ts b/src/main/apiServer/routes/models.ts
index 8481e1ea59..d776d5ea91 100644
--- a/src/main/apiServer/routes/models.ts
+++ b/src/main/apiServer/routes/models.ts
@@ -104,12 +104,6 @@ const router = express
       logger.warn('No models available from providers', { filter })
     }
 
-    logger.info('Models response ready', {
-      filter,
-      total: response.total,
-      modelIds: response.data.map((m) => m.id)
-    })
-
     return res.json(response satisfies ApiModelsResponse)
   } catch (error: any) {
     logger.error('Error fetching models', { error })
diff --git a/src/main/apiServer/services/models.ts b/src/main/apiServer/services/models.ts
index a32d6d37dc..52f0db857f 100644
--- a/src/main/apiServer/services/models.ts
+++ b/src/main/apiServer/services/models.ts
@@ -32,7 +32,7 @@ export class ModelsService {
 
     for (const model of models) {
       const provider = providers.find((p) => p.id === model.provider)
-      logger.debug(`Processing model ${model.id}`)
+      // logger.debug(`Processing model ${model.id}`)
       if (!provider) {
        logger.debug(`Skipping model ${model.id} . Reason: Provider not found.`)
        continue
diff --git a/src/main/services/agents/services/claudecode/__tests__/transform.test.ts b/src/main/services/agents/services/claudecode/__tests__/transform.test.ts
index 8f8c1df038..2565f5e605 100644
--- a/src/main/services/agents/services/claudecode/__tests__/transform.test.ts
+++ b/src/main/services/agents/services/claudecode/__tests__/transform.test.ts
@@ -21,11 +21,16 @@ describe('stripLocalCommandTags', () => {
       'line1\nkeep\nError'
     expect(stripLocalCommandTags(input)).toBe('line1\nkeep\nError')
   })
+
+  it('if no tags present, returns original string', () => {
+    const input = 'just some normal text'
+    expect(stripLocalCommandTags(input)).toBe(input)
+  })
 })
 
 describe('Claude → AiSDK transform', () => {
   it('handles tool call streaming lifecycle', () => {
-    const state = new ClaudeStreamState()
+    const state = new ClaudeStreamState({ agentSessionId: baseStreamMetadata.session_id })
     const parts: ReturnType[number][] = []
 
     const messages: SDKMessage[] = [
@@ -182,14 +187,119 @@ describe('Claude → AiSDK transform', () => {
       (typeof parts)[number],
       { type: 'tool-result' }
     >
-    expect(toolResult.toolCallId).toBe('tool-1')
+    expect(toolResult.toolCallId).toBe('session-123:tool-1')
     expect(toolResult.toolName).toBe('Bash')
     expect(toolResult.input).toEqual({ command: 'ls' })
     expect(toolResult.output).toBe('ok')
   })
 
+  it('handles tool calls without streaming events (no content_block_start/stop)', () => {
+    const state = new ClaudeStreamState({ agentSessionId: '12344' })
+    const parts: ReturnType[number][] = []
+
+    const messages: SDKMessage[] = [
+      {
+        ...baseStreamMetadata,
+        type: 'assistant',
+        uuid: uuid(20),
+        message: {
+          id: 'msg-tool-no-stream',
+          type: 'message',
+          role: 'assistant',
+          model: 'claude-test',
+          content: [
+            {
+              type: 'tool_use',
+              id: 'tool-read',
+              name: 'Read',
+              input: { file_path: '/test.txt' }
+            },
+            {
+              type: 'tool_use',
+              id: 'tool-bash',
+              name: 'Bash',
+              input: { command: 'ls -la' }
+            }
+          ],
+          stop_reason: 'tool_use',
+          stop_sequence: null,
+          usage: {
+            input_tokens: 10,
+            output_tokens: 20
+          }
+        }
+      } as unknown as SDKMessage,
+      {
+        ...baseStreamMetadata,
+        type: 'user',
+        uuid: uuid(21),
+        message: {
+          role: 'user',
+          content: [
+            {
+              type: 'tool_result',
+              tool_use_id: 'tool-read',
+              content: 'file contents',
+              is_error: false
+            }
+          ]
+        }
+      } as SDKMessage,
+      {
+        ...baseStreamMetadata,
+        type: 'user',
+        uuid: uuid(22),
+        message: {
+          role: 'user',
+          content: [
+            {
+              type: 'tool_result',
+              tool_use_id: 'tool-bash',
+              content: 'total 42\n...',
+              is_error: false
+            }
+          ]
+        }
+      } as SDKMessage
+    ]
+
+    for (const message of messages) {
+      const transformed = transformSDKMessageToStreamParts(message, state)
+      parts.push(...transformed)
+    }
+
+    const types = parts.map((part) => part.type)
+    expect(types).toEqual(['tool-call', 'tool-call', 'tool-result', 'tool-result'])
+
+    const toolCalls = parts.filter((part) => part.type === 'tool-call') as Extract<
+      (typeof parts)[number],
+      { type: 'tool-call' }
+    >[]
+    expect(toolCalls).toHaveLength(2)
+    expect(toolCalls[0].toolName).toBe('Read')
+    expect(toolCalls[0].toolCallId).toBe('12344:tool-read')
+    expect(toolCalls[1].toolName).toBe('Bash')
+    expect(toolCalls[1].toolCallId).toBe('12344:tool-bash')
+
+    const toolResults = parts.filter((part) => part.type === 'tool-result') as Extract<
+      (typeof parts)[number],
+      { type: 'tool-result' }
+    >[]
+    expect(toolResults).toHaveLength(2)
+    // This is the key assertion - toolName should NOT be 'unknown'
+    expect(toolResults[0].toolName).toBe('Read')
+    expect(toolResults[0].toolCallId).toBe('12344:tool-read')
+    expect(toolResults[0].input).toEqual({ file_path: '/test.txt' })
+    expect(toolResults[0].output).toBe('file contents')
+
+    expect(toolResults[1].toolName).toBe('Bash')
+    expect(toolResults[1].toolCallId).toBe('12344:tool-bash')
+    expect(toolResults[1].input).toEqual({ command: 'ls -la' })
+    expect(toolResults[1].output).toBe('total 42\n...')
+  })
+
   it('handles streaming text completion', () => {
-    const state = new ClaudeStreamState()
+    const state = new ClaudeStreamState({ agentSessionId: baseStreamMetadata.session_id })
     const parts: ReturnType[number][] = []
 
     const messages: SDKMessage[] = [
@@ -300,4 +410,87 @@
     expect(finishStep.finishReason).toBe('stop')
     expect(finishStep.usage).toEqual({ inputTokens: 2, outputTokens: 4, totalTokens: 6 })
   })
+
+  it('emits fallback text when Claude sends a snapshot instead of deltas', () => {
+    const state = new ClaudeStreamState({ agentSessionId: '12344' })
+    const parts: ReturnType[number][] = []
+
+    const messages: SDKMessage[] = [
+      {
+        ...baseStreamMetadata,
+        type: 'stream_event',
+        uuid: uuid(30),
+        event: {
+          type: 'message_start',
+          message: {
+            id: 'msg-fallback',
+            type: 'message',
+            role: 'assistant',
+            model: 'claude-test',
+            content: [],
+            stop_reason: null,
+            stop_sequence: null,
+            usage: {}
+          }
+        }
+      } as unknown as SDKMessage,
+      {
+        ...baseStreamMetadata,
+        type: 'stream_event',
+        uuid: uuid(31),
+        event: {
+          type: 'content_block_start',
+          index: 0,
+          content_block: {
+            type: 'text',
+            text: ''
+          }
+        }
+      } as unknown as SDKMessage,
+      {
+        ...baseStreamMetadata,
+        type: 'assistant',
+        uuid: uuid(32),
+        message: {
+          id: 'msg-fallback-content',
+          type: 'message',
+          role: 'assistant',
+          model: 'claude-test',
+          content: [
+            {
+              type: 'text',
+              text: 'Final answer without streaming deltas.'
+            }
+          ],
+          stop_reason: 'end_turn',
+          stop_sequence: null,
+          usage: {
+            input_tokens: 3,
+            output_tokens: 7
+          }
+        }
+      } as unknown as SDKMessage
+    ]
+
+    for (const message of messages) {
+      const transformed = transformSDKMessageToStreamParts(message, state)
+      parts.push(...transformed)
+    }
+
+    const types = parts.map((part) => part.type)
+    expect(types).toEqual(['start-step', 'text-start', 'text-delta', 'text-end', 'finish-step'])
+
+    const delta = parts.find((part) => part.type === 'text-delta') as Extract<
+      (typeof parts)[number],
+      { type: 'text-delta' }
+    >
+    expect(delta.text).toBe('Final answer without streaming deltas.')
+
+    const finish = parts.find((part) => part.type === 'finish-step') as Extract<
+      (typeof parts)[number],
+      { type: 'finish-step' }
+    >
+    expect(finish.usage).toEqual({ inputTokens: 3, outputTokens: 7, totalTokens: 10 })
+    expect(finish.finishReason).toBe('stop')
+  })
 })
diff --git a/src/main/services/agents/services/claudecode/claude-stream-state.ts b/src/main/services/agents/services/claudecode/claude-stream-state.ts
index 078f048ce8..30b5790c82 100644
--- a/src/main/services/agents/services/claudecode/claude-stream-state.ts
+++ b/src/main/services/agents/services/claudecode/claude-stream-state.ts
@@ -10,8 +10,21 @@
  * Every Claude turn gets its own instance. `resetStep` should be invoked once the finish event has
  * been emitted to avoid leaking state into the next turn.
  */
+import { loggerService } from '@logger'
 import type { FinishReason, LanguageModelUsage, ProviderMetadata } from 'ai'
 
+/**
+ * Builds a namespaced tool call ID by combining session ID with raw tool call ID.
+ * This ensures tool calls from different sessions don't conflict even if they have
+ * the same raw ID from the SDK.
+ *
+ * @param sessionId - The agent session ID
+ * @param rawToolCallId - The raw tool call ID from SDK (e.g., "WebFetch_0")
+ */
+export function buildNamespacedToolCallId(sessionId: string, rawToolCallId: string): string {
+  return `${sessionId}:${rawToolCallId}`
+}
+
 /**
  * Shared fields for every block that Claude can stream (text, reasoning, tool).
  */
@@ -34,6 +47,7 @@ type ReasoningBlockState = BaseBlockState & {
 type ToolBlockState = BaseBlockState & {
   kind: 'tool'
   toolCallId: string
+  rawToolCallId: string
   toolName: string
   inputBuffer: string
   providerMetadata?: ProviderMetadata
@@ -48,12 +62,17 @@
 }
 
 type PendingToolCall = {
+  rawToolCallId: string
   toolCallId: string
   toolName: string
   input: unknown
   providerMetadata?: ProviderMetadata
 }
 
+type ClaudeStreamStateOptions = {
+  agentSessionId: string
+}
+
 /**
  * Tracks the lifecycle of Claude streaming blocks (text, thinking, tool calls)
  * across individual websocket events. The transformer relies on this class to
@@ -61,12 +80,20 @@ type PendingToolCall = {
  * usage/finish metadata once Anthropic closes a message.
  */
 export class ClaudeStreamState {
+  private logger
+  private readonly agentSessionId: string
   private blocksByIndex = new Map()
-  private toolIndexById = new Map()
+  private toolIndexByNamespacedId = new Map()
   private pendingUsage: PendingUsageState = {}
   private pendingToolCalls = new Map()
   private stepActive = false
 
+  constructor(options: ClaudeStreamStateOptions) {
+    this.logger = loggerService.withContext('ClaudeStreamState')
+    this.agentSessionId = options.agentSessionId
+    this.logger.silly('ClaudeStreamState', options)
+  }
+
   /** Marks the beginning of a new AiSDK step. */
   beginStep(): void {
     this.stepActive = true
@@ -104,19 +131,21 @@ export class ClaudeStreamState {
   /** Caches tool metadata so subsequent input deltas and results can find it. */
   openToolBlock(
     index: number,
-    params: { toolCallId: string; toolName: string; providerMetadata?: ProviderMetadata }
+    params: { rawToolCallId: string; toolName: string; providerMetadata?: ProviderMetadata }
   ): ToolBlockState {
+    const toolCallId = buildNamespacedToolCallId(this.agentSessionId, params.rawToolCallId)
     const block: ToolBlockState = {
       kind: 'tool',
-      id: params.toolCallId,
+      id: toolCallId,
       index,
-      toolCallId: params.toolCallId,
+      toolCallId,
+      rawToolCallId: params.rawToolCallId,
       toolName: params.toolName,
       inputBuffer: '',
      providerMetadata: params.providerMetadata
     }
     this.blocksByIndex.set(index, block)
-    this.toolIndexById.set(params.toolCallId, index)
+    this.toolIndexByNamespacedId.set(toolCallId, index)
     return block
   }
 
@@ -124,14 +153,32 @@ export class ClaudeStreamState {
     return this.blocksByIndex.get(index)
   }
 
+  getFirstOpenTextBlock(): TextBlockState | undefined {
+    const candidates: TextBlockState[] = []
+    for (const block of this.blocksByIndex.values()) {
+      if (block.kind === 'text') {
+        candidates.push(block)
+      }
+    }
+    if (candidates.length === 0) {
+      return undefined
+    }
+    candidates.sort((a, b) => a.index - b.index)
+    return candidates[0]
+  }
+
   getToolBlockById(toolCallId: string): ToolBlockState | undefined {
-    const index = this.toolIndexById.get(toolCallId)
+    const index = this.toolIndexByNamespacedId.get(toolCallId)
     if (index === undefined) return undefined
     const block = this.blocksByIndex.get(index)
     if (!block || block.kind !== 'tool') return undefined
     return block
   }
 
+  getToolBlockByRawId(rawToolCallId: string): ToolBlockState | undefined {
+    return this.getToolBlockById(buildNamespacedToolCallId(this.agentSessionId, rawToolCallId))
+  }
+
   /** Appends streamed text to a text block, returning the updated state when present. */
   appendTextDelta(index: number, text: string): TextBlockState | undefined {
     const block = this.blocksByIndex.get(index)
@@ -158,10 +205,12 @@
 
   /** Records a tool call to be consumed once its result arrives from the user. */
   registerToolCall(
-    toolCallId: string,
+    rawToolCallId: string,
     payload: { toolName: string; input: unknown; providerMetadata?: ProviderMetadata }
   ): void {
-    this.pendingToolCalls.set(toolCallId, {
+    const toolCallId = buildNamespacedToolCallId(this.agentSessionId, rawToolCallId)
+    this.pendingToolCalls.set(rawToolCallId, {
+      rawToolCallId,
       toolCallId,
       toolName: payload.toolName,
       input: payload.input,
@@ -170,10 +219,10 @@
   }
 
   /** Retrieves and clears the buffered tool call metadata for the given id. */
-  consumePendingToolCall(toolCallId: string): PendingToolCall | undefined {
-    const entry = this.pendingToolCalls.get(toolCallId)
+  consumePendingToolCall(rawToolCallId: string): PendingToolCall | undefined {
+    const entry = this.pendingToolCalls.get(rawToolCallId)
     if (entry) {
-      this.pendingToolCalls.delete(toolCallId)
+      this.pendingToolCalls.delete(rawToolCallId)
     }
     return entry
   }
@@ -182,13 +231,13 @@
    * Persists the final input payload for a tool block once the provider signals
    * completion so that downstream tool results can reference the original call.
    */
-  completeToolBlock(toolCallId: string, input: unknown, providerMetadata?: ProviderMetadata): void {
+  completeToolBlock(toolCallId: string, toolName: string, input: unknown, providerMetadata?: ProviderMetadata): void {
+    const block = this.getToolBlockByRawId(toolCallId)
     this.registerToolCall(toolCallId, {
-      toolName: this.getToolBlockById(toolCallId)?.toolName ?? 'unknown',
+      toolName,
       input,
       providerMetadata
     })
-    const block = this.getToolBlockById(toolCallId)
     if (block) {
       block.resolvedInput = input
     }
@@ -200,7 +249,7 @@
     if (!block) return undefined
     this.blocksByIndex.delete(index)
     if (block.kind === 'tool') {
-      this.toolIndexById.delete(block.toolCallId)
+      this.toolIndexByNamespacedId.delete(block.toolCallId)
     }
     return block
   }
@@ -227,7 +276,7 @@
   /** Drops cached block metadata for the currently active message. */
   resetBlocks(): void {
     this.blocksByIndex.clear()
-    this.toolIndexById.clear()
+    this.toolIndexByNamespacedId.clear()
   }
 
   /** Resets the entire step lifecycle after emitting a terminal frame. */
@@ -236,6 +285,10 @@
     this.resetPendingUsage()
     this.stepActive = false
   }
+
+  getNamespacedToolCallId(rawToolCallId: string): string {
+    return buildNamespacedToolCallId(this.agentSessionId, rawToolCallId)
+  }
 }
 
 export type { PendingToolCall }
diff --git a/src/main/services/agents/services/claudecode/index.ts b/src/main/services/agents/services/claudecode/index.ts
index 6405ae94ef..f912539550 100644
--- a/src/main/services/agents/services/claudecode/index.ts
+++ b/src/main/services/agents/services/claudecode/index.ts
@@ -19,6 +19,7 @@ import { app } from 'electron'
 
 import { agentService, type GetAgentSessionResponse } from '../..'
 import type { AgentServiceInterface, AgentStream, AgentStreamEvent } from '../../interfaces/AgentStreamInterface'
 import { sessionService } from '../SessionService'
+import { buildNamespacedToolCallId } from './claude-stream-state'
 import { promptForToolApproval } from './tool-permissions'
 import { ClaudeStreamState, transformSDKMessageToStreamParts } from './transform'
@@ -156,7 +157,10 @@ class ClaudeCodeService implements AgentServiceInterface {
         return { behavior: 'allow', updatedInput: input }
       }
 
-      return promptForToolApproval(toolName, input, options)
+      return promptForToolApproval(toolName, input, {
+        ...options,
+        toolCallId: buildNamespacedToolCallId(session.id, options.toolUseID)
+      })
     }
 
     const subAgents: Record = {}
@@ -378,7 +382,7 @@ class ClaudeCodeService implements AgentServiceInterface {
       const jsonOutput: SDKMessage[] = []
       let hasCompleted = false
       const startTime = Date.now()
-      const streamState = new ClaudeStreamState()
+      const streamState = new ClaudeStreamState({ agentSessionId: sessionId })
 
       try {
         for await (const message of query({ prompt: promptStream, options })) {
@@ -442,23 +446,6 @@ class ClaudeCodeService implements AgentServiceInterface {
             }
           }
 
-          if (message.type === 'assistant' || message.type === 'user') {
-            logger.silly('claude response', {
-              message,
-              content: JSON.stringify(message.message.content)
-            })
-          } else if (message.type === 'stream_event') {
-            // logger.silly('Claude stream event', {
-            //   message,
-            //   event: JSON.stringify(message.event)
-            // })
-          } else {
-            logger.silly('Claude response', {
-              message,
-              event: JSON.stringify(message)
-            })
-          }
-
           const chunks = transformSDKMessageToStreamParts(message, streamState)
           for (const chunk of chunks) {
            stream.emit('data', {
diff --git a/src/main/services/agents/services/claudecode/tool-permissions.ts b/src/main/services/agents/services/claudecode/tool-permissions.ts
index c95f4c679e..5b50f4567e 100644
--- a/src/main/services/agents/services/claudecode/tool-permissions.ts
+++ b/src/main/services/agents/services/claudecode/tool-permissions.ts
@@ -37,6 +37,7 @@ type RendererPermissionRequestPayload = {
   requestId: string
   toolName: string
   toolId: string
+  toolCallId: string
   description?: string
   requiresPermissions: boolean
   input: Record
@@ -206,10 +207,19 @@ const ensureIpcHandlersRegistered = () => {
   })
 }
 
+type PromptForToolApprovalOptions = {
+  signal: AbortSignal
+  suggestions?: PermissionUpdate[]
+
+  // NOTICE: This ID is namespaced with session ID, not the raw SDK tool call ID.
+  // Format: `${sessionId}:${rawToolCallId}`, e.g., `session_123:WebFetch_0`
+  toolCallId: string
+}
+
 export async function promptForToolApproval(
   toolName: string,
   input: Record,
-  options?: { signal: AbortSignal; suggestions?: PermissionUpdate[] }
+  options: PromptForToolApprovalOptions
 ): Promise {
   if (shouldAutoApproveTools) {
     logger.debug('promptForToolApproval auto-approving tool for test', {
@@ -245,6 +255,7 @@ export async function promptForToolApproval(
   logger.info('Requesting user approval for tool usage', {
     requestId,
     toolName,
+    toolCallId: options.toolCallId,
     description: toolMetadata?.description
   })
 
@@ -252,6 +263,7 @@ export async function promptForToolApproval(
     requestId,
     toolName,
     toolId: toolMetadata?.id ?? toolName,
+    toolCallId: options.toolCallId,
     description: toolMetadata?.description,
     requiresPermissions: toolMetadata?.requirePermissions ?? false,
     input: sanitizedInput,
@@ -266,6 +278,7 @@ export async function promptForToolApproval(
   logger.debug('Registering tool permission request', {
     requestId,
     toolName,
+    toolCallId: options.toolCallId,
     requiresPermissions: requestPayload.requiresPermissions,
     timeoutMs: TOOL_APPROVAL_TIMEOUT_MS,
     suggestionCount: sanitizedSuggestions.length
@@ -273,7 +286,11 @@
   return new Promise((resolve) => {
     const timeout = setTimeout(() => {
-      logger.info('User tool permission request timed out', { requestId, toolName })
+      logger.info('User tool permission request timed out', {
+        requestId,
+        toolName,
+        toolCallId: options.toolCallId
+      })
       finalizeRequest(requestId, { behavior: 'deny', message: 'Timed out waiting for approval' }, 'timeout')
     }, TOOL_APPROVAL_TIMEOUT_MS)
 
@@ -287,7 +304,11 @@
 
     if (options?.signal) {
       const abortListener = () => {
-        logger.info('Tool permission request aborted before user responded', { requestId, toolName })
+        logger.info('Tool permission request aborted before user responded', {
+          requestId,
+          toolName,
+          toolCallId: options.toolCallId
+        })
         finalizeRequest(requestId, defaultDenyUpdate, 'aborted')
       }
 
diff --git a/src/main/services/agents/services/claudecode/transform.ts b/src/main/services/agents/services/claudecode/transform.ts
index 41285175b4..00be683ba8 100644
--- a/src/main/services/agents/services/claudecode/transform.ts
+++ b/src/main/services/agents/services/claudecode/transform.ts
@@ -110,7 +110,7 @@ const sdkMessageToProviderMetadata = (message: SDKMessage): ProviderMetadata =>
  * blocks across calls so that incremental deltas can be correlated correctly.
  */
 export function transformSDKMessageToStreamParts(sdkMessage: SDKMessage, state: ClaudeStreamState): AgentStreamPart[] {
-  logger.silly('Transforming SDKMessage', { message: sdkMessage })
+  logger.silly('Transforming SDKMessage', { message: JSON.stringify(sdkMessage) })
   switch (sdkMessage.type) {
     case 'assistant':
       return handleAssistantMessage(sdkMessage, state)
@@ -186,14 +186,13 @@ function handleAssistantMessage(
 
   for (const block of content) {
     switch (block.type) {
-      case 'text':
-        if (!isStreamingActive) {
-          const sanitizedText = stripLocalCommandTags(block.text)
-          if (sanitizedText) {
-            textBlocks.push(sanitizedText)
-          }
+      case 'text': {
+        const sanitizedText = stripLocalCommandTags(block.text)
+        if (sanitizedText) {
+          textBlocks.push(sanitizedText)
         }
         break
+      }
       case 'tool_use':
         handleAssistantToolUse(block as ToolUseContent, providerMetadata, state, chunks)
         break
@@ -203,7 +202,16 @@ function handleAssistantMessage(
     }
   }
 
-  if (!isStreamingActive && textBlocks.length > 0) {
+  if (textBlocks.length === 0) {
+    return chunks
+  }
+
+  const combinedText = textBlocks.join('')
+  if (!combinedText) {
+    return chunks
+  }
+
+  if (!isStreamingActive) {
     const id = message.uuid?.toString() || generateMessageId()
     state.beginStep()
     chunks.push({
@@ -219,7 +227,7 @@ function handleAssistantMessage(
     chunks.push({
       type: 'text-delta',
       id,
-      text: textBlocks.join(''),
+      text: combinedText,
       providerMetadata
     })
     chunks.push({
@@ -230,7 +238,27 @@ function handleAssistantMessage(
     return finalizeNonStreamingStep(message, state, chunks)
   }
 
-  return chunks
+  const existingTextBlock = state.getFirstOpenTextBlock()
+  const fallbackId = existingTextBlock?.id || message.uuid?.toString() || generateMessageId()
+  if (!existingTextBlock) {
+    chunks.push({
+      type: 'text-start',
+      id: fallbackId,
+      providerMetadata
+    })
+  }
+  chunks.push({
+    type: 'text-delta',
+    id: fallbackId,
+    text: combinedText,
+    providerMetadata
+  })
+  chunks.push({
+    type: 'text-end',
+    id: fallbackId,
+    providerMetadata
+  })
+  return finalizeNonStreamingStep(message, state, chunks)
 }
 
 /**
@@ -243,15 +271,16 @@ function handleAssistantToolUse(
   state: ClaudeStreamState,
   chunks: AgentStreamPart[]
 ): void {
+  const toolCallId = state.getNamespacedToolCallId(block.id)
   chunks.push({
     type: 'tool-call',
-    toolCallId: block.id,
+    toolCallId,
     toolName: block.name,
     input: block.input,
     providerExecuted: true,
     providerMetadata
   })
-  state.completeToolBlock(block.id, block.input, providerMetadata)
+  state.completeToolBlock(block.id, block.name, block.input, providerMetadata)
 }
 
 /**
@@ -331,10 +360,11 @@ function handleUserMessage(
     if (block.type === 'tool_result') {
       const toolResult = block as ToolResultContent
       const pendingCall = state.consumePendingToolCall(toolResult.tool_use_id)
+      const toolCallId = pendingCall?.toolCallId ?? state.getNamespacedToolCallId(toolResult.tool_use_id)
       if (toolResult.is_error) {
         chunks.push({
           type: 'tool-error',
-          toolCallId: toolResult.tool_use_id,
+          toolCallId,
           toolName: pendingCall?.toolName ?? 'unknown',
           input: pendingCall?.input,
           error: toolResult.content,
@@ -343,7 +373,7 @@ function handleUserMessage(
       } else {
         chunks.push({
           type: 'tool-result',
-          toolCallId: toolResult.tool_use_id,
+          toolCallId,
           toolName: pendingCall?.toolName ?? 'unknown',
           input: pendingCall?.input,
           output: toolResult.content,
@@ -457,6 +487,9 @@ function handleStreamEvent(
     }
 
     case 'message_stop': {
+      if (!state.hasActiveStep()) {
+        break
+      }
      const pending = state.getPendingUsage()
      chunks.push({
        type: 'finish-step',
@@ -514,7 +547,7 @@ function handleContentBlockStart(
     }
     case 'tool_use': {
       const block = state.openToolBlock(index, {
-        toolCallId: contentBlock.id,
+        rawToolCallId: contentBlock.id,
         toolName: contentBlock.name,
         providerMetadata
       })
diff --git a/src/renderer/src/aiCore/utils/options.ts b/src/renderer/src/aiCore/utils/options.ts
index 128a0f5269..7f4cd33608 100644
--- a/src/renderer/src/aiCore/utils/options.ts
+++ b/src/renderer/src/aiCore/utils/options.ts
@@ -99,9 +99,6 @@ export function buildProviderOptions(
         serviceTier: serviceTierSetting
       }
       break
-    case 'huggingface':
-      providerSpecificOptions = buildOpenAIProviderOptions(assistant, model, capabilities)
-      break
     case 'anthropic':
       providerSpecificOptions = buildAnthropicProviderOptions(assistant, model, capabilities)
       break
@@ -144,6 +141,9 @@ export function buildProviderOptions(
     case 'bedrock':
       providerSpecificOptions = buildBedrockProviderOptions(assistant, model, capabilities)
       break
+    case 'huggingface':
+      providerSpecificOptions = buildOpenAIProviderOptions(assistant, model, capabilities)
+      break
     default:
       // 对于其他 provider,使用通用的构建逻辑
       providerSpecificOptions = {
@@ -162,13 +162,17 @@ export function buildProviderOptions(
     ...getCustomParameters(assistant)
   }
 
-  const rawProviderKey =
+  let rawProviderKey =
     {
       'google-vertex': 'google',
      'google-vertex-anthropic': 'anthropic',
      'ai-gateway': 'gateway'
    }[rawProviderId] || rawProviderId
 
+  if (rawProviderKey === 'cherryin') {
+    rawProviderKey = { gemini: 'google' }[actualProvider.type] || actualProvider.type
+  }
+
   // 返回 AI Core SDK 要求的格式:{ 'providerId': providerOptions }
   return {
     [rawProviderKey]: providerSpecificOptions
diff --git a/src/renderer/src/components/EmojiPicker/index.tsx b/src/renderer/src/components/EmojiPicker/index.tsx
index 8ba9d3e967..9a4158d469 100644
--- a/src/renderer/src/components/EmojiPicker/index.tsx
+++ b/src/renderer/src/components/EmojiPicker/index.tsx
@@ -1,35 +1,120 @@
+import 'emoji-picker-element'
+
 import TwemojiCountryFlagsWoff2 from '@renderer/assets/fonts/country-flag-fonts/TwemojiCountryFlags.woff2?url'
 import { useTheme } from '@renderer/context/ThemeProvider'
+import type { LanguageVarious } from '@renderer/types'
 import { polyfillCountryFlagEmojis } from 'country-flag-emoji-polyfill'
+// i18n translations from emoji-picker-element
+import de from 'emoji-picker-element/i18n/de'
+import en from 'emoji-picker-element/i18n/en'
+import es from 'emoji-picker-element/i18n/es'
+import fr from 'emoji-picker-element/i18n/fr'
+import ja from 'emoji-picker-element/i18n/ja'
+import pt_PT from 'emoji-picker-element/i18n/pt_PT'
+import ru_RU from 'emoji-picker-element/i18n/ru_RU'
+import zh_CN from 'emoji-picker-element/i18n/zh_CN'
+import type Picker from 'emoji-picker-element/picker'
+import type { EmojiClickEvent, NativeEmoji } from 'emoji-picker-element/shared'
+// Emoji data from emoji-picker-element-data (local, no CDN)
+// Using CLDR format for full multi-language search support (28 languages)
+import dataDE from 'emoji-picker-element-data/de/cldr/data.json?url'
+import dataEN from 'emoji-picker-element-data/en/cldr/data.json?url'
+import dataES from 'emoji-picker-element-data/es/cldr/data.json?url'
+import dataFR from 'emoji-picker-element-data/fr/cldr/data.json?url'
+import dataJA from 'emoji-picker-element-data/ja/cldr/data.json?url'
+import dataPT from 'emoji-picker-element-data/pt/cldr/data.json?url'
+import dataRU from 'emoji-picker-element-data/ru/cldr/data.json?url'
+import dataZH from 'emoji-picker-element-data/zh/cldr/data.json?url'
+import dataZH_HANT from 'emoji-picker-element-data/zh-hant/cldr/data.json?url'
 import type { FC } from 'react'
 import { useEffect, useRef } from 'react'
+import { useTranslation } from 'react-i18next'
 
 interface Props {
   onEmojiClick: (emoji: string) => void
 }
 
+// Mapping from app locale to emoji-picker-element i18n
+const i18nMap: Record = {
+  'en-US': en,
+  'zh-CN': zh_CN,
+  'zh-TW': zh_CN, // Closest available
+  'de-DE': de,
+  'el-GR': en, // No Greek available, fallback to English
+  'es-ES': es,
+  'fr-FR': fr,
+  'ja-JP': ja,
+  'pt-PT': pt_PT,
+  'ru-RU': ru_RU
+}
+
+// Mapping from app locale to emoji data URL
+// Using CLDR format provides native language search support for all locales
+const dataSourceMap: Record = {
+  'en-US': dataEN,
+  'zh-CN': dataZH,
+  'zh-TW': dataZH_HANT,
+  'de-DE': dataDE,
+  'el-GR': dataEN, // No Greek CLDR available, fallback to English
+  'es-ES': dataES,
+  'fr-FR': dataFR,
+  'ja-JP': dataJA,
+  'pt-PT': dataPT,
+  'ru-RU': dataRU
+}
+
+// Mapping from app locale to emoji-picker-element locale string
+// Must match the data source locale for proper IndexedDB caching
+const localeMap: Record = {
+  'en-US': 'en',
+  'zh-CN': 'zh',
+  'zh-TW': 'zh-hant',
+  'de-DE': 'de',
+  'el-GR': 'en',
+  'es-ES': 'es',
+  'fr-FR': 'fr',
+  'ja-JP': 'ja',
+  'pt-PT': 'pt',
+  'ru-RU': 'ru'
+}
+
 const EmojiPicker: FC = ({ onEmojiClick }) => {
   const { theme } = useTheme()
-  const ref = useRef(null)
+  const { i18n } = useTranslation()
+  const ref = useRef(null)
+  const currentLocale = i18n.language as LanguageVarious
 
   useEffect(() => {
     polyfillCountryFlagEmojis('Twemoji Mozilla', TwemojiCountryFlagsWoff2)
   }, [])
 
+  // Configure picker with i18n and dataSource
   useEffect(() => {
-    const refValue = ref.current
+    const picker = ref.current
+    if (picker) {
+      picker.i18n = i18nMap[currentLocale] || en
+      picker.dataSource = dataSourceMap[currentLocale] || dataEN
+      picker.locale = localeMap[currentLocale] || 'en'
+    }
+  }, [currentLocale])
 
-    if (refValue) {
-      const handleEmojiClick = (event: any) => {
+  useEffect(() => {
+    const picker = ref.current
+
+    if (picker) {
+      const handleEmojiClick = (event: EmojiClickEvent) => {
         event.stopPropagation()
-        onEmojiClick(event.detail.unicode || event.detail.emoji.unicode)
+        const { detail } = event
+        // Use detail.unicode (processed with skin tone) or fallback to emoji's unicode for native emoji
+        const unicode = detail.unicode || ('unicode' in detail.emoji ? (detail.emoji as NativeEmoji).unicode : '')
+        onEmojiClick(unicode)
       }
 
       // 添加事件监听器
-      refValue.addEventListener('emoji-click', handleEmojiClick)
+      picker.addEventListener('emoji-click', handleEmojiClick)
 
       // 清理事件监听器
       return () => {
-        refValue.removeEventListener('emoji-click', handleEmojiClick)
+        picker.removeEventListener('emoji-click', handleEmojiClick)
       }
     }
     return
diff --git a/src/renderer/src/components/Popups/agent/AgentModal.tsx b/src/renderer/src/components/Popups/agent/AgentModal.tsx
index d504699399..2574cbe669 100644
--- a/src/renderer/src/components/Popups/agent/AgentModal.tsx
+++ b/src/renderer/src/components/Popups/agent/AgentModal.tsx
@@ -1,5 +1,4 @@
 import { loggerService } from '@logger'
-import ClaudeIcon from '@renderer/assets/images/models/claude.png'
 import { ErrorBoundary } from '@renderer/components/ErrorBoundary'
 import { TopView } from '@renderer/components/TopView'
 import { permissionModeCards } from '@renderer/config/agent'
@@ -9,7 +8,6 @@ import SelectAgentBaseModelButton from '@renderer/pages/home/components/SelectAg
 import type {
   AddAgentForm,
   AgentEntity,
-  AgentType,
   ApiModel,
   BaseAgentForm,
   PermissionMode,
@@ -17,30 +15,22 @@ import type {
   UpdateAgentForm
 } from '@renderer/types'
 import { AgentConfigurationSchema, isAgentType } from '@renderer/types'
-import { Avatar, Button, Input, Modal, Select } from 'antd'
+import { Button, Input, Modal, Select } from 'antd'
 import { AlertTriangleIcon } from 'lucide-react'
 import type { ChangeEvent, FormEvent } from 'react'
 import { useCallback, useEffect, useMemo, useRef, useState } from 'react'
 import { useTranslation } from 'react-i18next'
 import styled from 'styled-components'
 
-import type { BaseOption } from './shared'
-
 const { TextArea } = Input
 
 const logger = loggerService.withContext('AddAgentPopup')
 
-interface AgentTypeOption extends BaseOption {
-  type: 'type'
-  key: AgentEntity['type']
-  name: AgentEntity['name']
-}
-
 type AgentWithTools = AgentEntity & { tools?: Tool[] }
 
 const buildAgentForm = (existing?: AgentWithTools): BaseAgentForm => ({
   type: existing?.type ?? 'claude-code',
-  name: existing?.name ?? 'Claude Code',
+  name: existing?.name ?? 'Agent',
   description: existing?.description,
   instructions: existing?.instructions,
   model: existing?.model ?? '',
@@ -100,54 +90,6 @@ const PopupContainer: React.FC = ({ agent, afterSubmit, resolve }) => {
     })
   }, [])
 
-  // add supported agents type here.
-  const agentConfig = useMemo(
-    () =>
-      [
-        {
-          type: 'type',
-          key: 'claude-code',
-          label: 'Claude Code',
-          name: 'Claude Code',
-          avatar: ClaudeIcon
-        }
-      ] as const satisfies AgentTypeOption[],
-    []
-  )
-
-  const agentOptions = useMemo(
-    () =>
-      agentConfig.map((option) => ({
-        value: option.key,
-        label: (
-
-          {option.label}
-
-        )
-      })),
-    [agentConfig]
-  )
-
-  const onAgentTypeChange = useCallback(
-    (value: AgentType) => {
-      const prevConfig = agentConfig.find((config) => config.key === form.type)
-      let newName: string | undefined = form.name
-      if (prevConfig && prevConfig.name === form.name) {
-        const newConfig = agentConfig.find((config) => config.key === value)
-        if (newConfig) {
-          newName = newConfig.name
-        }
-      }
-      setForm((prev) => ({
-        ...prev,
-        type: value,
-        name: newName
-      }))
-    },
-    [agentConfig, form.name, form.type]
-  )
-
   const onNameChange = useCallback((e: ChangeEvent) => {
     setForm((prev) => ({
       ...prev,
@@ -155,12 +97,12 @@ const PopupContainer: React.FC = ({ agent, afterSubmit, resolve }) => {
     }))
   }, [])
 
-  const onDescChange = useCallback((e: ChangeEvent) => {
-    setForm((prev) => ({
-      ...prev,
-      description: e.target.value
-    }))
-  }, [])
+  // const onDescChange = useCallback((e: ChangeEvent) => {
+  //   setForm((prev) => ({
+  //     ...prev,
+  //     description: e.target.value
+  //   }))
+  // }, [])
 
   const onInstChange = useCallback((e: ChangeEvent) => {
     setForm((prev) => ({
@@ -334,16 +276,6 @@ const PopupContainer: React.FC = ({ agent, afterSubmit, resolve }) => {
-
-
-