Mirror of https://github.com/CherryHQ/cherry-studio.git (synced 2025-12-24 02:20:10 +08:00)

Commit b3c7a1db91: Merge remote-tracking branch 'origin/main' into feat/proxy-api-server
.github/workflows/auto-i18n.yml (2 changes, vendored)
@@ -23,7 +23,7 @@ jobs:
     steps:
       - name: 🐈⬛ Checkout
-        uses: actions/checkout@v5
+        uses: actions/checkout@v6
         with:
           fetch-depth: 0
.github/workflows/claude-code-review.yml (2 changes, vendored)
@@ -27,7 +27,7 @@ jobs:
     steps:
       - name: Checkout repository
-        uses: actions/checkout@v5
+        uses: actions/checkout@v6
         with:
          fetch-depth: 1
.github/workflows/claude-translator.yml (2 changes, vendored)
@@ -32,7 +32,7 @@ jobs:
     steps:
       - name: Checkout repository
-        uses: actions/checkout@v5
+        uses: actions/checkout@v6
         with:
          fetch-depth: 1
.github/workflows/claude.yml (2 changes, vendored)
@@ -37,7 +37,7 @@ jobs:
       actions: read # Required for Claude to read CI results on PRs
     steps:
       - name: Checkout repository
-        uses: actions/checkout@v5
+        uses: actions/checkout@v6
         with:
          fetch-depth: 1
.github/workflows/github-issue-tracker.yml (6 changes, vendored)
@@ -19,7 +19,7 @@ jobs:
     steps:
       - name: Checkout repository
-        uses: actions/checkout@v4
+        uses: actions/checkout@v6

       - name: Check Beijing Time
         id: check_time
@@ -42,7 +42,7 @@ jobs:
       - name: Add pending label if in quiet hours
         if: steps.check_time.outputs.should_delay == 'true'
-        uses: actions/github-script@v7
+        uses: actions/github-script@v8
         with:
           script: |
             github.rest.issues.addLabels({
@@ -118,7 +118,7 @@ jobs:
     steps:
       - name: Checkout repository
-        uses: actions/checkout@v4
+        uses: actions/checkout@v6

       - name: Setup Node.js
         uses: actions/setup-node@v6
.github/workflows/nightly-build.yml (2 changes, vendored)
@@ -51,7 +51,7 @@ jobs:
     steps:
       - name: Check out Git repository
-        uses: actions/checkout@v5
+        uses: actions/checkout@v6
         with:
           ref: main
.github/workflows/pr-ci.yml (2 changes, vendored)
@@ -21,7 +21,7 @@ jobs:
     steps:
       - name: Check out Git repository
-        uses: actions/checkout@v5
+        uses: actions/checkout@v6

       - name: Install Node.js
         uses: actions/setup-node@v6
.github/workflows/release.yml (2 changes, vendored)
@@ -25,7 +25,7 @@ jobs:
     steps:
       - name: Check out Git repository
-        uses: actions/checkout@v5
+        uses: actions/checkout@v6
         with:
           fetch-depth: 0
.github/workflows/update-app-upgrade-config.yml (36 changes, vendored)
@@ -19,10 +19,9 @@ on:
 permissions:
   contents: write
-  pull-requests: write

 jobs:
-  propose-update:
+  update-config:
     runs-on: ubuntu-latest
     if: github.event_name == 'workflow_dispatch' || (github.event_name == 'release' && github.event.release.draft == false)

@@ -135,7 +134,7 @@ jobs:
       - name: Checkout default branch
         if: steps.check.outputs.should_run == 'true'
-        uses: actions/checkout@v5
+        uses: actions/checkout@v6
         with:
           ref: ${{ github.event.repository.default_branch }}
           path: main

@@ -143,7 +142,7 @@ jobs:
       - name: Checkout x-files/app-upgrade-config branch
         if: steps.check.outputs.should_run == 'true'
-        uses: actions/checkout@v5
+        uses: actions/checkout@v6
         with:
           ref: x-files/app-upgrade-config
           path: cs

@@ -187,25 +186,20 @@ jobs:
           echo "changed=true" >> "$GITHUB_OUTPUT"
          fi

-      - name: Create pull request
+      - name: Commit and push changes
         if: steps.check.outputs.should_run == 'true' && steps.diff.outputs.changed == 'true'
-        uses: peter-evans/create-pull-request@v7
-        with:
-          path: cs
-          base: x-files/app-upgrade-config
-          branch: chore/update-app-upgrade-config/${{ steps.meta.outputs.safe_tag }}
-          commit-message: "🤖 chore: sync app-upgrade-config for ${{ steps.meta.outputs.tag }}"
-          title: "chore: update app-upgrade-config for ${{ steps.meta.outputs.tag }}"
-          body: |
-            Automated update triggered by `${{ steps.meta.outputs.trigger }}`.
+        working-directory: cs
+        run: |
+          git config user.name "github-actions[bot]"
+          git config user.email "github-actions[bot]@users.noreply.github.com"
+          git add app-upgrade-config.json
+          git commit -m "chore: sync app-upgrade-config for ${{ steps.meta.outputs.tag }}" -m "Automated update triggered by \`${{ steps.meta.outputs.trigger }}\`.

-            - Source tag: `${{ steps.meta.outputs.tag }}`
-            - Pre-release: `${{ steps.meta.outputs.prerelease }}`
-            - Latest: `${{ steps.meta.outputs.latest }}`
-            - Workflow run: https://github.com/${{ github.repository }}/actions/runs/${{ github.run_id }}
-          labels: |
-            automation
-            app-upgrade
+          - Source tag: \`${{ steps.meta.outputs.tag }}\`
+          - Pre-release: \`${{ steps.meta.outputs.prerelease }}\`
+          - Latest: \`${{ steps.meta.outputs.latest }}\`
+          - Workflow run: https://github.com/${{ github.repository }}/actions/runs/${{ github.run_id }}"
+          git push origin x-files/app-upgrade-config

       - name: No changes detected
         if: steps.check.outputs.should_run == 'true' && steps.diff.outputs.changed != 'true'
@@ -1,5 +1,5 @@
 diff --git a/sdk.mjs b/sdk.mjs
-index bf429a344b7d59f70aead16b639f949b07688a81..f77d50cc5d3fb04292cb3ac7fa7085d02dcc628f 100755
+index dea7766a3432a1e809f12d6daba4f2834a219689..e0b02ef73da177ba32b903887d7bbbeaa08cc6d3 100755
 --- a/sdk.mjs
 +++ b/sdk.mjs
 @@ -6250,7 +6250,7 @@ function createAbortController(maxListeners = DEFAULT_MAX_LISTENERS) {
@@ -11,7 +11,7 @@ index bf429a344b7d59f70aead16b639f949b07688a81..f77d50cc5d3fb04292cb3ac7fa7085d0
 import { createInterface } from "readline";

 // ../src/utils/fsOperations.ts
-@@ -6619,18 +6619,11 @@ class ProcessTransport {
+@@ -6644,18 +6644,11 @@ class ProcessTransport {
 const errorMessage = isNativeBinary(pathToClaudeCodeExecutable) ? `Claude Code native binary not found at ${pathToClaudeCodeExecutable}. Please ensure Claude Code is installed via native installer or specify a valid path with options.pathToClaudeCodeExecutable.` : `Claude Code executable not found at ${pathToClaudeCodeExecutable}. Is options.pathToClaudeCodeExecutable set?`;
 throw new ReferenceError(errorMessage);
 }
@@ -134,108 +134,66 @@ artifactBuildCompleted: scripts/artifact-build-completed.js
 releaseInfo:
   releaseNotes: |
     <!--LANG:en-->
-    A New Era of Intelligence with Cherry Studio 1.7.1
+    Cherry Studio 1.7.2 - Stability & Enhancement Update

-    Today we're releasing Cherry Studio 1.7.1 — our most ambitious update yet, introducing Agent: autonomous AI that thinks, plans, and acts.
+    This release focuses on stability improvements, bug fixes, and quality-of-life enhancements.

-    For years, AI assistants have been reactive — waiting for your commands, responding to your questions. With Agent, we're changing that. Now, AI can truly work alongside you: understanding complex goals, breaking them into steps, and executing them independently.
+    🔧 Improvements
+    - Enhanced update dialog functionality and state management
+    - Improved ImageViewer context menu UX
+    - Better temperature and top_p parameter handling
+    - User-configurable stream options for OpenAI API
+    - Translation feature now supports document files

-    This is what we've been building toward. And it's just the beginning.
+    🤖 AI & Models
+    - Added explicit thinking token support for Gemini 3 Pro Image
+    - Updated DeepSeek logic to match DeepSeek v3.2
+    - Updated AiOnly default models
+    - Updated AI model configurations to latest versions

-    🤖 Meet Agent
-    Imagine having a brilliant colleague who never sleeps. Give Agent a goal — write a report, analyze data, refactor code — and watch it work. It reasons through problems, breaks them into steps, calls the right tools, and adapts when things change.
+    ♿ Accessibility
+    - Improved screen reader (NVDA) support with aria-label attributes
+    - Added Slovak language support for spell check

-    - **Think → Plan → Act**: From goal to execution, fully autonomous
-    - **Deep Reasoning**: Multi-turn thinking that solves real problems
-    - **Tool Mastery**: File operations, web search, code execution, and more
-    - **Skill Plugins**: Extend with custom commands and capabilities
-    - **You Stay in Control**: Real-time approval for sensitive actions
-    - **Full Visibility**: Every thought, every decision, fully transparent
-
-    🌐 Expanding Ecosystem
-    - **New Providers**: HuggingFace, Mistral, CherryIN, AI Gateway, Intel OVMS, Didi MCP
-    - **New Models**: Claude 4.5 Haiku, DeepSeek v3.2, GLM-4.6, Doubao, Ling series
-    - **MCP Integration**: Alibaba Cloud, ModelScope, Higress, MCP.so, TokenFlux and more
-
-    📚 Smarter Knowledge Base
-    - **OpenMinerU**: Self-hosted document processing
-    - **Full-Text Search**: Find anything instantly across your notes
-    - **Enhanced Tool Selection**: Smarter configuration for better AI assistance
-
-    📝 Notes, Reimagined
-    - Full-text search with highlighted results
-    - AI-powered smart rename
-    - Export as image
-    - Auto-wrap for tables
-
-    🖼️ Image & OCR
-    - Intel OVMS painting capabilities
-    - Intel OpenVINO NPU-accelerated OCR
-
-    🌍 Now in 10+ Languages
-    - Added German support
-    - Enhanced internationalization
-
-    ⚡ Faster & More Polished
-    - Electron 38 upgrade
-    - New MCP management interface
-    - Dozens of UI refinements
-
-    ❤️ Fully Open Source
-    Commercial restrictions removed. Cherry Studio now follows standard AGPL v3 — free for teams of any size.
-
-    The Agent Era is here. We can't wait to see what you'll create.
+    🐛 Bug Fixes
+    - Fixed Quick Assistant shortcut registration issue
+    - Fixed UI freeze on multi-file selection via batch processing
+    - Fixed assistant default model update when editing model capabilities
+    - Fixed provider handling and API key rotation logic
+    - Fixed OVMS API URL path formation
+    - Fixed custom parameters placement for Vercel AI Gateway
+    - Fixed topic message blocks clearing
+    - Fixed input bar blocking enter send while generating

     <!--LANG:zh-CN-->
-    Cherry Studio 1.7.1:开启智能新纪元
+    Cherry Studio 1.7.2 - 稳定性与功能增强更新

-    今天,我们正式发布 Cherry Studio 1.7.1 —— 迄今最具雄心的版本,带来全新的 Agent:能够自主思考、规划和行动的 AI。
+    本次更新专注于稳定性改进、问题修复和用户体验提升。

-    多年来,AI 助手一直是被动的——等待你的指令,回应你的问题。Agent 改变了这一切。现在,AI 能够真正与你并肩工作:理解复杂目标,将其拆解为步骤,并独立执行。
+    🔧 功能改进
+    - 增强更新对话框功能和状态管理
+    - 优化图片查看器右键菜单体验
+    - 改进温度和 top_p 参数处理逻辑
+    - 支持用户自定义 OpenAI API 流式选项
+    - 翻译功能现已支持文档文件

-    这是我们一直在构建的未来。而这,仅仅是开始。
+    🤖 AI 与模型
+    - 为 Gemini 3 Pro Image 添加显式思考 token 支持
+    - 更新 DeepSeek 逻辑以适配 DeepSeek v3.2
+    - 更新 AiOnly 默认模型
+    - 更新 AI 模型配置至最新版本

-    🤖 认识 Agent
-    想象一位永不疲倦的得力伙伴。给 Agent 一个目标——撰写报告、分析数据、重构代码——然后看它工作。它会推理问题、拆解步骤、调用工具,并在情况变化时灵活应对。
+    ♿ 无障碍支持
+    - 改进屏幕阅读器 (NVDA) 支持,添加 aria-label 属性
+    - 新增斯洛伐克语拼写检查支持

-    - **思考 → 规划 → 行动**:从目标到执行,全程自主
-    - **深度推理**:多轮思考,解决真实问题
-    - **工具大师**:文件操作、网络搜索、代码执行,样样精通
-    - **技能插件**:自定义命令,无限扩展
-    - **你掌控全局**:敏感操作,实时审批
-    - **完全透明**:每一步思考,每一个决策,清晰可见
-
-    🌐 生态持续壮大
-    - **新增服务商**:Hugging Face、Mistral、Perplexity、SophNet、AI Gateway、Cerebras AI
-    - **新增模型**:Gemini 3、Gemini 3 Pro(支持图像预览)、GPT-5.1、Claude Opus 4.5
-    - **MCP 集成**:百炼、魔搭、Higress、MCP.so、TokenFlux 等平台
-
-    📚 更智能的知识库
-    - **OpenMinerU**:本地自部署文档处理
-    - **全文搜索**:笔记内容一搜即达
-    - **增强工具选择**:更智能的配置,更好的 AI 协助
-
-    📝 笔记,焕然一新
-    - 全文搜索,结果高亮
-    - AI 智能重命名
-    - 导出为图片
-    - 表格自动换行
-
-    🖼️ 图像与 OCR
-    - Intel OVMS 绘图能力
-    - Intel OpenVINO NPU 加速 OCR
-
-    🌍 支持 10+ 种语言
-    - 新增德语支持
-    - 全面增强国际化
-
-    ⚡ 更快、更精致
-    - 升级 Electron 38
-    - 新的 MCP 管理界面
-    - 数十处 UI 细节打磨
-
-    ❤️ 完全开源
-    商用限制已移除。Cherry Studio 现遵循标准 AGPL v3 协议——任意规模团队均可自由使用。
-
-    Agent 纪元已至。期待你的创造。
+    🐛 问题修复
+    - 修复快捷助手无法注册快捷键的问题
+    - 修复多文件选择时 UI 冻结问题(通过批处理优化)
+    - 修复编辑模型能力时助手默认模型更新问题
+    - 修复服务商处理和 API 密钥轮换逻辑
+    - 修复 OVMS API URL 路径格式问题
+    - 修复 Vercel AI Gateway 自定义参数位置问题
+    - 修复话题消息块清理问题
+    - 修复生成时输入框阻止回车发送的问题
     <!--LANG:END-->
@@ -1,6 +1,6 @@
 {
   "name": "CherryStudio",
-  "version": "1.7.1",
+  "version": "1.7.2",
   "private": true,
   "description": "A powerful AI assistant for producer.",
   "main": "./out/main/index.js",
@@ -50,7 +50,7 @@
   "generate:icons": "electron-icon-builder --input=./build/logo.png --output=build",
   "analyze:renderer": "VISUALIZER_RENDERER=true yarn build",
   "analyze:main": "VISUALIZER_MAIN=true yarn build",
-  "typecheck": "concurrently -n \"node,web\" -c \"cyan,magenta\" \"npm run typecheck:node\" \"npm run typecheck:web\"",
+  "typecheck": "npm run typecheck:node && npm run typecheck:web",
   "typecheck:node": "tsgo --noEmit -p tsconfig.node.json --composite false",
   "typecheck:web": "tsgo --noEmit -p tsconfig.web.json --composite false",
   "check:i18n": "dotenv -e .env -- tsx scripts/check-i18n.ts",
@@ -81,7 +81,7 @@
   "release:ai-sdk-provider": "yarn workspace @cherrystudio/ai-sdk-provider version patch --immediate && yarn workspace @cherrystudio/ai-sdk-provider build && yarn workspace @cherrystudio/ai-sdk-provider npm publish --access public"
 },
 "dependencies": {
-  "@anthropic-ai/claude-agent-sdk": "patch:@anthropic-ai/claude-agent-sdk@npm%3A0.1.53#~/.yarn/patches/@anthropic-ai-claude-agent-sdk-npm-0.1.53-4b77f4cf29.patch",
+  "@anthropic-ai/claude-agent-sdk": "patch:@anthropic-ai/claude-agent-sdk@npm%3A0.1.62#~/.yarn/patches/@anthropic-ai-claude-agent-sdk-npm-0.1.62-23ae56f8c8.patch",
   "@libsql/client": "0.14.0",
   "@libsql/win32-x64-msvc": "^0.4.7",
   "@napi-rs/system-ocr": "patch:@napi-rs/system-ocr@npm%3A1.0.2#~/.yarn/patches/@napi-rs-system-ocr-npm-1.0.2-59e7a78e8b.patch",
@@ -257,7 +257,6 @@
   "clsx": "^2.1.1",
   "code-inspector-plugin": "^0.20.14",
   "color": "^5.0.0",
-  "concurrently": "^9.2.1",
   "country-flag-emoji-polyfill": "0.1.8",
   "dayjs": "^1.11.11",
   "dexie": "^4.0.8",
@@ -135,10 +135,8 @@ export class StreamEventManager {
     // 构建新的对话消息
     const newMessages: ModelMessage[] = [
       ...(context.originalParams.messages || []),
-      {
-        role: 'assistant',
-        content: textBuffer
-      },
+      // 只有当 textBuffer 有内容时才添加 assistant 消息,避免空消息导致 API 错误
+      ...(textBuffer ? [{ role: 'assistant' as const, content: textBuffer }] : []),
       {
         role: 'user',
         content: toolResultsText
@@ -90,6 +90,8 @@ export enum IpcChannel {
   Mcp_AbortTool = 'mcp:abort-tool',
   Mcp_GetServerVersion = 'mcp:get-server-version',
   Mcp_Progress = 'mcp:progress',
+  Mcp_GetServerLogs = 'mcp:get-server-logs',
+  Mcp_ServerLog = 'mcp:server-log',

   // Python
   Python_Execute = 'python:execute',
@@ -293,6 +295,8 @@ export enum IpcChannel {
   Selection_ActionWindowClose = 'selection:action-window-close',
   Selection_ActionWindowMinimize = 'selection:action-window-minimize',
   Selection_ActionWindowPin = 'selection:action-window-pin',
+  // [Windows only] Electron bug workaround - can be removed once https://github.com/electron/electron/issues/48554 is fixed
+  Selection_ActionWindowResize = 'selection:action-window-resize',
   Selection_ProcessAction = 'selection:process-action',
   Selection_UpdateActionData = 'selection:update-action-data',
@@ -106,16 +106,11 @@ export function getSdkClient(
       fetch: customFetch
     })
   }
-  let baseURL =
+  const baseURL =
     provider.type === 'anthropic'
       ? provider.apiHost
      : (provider.anthropicApiHost && provider.anthropicApiHost.trim()) || provider.apiHost

-  // Anthropic SDK automatically appends /v1 to all endpoints (like /v1/messages, /v1/models)
-  // We need to strip api version from baseURL to avoid duplication (e.g., /v3/v1/models)
-  // formatProviderApiHost adds /v1 for AI SDK compatibility, but Anthropic SDK needs it removed
-  baseURL = baseURL.replace(/\/v\d+(?:alpha|beta)?(?=\/|$)/i, '')

   logger.debug('Anthropic API baseURL', { baseURL, providerId: provider.id })

   if (provider.id === 'aihubmix') {
@@ -43,6 +43,23 @@ export function withoutTrailingSharp<T extends string>(url: T): T {
   return url.replace(/#$/, '') as T
 }

+/**
+ * Checks if a URL string ends with a trailing '#' character.
+ *
+ * @template T - The string type to preserve type safety
+ * @param {T} url - The URL string to check
+ * @returns {boolean} True if the URL ends with '#', false otherwise
+ *
+ * @example
+ * ```ts
+ * isWithTrailingSharp('https://example.com#') // true
+ * isWithTrailingSharp('https://example.com') // false
+ * ```
+ */
+export function isWithTrailingSharp<T extends string>(url: T): boolean {
+  return url.endsWith('#')
+}

 /**
  * Matches a version segment in a path that starts with `/v<number>` and optionally
  * continues with `alpha` or `beta`. The segment may be followed by `/` or the end
@@ -23,6 +23,14 @@ export type MCPProgressEvent = {
   progress: number // 0-1 range
 }

+export type MCPServerLogEntry = {
+  timestamp: number
+  level: 'debug' | 'info' | 'warn' | 'error' | 'stderr' | 'stdout'
+  message: string
+  data?: any
+  source?: string
+}

 export type WebviewKeyEvent = {
   webviewId: number
   key: string
@@ -11,6 +11,7 @@ import {
   formatAzureOpenAIApiHost,
   formatOllamaApiHost,
   formatVertexApiHost,
+  isWithTrailingSharp,
   routeToEndpoint,
   withoutTrailingSlash
 } from '../api'
@@ -63,17 +64,17 @@ export function defaultFormatAzureOpenAIApiHost(host: string): string {
  */
 export function formatProviderApiHost<T extends MinimalProvider>(provider: T, context: ProviderFormatContext): T {
   const formatted = { ...provider }

+  const appendApiVersion = !isWithTrailingSharp(provider.apiHost)
   // Format anthropicApiHost if present
   if (formatted.anthropicApiHost) {
-    formatted.anthropicApiHost = formatApiHost(formatted.anthropicApiHost)
+    formatted.anthropicApiHost = formatApiHost(formatted.anthropicApiHost, appendApiVersion)
   }

   // Format based on provider type
   if (isAnthropicProvider(provider)) {
     const baseHost = formatted.anthropicApiHost || formatted.apiHost
     // AI SDK needs /v1 in baseURL
-    formatted.apiHost = formatApiHost(baseHost)
+    formatted.apiHost = formatApiHost(baseHost, appendApiVersion)
     if (!formatted.anthropicApiHost) {
       formatted.anthropicApiHost = formatted.apiHost
     }
@@ -82,7 +83,7 @@ export function formatProviderApiHost<T extends MinimalProvider>(provider: T, context: ProviderFormatContext): T {
   } else if (isOllamaProvider(formatted)) {
     formatted.apiHost = formatOllamaApiHost(formatted.apiHost)
   } else if (isGeminiProvider(formatted)) {
-    formatted.apiHost = formatApiHost(formatted.apiHost, true, 'v1beta')
+    formatted.apiHost = formatApiHost(formatted.apiHost, appendApiVersion, 'v1beta')
   } else if (isAzureOpenAIProvider(formatted)) {
     formatted.apiHost = formatAzureOpenAIApiHost(formatted.apiHost)
   } else if (isVertexProvider(formatted)) {
@@ -92,7 +93,7 @@ export function formatProviderApiHost<T extends MinimalProvider>(provider: T, context: ProviderFormatContext): T {
   } else if (isPerplexityProvider(formatted)) {
     formatted.apiHost = formatApiHost(formatted.apiHost, false)
   } else {
-    formatted.apiHost = formatApiHost(formatted.apiHost)
+    formatted.apiHost = formatApiHost(formatted.apiHost, appendApiVersion)
   }

   return formatted
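A note on the effect of this change, as a minimal sketch. It assumes formatApiHost(host, appendApiVersion) only appends the version segment when its second argument is true, which matches the mocked behavior and expectations in the provider tests further down; the host strings below are illustrative only. Ending apiHost with '#' now opts the provider out of automatic version appending for every provider type, not just OpenAI.

  import { formatApiHost, isWithTrailingSharp } from './api'

  // Hypothetical hosts for illustration only.
  const hostWithOptOut = 'https://example-gateway.local/custom/path#'
  const normalHost = 'https://example-gateway.local'

  // Mirrors the appendApiVersion computation added in formatProviderApiHost.
  const appendApiVersion = !isWithTrailingSharp(hostWithOptOut) // false for the '#'-terminated host

  formatApiHost(normalHost, true) // expected (per the test mocks): 'https://example-gateway.local/v1'
  formatApiHost(hostWithOptOut, appendApiVersion) // expected: no version segment is appended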
@@ -19,8 +19,8 @@ import { agentService } from './services/agents'
 import { apiServerService } from './services/ApiServerService'
 import { appMenuService } from './services/AppMenuService'
 import { configManager } from './services/ConfigManager'
-import mcpService from './services/MCPService'
 import { nodeTraceService } from './services/NodeTraceService'
+import mcpService from './services/MCPService'
 import powerMonitorService from './services/PowerMonitorService'
 import {
   CHERRY_STUDIO_PROTOCOL,
@@ -765,6 +765,8 @@ export function registerIpc(mainWindow: BrowserWindow, app: Electron.App) {
   ipcMain.handle(IpcChannel.Mcp_CheckConnectivity, mcpService.checkMcpConnectivity)
   ipcMain.handle(IpcChannel.Mcp_AbortTool, mcpService.abortTool)
   ipcMain.handle(IpcChannel.Mcp_GetServerVersion, mcpService.getServerVersion)
+  ipcMain.handle(IpcChannel.Mcp_GetServerLogs, mcpService.getServerLogs)
+  ipcMain.handle(IpcChannel.Mcp_GetServerLogs, mcpService.getServerLogs)

   // DXT upload handler
   ipcMain.handle(IpcChannel.Mcp_UploadDxt, async (event, fileBuffer: ArrayBuffer, fileName: string) => {
@@ -33,6 +33,7 @@ import {
 import { nanoid } from '@reduxjs/toolkit'
 import { HOME_CHERRY_DIR } from '@shared/config/constant'
 import type { MCPProgressEvent } from '@shared/config/types'
+import type { MCPServerLogEntry } from '@shared/config/types'
 import { IpcChannel } from '@shared/IpcChannel'
 import { defaultAppHeaders } from '@shared/utils'
 import {
@@ -56,6 +57,7 @@ import { CacheService } from './CacheService'
 import DxtService from './DxtService'
 import { CallBackServer } from './mcp/oauth/callback'
 import { McpOAuthClientProvider } from './mcp/oauth/provider'
+import { ServerLogBuffer } from './mcp/ServerLogBuffer'
 import { windowService } from './WindowService'

 // Generic type for caching wrapped functions
@@ -142,6 +144,7 @@ class McpService {
   private pendingClients: Map<string, Promise<Client>> = new Map()
   private dxtService = new DxtService()
   private activeToolCalls: Map<string, AbortController> = new Map()
+  private serverLogs = new ServerLogBuffer(200)

   constructor() {
     this.initClient = this.initClient.bind(this)
@@ -172,6 +175,19 @@ class McpService {
     })
   }

+  private emitServerLog(server: MCPServer, entry: MCPServerLogEntry) {
+    const serverKey = this.getServerKey(server)
+    this.serverLogs.append(serverKey, entry)
+    const mainWindow = windowService.getMainWindow()
+    if (mainWindow) {
+      mainWindow.webContents.send(IpcChannel.Mcp_ServerLog, { ...entry, serverId: server.id })
+    }
+  }
+
+  public getServerLogs(_: Electron.IpcMainInvokeEvent, server: MCPServer): MCPServerLogEntry[] {
+    return this.serverLogs.get(this.getServerKey(server))
+  }

   async initClient(server: MCPServer): Promise<Client> {
     const serverKey = this.getServerKey(server)
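Every lifecycle and transport event in the hunks below flows through this single helper: it appends the entry to the per-server ring buffer and, when the main window exists, pushes it to the renderer on IpcChannel.Mcp_ServerLog. A minimal call-site sketch (the warning text is illustrative only; `server` is assumed to be an MCPServer already in scope inside a McpService method, as in the real call sites):

  // Inside a McpService method with `server: MCPServer` in scope:
  this.emitServerLog(server, {
    timestamp: Date.now(),
    level: 'warn',
    message: 'Tool call took longer than expected', // illustrative message
    source: 'client'
  })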
@@ -366,9 +382,25 @@ class McpService {
       }

       const stdioTransport = new StdioClientTransport(transportOptions)
-      stdioTransport.stderr?.on('data', (data) =>
-        getServerLogger(server).debug(`Stdio stderr`, { data: data.toString() })
-      )
+      stdioTransport.stderr?.on('data', (data) => {
+        const msg = data.toString()
+        getServerLogger(server).debug(`Stdio stderr`, { data: msg })
+        this.emitServerLog(server, {
+          timestamp: Date.now(),
+          level: 'stderr',
+          message: msg.trim(),
+          source: 'stdio'
+        })
+      })
+      ;(stdioTransport as any).stdout?.on('data', (data: any) => {
+        const msg = data.toString()
+        this.emitServerLog(server, {
+          timestamp: Date.now(),
+          level: 'stdout',
+          message: msg.trim(),
+          source: 'stdio'
+        })
+      })
       return stdioTransport
     } else {
       throw new Error('Either baseUrl or command must be provided')
@@ -436,6 +468,13 @@ class McpService {
         }
       }

+      this.emitServerLog(server, {
+        timestamp: Date.now(),
+        level: 'info',
+        message: 'Server connected',
+        source: 'client'
+      })

       // Store the new client in the cache
       this.clients.set(serverKey, client)
@@ -446,9 +485,22 @@
       this.clearServerCache(serverKey)

       logger.debug(`Activated server: ${server.name}`)
+      this.emitServerLog(server, {
+        timestamp: Date.now(),
+        level: 'info',
+        message: 'Server activated',
+        source: 'client'
+      })
       return client
     } catch (error) {
       getServerLogger(server).error(`Error activating server ${server.name}`, error as Error)
+      this.emitServerLog(server, {
+        timestamp: Date.now(),
+        level: 'error',
+        message: `Error activating server: ${(error as Error)?.message}`,
+        data: redactSensitive(error),
+        source: 'client'
+      })
       throw error
     }
   } finally {
@@ -506,6 +558,16 @@
     // Set up logging message notification handler
     client.setNotificationHandler(LoggingMessageNotificationSchema, async (notification) => {
       logger.debug(`Message from server ${server.name}:`, notification.params)
+      const msg = notification.params?.message
+      if (msg) {
+        this.emitServerLog(server, {
+          timestamp: Date.now(),
+          level: (notification.params?.level as MCPServerLogEntry['level']) || 'info',
+          message: typeof msg === 'string' ? msg : JSON.stringify(msg),
+          data: redactSensitive(notification.params?.data),
+          source: notification.params?.logger || 'server'
+        })
+      }
     })

     getServerLogger(server).debug(`Set up notification handlers`)
@@ -540,6 +602,7 @@
       this.clients.delete(serverKey)
       // Clear all caches for this server
       this.clearServerCache(serverKey)
+      this.serverLogs.remove(serverKey)
     } else {
       logger.warn(`No client found for server`, { serverKey })
     }
@@ -548,6 +611,12 @@
   async stopServer(_: Electron.IpcMainInvokeEvent, server: MCPServer) {
     const serverKey = this.getServerKey(server)
     getServerLogger(server).debug(`Stopping server`)
+    this.emitServerLog(server, {
+      timestamp: Date.now(),
+      level: 'info',
+      message: 'Stopping server',
+      source: 'client'
+    })
     await this.closeClient(serverKey)
   }
@@ -574,6 +643,12 @@
   async restartServer(_: Electron.IpcMainInvokeEvent, server: MCPServer) {
     getServerLogger(server).debug(`Restarting server`)
     const serverKey = this.getServerKey(server)
+    this.emitServerLog(server, {
+      timestamp: Date.now(),
+      level: 'info',
+      message: 'Restarting server',
+      source: 'client'
+    })
     await this.closeClient(serverKey)
     // Clear cache before restarting to ensure fresh data
     this.clearServerCache(serverKey)
@@ -606,9 +681,22 @@
       // Attempt to list tools as a way to check connectivity
       await client.listTools()
       getServerLogger(server).debug(`Connectivity check successful`)
+      this.emitServerLog(server, {
+        timestamp: Date.now(),
+        level: 'info',
+        message: 'Connectivity check successful',
+        source: 'connectivity'
+      })
       return true
     } catch (error) {
       getServerLogger(server).error(`Connectivity check failed`, error as Error)
+      this.emitServerLog(server, {
+        timestamp: Date.now(),
+        level: 'error',
+        message: `Connectivity check failed: ${(error as Error).message}`,
+        data: redactSensitive(error),
+        source: 'connectivity'
+      })
       // Close the client if connectivity check fails to ensure a clean state for the next attempt
       const serverKey = this.getServerKey(server)
       await this.closeClient(serverKey)
@@ -1393,6 +1393,50 @@
     actionWindow.setAlwaysOnTop(isPinned)
   }

+  /**
+   * [Windows only] Manual window resize handler
+   *
+   * ELECTRON BUG WORKAROUND:
+   * In Electron, when using `frame: false` + `transparent: true`, the native window
+   * resize functionality is broken on Windows. This is a known Electron bug.
+   * See: https://github.com/electron/electron/issues/48554
+   *
+   * This method can be removed once the Electron bug is fixed.
+   */
+  public resizeActionWindow(actionWindow: BrowserWindow, deltaX: number, deltaY: number, direction: string): void {
+    const bounds = actionWindow.getBounds()
+    const minWidth = 300
+    const minHeight = 200
+
+    let { x, y, width, height } = bounds
+
+    // Handle horizontal resize
+    if (direction.includes('e')) {
+      width = Math.max(minWidth, width + deltaX)
+    }
+    if (direction.includes('w')) {
+      const newWidth = Math.max(minWidth, width - deltaX)
+      if (newWidth !== width) {
+        x = x + (width - newWidth)
+        width = newWidth
+      }
+    }
+
+    // Handle vertical resize
+    if (direction.includes('s')) {
+      height = Math.max(minHeight, height + deltaY)
+    }
+    if (direction.includes('n')) {
+      const newHeight = Math.max(minHeight, height - deltaY)
+      if (newHeight !== height) {
+        y = y + (height - newHeight)
+        height = newHeight
+      }
+    }
+
+    actionWindow.setBounds({ x, y, width, height })
+  }

   /**
    * Update trigger mode behavior
    * Switches between selection-based and alt-key based triggering
@@ -1510,6 +1554,18 @@
       }
     })

+    // [Windows only] Electron bug workaround - can be removed once fixed
+    // See: https://github.com/electron/electron/issues/48554
+    ipcMain.handle(
+      IpcChannel.Selection_ActionWindowResize,
+      (event, deltaX: number, deltaY: number, direction: string) => {
+        const actionWindow = BrowserWindow.fromWebContents(event.sender)
+        if (actionWindow) {
+          selectionService?.resizeActionWindow(actionWindow, deltaX, deltaY, direction)
+        }
+      }
+    )

     this.isIpcHandlerRegistered = true
   }
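How the renderer would drive this handler, as a minimal sketch. The preload change further down exposes resizeActionWindow(deltaX, deltaY, direction); the grouping under window.api.selection, the attachResizeHandle name, and the mouse-event wiring are assumptions for illustration, not part of the diff.

  // Hypothetical drag handle for the frameless action window (renderer side).
  // Assumes window.api.selection.resizeActionWindow is exposed by the preload script shown below.
  function attachResizeHandle(handle: HTMLElement, direction: 'n' | 's' | 'e' | 'w' | 'ne' | 'nw' | 'se' | 'sw') {
    let lastX = 0
    let lastY = 0

    const onMove = (e: MouseEvent) => {
      // Send only the delta since the last event; the main process clamps to the minimum size.
      window.api.selection.resizeActionWindow(e.screenX - lastX, e.screenY - lastY, direction)
      lastX = e.screenX
      lastY = e.screenY
    }

    handle.addEventListener('mousedown', (e) => {
      lastX = e.screenX
      lastY = e.screenY
      document.addEventListener('mousemove', onMove)
      document.addEventListener('mouseup', () => document.removeEventListener('mousemove', onMove), { once: true })
    })
  }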
@@ -35,6 +35,15 @@ function getShortcutHandler(shortcut: Shortcut) {
     }
     case 'mini_window':
       return () => {
+        // 在处理器内部检查QuickAssistant状态,而不是在注册时检查
+        const quickAssistantEnabled = configManager.getEnableQuickAssistant()
+        logger.info(`mini_window shortcut triggered, QuickAssistant enabled: ${quickAssistantEnabled}`)
+
+        if (!quickAssistantEnabled) {
+          logger.warn('QuickAssistant is disabled, ignoring mini_window shortcut trigger')
+          return
+        }
+
         windowService.toggleMiniWindow()
       }
     case 'selection_assistant_toggle':
@@ -190,11 +199,10 @@ export function registerShortcuts(window: BrowserWindow) {
       break

     case 'mini_window':
-      //available only when QuickAssistant enabled
-      if (!configManager.getEnableQuickAssistant()) {
-        return
-      }
+      // 移除注册时的条件检查,在处理器内部进行检查
+      logger.info(`Processing mini_window shortcut, enabled: ${shortcut.enabled}`)
       showMiniWindowAccelerator = formatShortcutKey(shortcut.shortcut)
       logger.debug(`Mini window accelerator set to: ${showMiniWindowAccelerator}`)
       break

     case 'selection_assistant_toggle':
src/main/services/__tests__/ServerLogBuffer.test.ts (new file, 29 lines)
@@ -0,0 +1,29 @@
import { describe, expect, it } from 'vitest'

import { ServerLogBuffer } from '../mcp/ServerLogBuffer'

describe('ServerLogBuffer', () => {
  it('keeps a bounded number of entries per server', () => {
    const buffer = new ServerLogBuffer(3)
    const key = 'srv'

    buffer.append(key, { timestamp: 1, level: 'info', message: 'a' })
    buffer.append(key, { timestamp: 2, level: 'info', message: 'b' })
    buffer.append(key, { timestamp: 3, level: 'info', message: 'c' })
    buffer.append(key, { timestamp: 4, level: 'info', message: 'd' })

    const logs = buffer.get(key)
    expect(logs).toHaveLength(3)
    expect(logs[0].message).toBe('b')
    expect(logs[2].message).toBe('d')
  })

  it('isolates entries by server key', () => {
    const buffer = new ServerLogBuffer(5)
    buffer.append('one', { timestamp: 1, level: 'info', message: 'a' })
    buffer.append('two', { timestamp: 2, level: 'info', message: 'b' })

    expect(buffer.get('one')).toHaveLength(1)
    expect(buffer.get('two')).toHaveLength(1)
  })
})
@@ -78,7 +78,7 @@ export abstract class BaseService {
   * Get database instance
   * Automatically waits for initialization to complete
   */
-  protected async getDatabase() {
+  public async getDatabase() {
    const dbManager = await DatabaseManager.getInstance()
    return dbManager.getDatabase()
  }
src/main/services/mcp/ServerLogBuffer.ts (new file, 36 lines)
@@ -0,0 +1,36 @@
export type MCPServerLogEntry = {
  timestamp: number
  level: 'debug' | 'info' | 'warn' | 'error' | 'stderr' | 'stdout'
  message: string
  data?: any
  source?: string
}

/**
 * Lightweight ring buffer for per-server MCP logs.
 */
export class ServerLogBuffer {
  private maxEntries: number
  private logs: Map<string, MCPServerLogEntry[]> = new Map()

  constructor(maxEntries = 200) {
    this.maxEntries = maxEntries
  }

  append(serverKey: string, entry: MCPServerLogEntry) {
    const list = this.logs.get(serverKey) ?? []
    list.push(entry)
    if (list.length > this.maxEntries) {
      list.splice(0, list.length - this.maxEntries)
    }
    this.logs.set(serverKey, list)
  }

  get(serverKey: string): MCPServerLogEntry[] {
    return [...(this.logs.get(serverKey) ?? [])]
  }

  remove(serverKey: string) {
    this.logs.delete(serverKey)
  }
}
@@ -5,6 +5,7 @@ import type { SpanContext } from '@opentelemetry/api'
 import type { TerminalConfig, UpgradeChannel } from '@shared/config/constant'
 import type { LogLevel, LogSourceWithContext } from '@shared/config/logger'
 import type { FileChangeEvent, WebviewKeyEvent } from '@shared/config/types'
+import type { MCPServerLogEntry } from '@shared/config/types'
 import { IpcChannel } from '@shared/IpcChannel'
 import type { Notification } from '@types'
 import type {
@@ -372,7 +373,16 @@ const api = {
     },
     abortTool: (callId: string) => ipcRenderer.invoke(IpcChannel.Mcp_AbortTool, callId),
     getServerVersion: (server: MCPServer): Promise<string | null> =>
-      ipcRenderer.invoke(IpcChannel.Mcp_GetServerVersion, server)
+      ipcRenderer.invoke(IpcChannel.Mcp_GetServerVersion, server),
+    getServerLogs: (server: MCPServer): Promise<MCPServerLogEntry[]> =>
+      ipcRenderer.invoke(IpcChannel.Mcp_GetServerLogs, server),
+    onServerLog: (callback: (log: MCPServerLogEntry & { serverId?: string }) => void) => {
+      const listener = (_event: Electron.IpcRendererEvent, log: MCPServerLogEntry & { serverId?: string }) => {
+        callback(log)
+      }
+      ipcRenderer.on(IpcChannel.Mcp_ServerLog, listener)
+      return () => ipcRenderer.off(IpcChannel.Mcp_ServerLog, listener)
+    }
   },
   python: {
     execute: (script: string, context?: Record<string, any>, timeout?: number) =>
@@ -456,7 +466,10 @@
       ipcRenderer.invoke(IpcChannel.Selection_ProcessAction, actionItem, isFullScreen),
     closeActionWindow: () => ipcRenderer.invoke(IpcChannel.Selection_ActionWindowClose),
     minimizeActionWindow: () => ipcRenderer.invoke(IpcChannel.Selection_ActionWindowMinimize),
-    pinActionWindow: (isPinned: boolean) => ipcRenderer.invoke(IpcChannel.Selection_ActionWindowPin, isPinned)
+    pinActionWindow: (isPinned: boolean) => ipcRenderer.invoke(IpcChannel.Selection_ActionWindowPin, isPinned),
+    // [Windows only] Electron bug workaround - can be removed once https://github.com/electron/electron/issues/48554 is fixed
+    resizeActionWindow: (deltaX: number, deltaY: number, direction: string) =>
+      ipcRenderer.invoke(IpcChannel.Selection_ActionWindowResize, deltaX, deltaY, direction)
   },
   agentTools: {
     respondToPermission: (payload: {
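A minimal sketch of how a renderer view could consume the new MCP log API: load the buffered history once, then append live entries as they arrive. The hook name, the filtering on serverId, and the '@renderer/types' import path are assumptions; only getServerLogs and onServerLog (with its unsubscribe return value) come from the preload change above, and the exposure under window.api.mcp follows the surrounding preload structure.

  import { useEffect, useState } from 'react'
  import type { MCPServerLogEntry } from '@shared/config/types'
  import type { MCPServer } from '@renderer/types' // path assumed

  function useMcpServerLogs(server: MCPServer) {
    const [logs, setLogs] = useState<MCPServerLogEntry[]>([])

    useEffect(() => {
      // Load the buffered history first, then append live entries.
      window.api.mcp.getServerLogs(server).then(setLogs)
      const unsubscribe = window.api.mcp.onServerLog((log) => {
        if (!log.serverId || log.serverId === server.id) {
          setLogs((prev) => [...prev, log])
        }
      })
      return unsubscribe
    }, [server])

    return logs
  }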
@@ -91,7 +91,9 @@ export default class ModernAiProvider {
     if (this.isModel(modelOrProvider)) {
       // 传入的是 Model
       this.model = modelOrProvider
-      this.actualProvider = provider ? adaptProvider({ provider }) : getActualProvider(modelOrProvider)
+      this.actualProvider = provider
+        ? adaptProvider({ provider, model: modelOrProvider })
+        : getActualProvider(modelOrProvider)
       // 只保存配置,不预先创建executor
       this.config = providerToAiSdkConfig(this.actualProvider, modelOrProvider)
     } else {
@@ -2,9 +2,10 @@ import { loggerService } from '@logger'
 import {
   getModelSupportedVerbosity,
   isFunctionCallingModel,
-  isNotSupportTemperatureAndTopP,
   isOpenAIModel,
-  isSupportFlexServiceTierModel
+  isSupportFlexServiceTierModel,
+  isSupportTemperatureModel,
+  isSupportTopPModel
 } from '@renderer/config/models'
 import { REFERENCE_PROMPT } from '@renderer/config/prompts'
 import { getLMStudioKeepAliveTime } from '@renderer/hooks/useLMStudio'
@@ -200,7 +201,7 @@
   }

   public getTemperature(assistant: Assistant, model: Model): number | undefined {
-    if (isNotSupportTemperatureAndTopP(model)) {
+    if (!isSupportTemperatureModel(model)) {
       return undefined
     }
     const assistantSettings = getAssistantSettings(assistant)
@@ -208,7 +209,7 @@
   }

   public getTopP(assistant: Assistant, model: Model): number | undefined {
-    if (isNotSupportTemperatureAndTopP(model)) {
+    if (!isSupportTopPModel(model)) {
       return undefined
     }
     const assistantSettings = getAssistantSettings(assistant)
@@ -124,7 +124,8 @@ export class AnthropicAPIClient extends BaseApiClient<

   override async listModels(): Promise<Anthropic.ModelInfo[]> {
     const sdk = (await this.getSdkInstance()) as Anthropic
-    const response = await sdk.models.list()
+    // prevent auto appended /v1. It's included in baseUrl.
+    const response = await sdk.models.list({ path: '/models' })
     return response.data
   }
@@ -173,13 +173,15 @@ export class GeminiAPIClient extends BaseApiClient<
       return this.sdkInstance
     }

+    const apiVersion = this.getApiVersion()
+
     this.sdkInstance = new GoogleGenAI({
       vertexai: false,
       apiKey: this.apiKey,
-      apiVersion: this.getApiVersion(),
+      apiVersion,
       httpOptions: {
         baseUrl: this.getBaseURL(),
-        apiVersion: this.getApiVersion(),
+        apiVersion,
         headers: {
           ...this.provider.extra_headers
         }
@@ -200,7 +202,7 @@
       return trailingVersion
     }

-    return 'v1beta'
+    return ''
   }

   /**
@@ -25,7 +25,7 @@ import type {
   OpenAISdkRawOutput,
   ReasoningEffortOptionalParams
 } from '@renderer/types/sdk'
-import { formatApiHost, withoutTrailingSlash } from '@renderer/utils/api'
+import { withoutTrailingSlash } from '@renderer/utils/api'
 import { isOllamaProvider } from '@renderer/utils/provider'

 import { BaseApiClient } from '../BaseApiClient'
@@ -49,8 +49,9 @@
   }

   // 仅适用于openai
-  override getBaseURL(isSupportedAPIVerion: boolean = true): string {
-    return formatApiHost(this.provider.apiHost, isSupportedAPIVerion)
+  override getBaseURL(): string {
+    // apiHost is formatted when called by AiProvider
+    return this.provider.apiHost
   }

   override async generateImage({
@@ -100,6 +101,17 @@
   override async listModels(): Promise<OpenAI.Models.Model[]> {
     try {
       const sdk = await this.getSdkInstance()
+      if (this.provider.id === 'openrouter') {
+        // https://openrouter.ai/docs/api/api-reference/embeddings/list-embeddings-models
+        const embedBaseUrl = 'https://openrouter.ai/api/v1/embeddings'
+        const embedSdk = sdk.withOptions({ baseURL: embedBaseUrl })
+        const modelPromise = sdk.models.list()
+        const embedModelPromise = embedSdk.models.list()
+        const [modelResponse, embedModelResponse] = await Promise.all([modelPromise, embedModelPromise])
+        const models = [...modelResponse.data, ...embedModelResponse.data]
+        const uniqueModels = Array.from(new Map(models.map((model) => [model.id, model])).values())
+        return uniqueModels.filter(isSupportedModel)
+      }
       if (this.provider.id === 'github') {
         // GitHub Models 其 models 和 chat completions 两个接口的 baseUrl 不一样
         const baseUrl = 'https://models.github.ai/catalog/'
@@ -118,7 +130,7 @@
       }

       if (isOllamaProvider(this.provider)) {
-        const baseUrl = withoutTrailingSlash(this.getBaseURL(false))
+        const baseUrl = withoutTrailingSlash(this.getBaseURL())
           .replace(/\/v1$/, '')
           .replace(/\/api$/, '')
         const response = await fetch(`${baseUrl}/api/tags`, {
@@ -173,6 +185,7 @@

     let apiKeyForSdkInstance = this.apiKey
     let baseURLForSdkInstance = this.getBaseURL()
+    logger.debug('baseURLForSdkInstance', { baseURLForSdkInstance })
     let headersForSdkInstance = {
       ...this.defaultHeaders(),
       ...this.provider.extra_headers
@@ -184,7 +197,7 @@
       // this.provider.apiKey不允许修改
       // this.provider.apiKey = token
       apiKeyForSdkInstance = token
-      baseURLForSdkInstance = this.getBaseURL(false)
+      baseURLForSdkInstance = this.getBaseURL()
       headersForSdkInstance = {
         ...headersForSdkInstance,
         ...COPILOT_DEFAULT_HEADERS
@@ -122,6 +122,7 @@ export class OpenAIResponseAPIClient extends OpenAIBaseClient<
     if (this.sdkInstance) {
       return this.sdkInstance
     }
+    const baseUrl = this.getBaseURL()

     if (this.provider.id === 'azure-openai' || this.provider.type === 'azure-openai') {
       return new AzureOpenAI({
@@ -134,7 +135,7 @@
     return new OpenAI({
       dangerouslyAllowBrowser: true,
       apiKey: this.apiKey,
-      baseURL: this.getBaseURL(),
+      baseURL: baseUrl,
       defaultHeaders: {
         ...this.defaultHeaders(),
         ...this.provider.extra_headers
@@ -4,60 +4,81 @@
  */

 import {
-  isClaude45ReasoningModel,
   isClaudeReasoningModel,
   isMaxTemperatureOneModel,
-  isNotSupportTemperatureAndTopP,
   isSupportedFlexServiceTier,
-  isSupportedThinkingTokenClaudeModel
+  isSupportedThinkingTokenClaudeModel,
+  isSupportTemperatureModel,
+  isSupportTopPModel,
+  isTemperatureTopPMutuallyExclusiveModel
 } from '@renderer/config/models'
-import { getAssistantSettings, getProviderByModel } from '@renderer/services/AssistantService'
+import {
+  DEFAULT_ASSISTANT_SETTINGS,
+  getAssistantSettings,
+  getProviderByModel
+} from '@renderer/services/AssistantService'
 import type { Assistant, Model } from '@renderer/types'
 import { defaultTimeout } from '@shared/config/constant'

 import { getAnthropicThinkingBudget } from '../utils/reasoning'

 /**
- * Claude 4.5 推理模型:
- * - 只启用 temperature → 使用 temperature
- * - 只启用 top_p → 使用 top_p
- * - 同时启用 → temperature 生效,top_p 被忽略
- * - 都不启用 → 都不使用
  * 获取温度参数
+ * Retrieves the temperature parameter, adapting it based on assistant.settings and model capabilities.
+ * - Disabled for Claude reasoning models when reasoning effort is set.
+ * - Disabled for models that do not support temperature.
+ * - Disabled for Claude 4.5 reasoning models when TopP is enabled and temperature is disabled.
+ * Otherwise, returns the temperature value if the assistant has temperature enabled.
  */
 export function getTemperature(assistant: Assistant, model: Model): number | undefined {
   if (assistant.settings?.reasoning_effort && isClaudeReasoningModel(model)) {
     return undefined
   }

+  if (!isSupportTemperatureModel(model)) {
+    return undefined
+  }

   if (
-    isNotSupportTemperatureAndTopP(model) ||
-    (isClaude45ReasoningModel(model) && assistant.settings?.enableTopP && !assistant.settings?.enableTemperature)
+    isTemperatureTopPMutuallyExclusiveModel(model) &&
+    assistant.settings?.enableTopP &&
+    !assistant.settings?.enableTemperature
   ) {
     return undefined
   }

   const assistantSettings = getAssistantSettings(assistant)
   let temperature = assistantSettings?.temperature
   if (temperature && isMaxTemperatureOneModel(model)) {
     temperature = Math.min(1, temperature)
   }
-  return assistantSettings?.enableTemperature ? temperature : undefined
+
+  // FIXME: assistant.settings.enableTemperature should be always a boolean value.
+  const enableTemperature = assistantSettings?.enableTemperature ?? DEFAULT_ASSISTANT_SETTINGS.enableTemperature
+  return enableTemperature ? temperature : undefined
 }

 /**
  * 获取 TopP 参数
+ * Retrieves the TopP parameter, adapting it based on assistant.settings and model capabilities.
+ * - Disabled for Claude reasoning models when reasoning effort is set.
+ * - Disabled for models that do not support TopP.
+ * - Disabled for Claude 4.5 reasoning models when temperature is explicitly enabled.
+ * Otherwise, returns the TopP value if the assistant has TopP enabled.
  */
 export function getTopP(assistant: Assistant, model: Model): number | undefined {
   if (assistant.settings?.reasoning_effort && isClaudeReasoningModel(model)) {
     return undefined
   }
-  if (
-    isNotSupportTemperatureAndTopP(model) ||
-    (isClaude45ReasoningModel(model) && assistant.settings?.enableTemperature)
-  ) {
+  if (!isSupportTopPModel(model)) {
     return undefined
   }
+  if (isTemperatureTopPMutuallyExclusiveModel(model) && assistant.settings?.enableTemperature) {
+    return undefined
+  }

   const assistantSettings = getAssistantSettings(assistant)
-  return assistantSettings?.enableTopP ? assistantSettings?.topP : undefined
+  // FIXME: assistant.settings.enableTopP should be always a boolean value.
+  const enableTopP = assistantSettings.enableTopP ?? DEFAULT_ASSISTANT_SETTINGS.enableTopP
+  return enableTopP ? assistantSettings?.topP : undefined
 }

 /**
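The resulting behavior for a model where temperature and top_p are mutually exclusive (for example a Claude 4.5 reasoning model, per the removed JSDoc above), shown as a minimal sketch. The './parameters' import path, the concrete model/assistant values, and the 0.7 result are illustrative assumptions; the enable/disable combinations follow the logic in the two functions above.

  import type { Assistant, Model } from '@renderer/types'
  import { getTemperature, getTopP } from './parameters' // path assumed

  declare const claude45: Model // assume isTemperatureTopPMutuallyExclusiveModel(claude45) === true
  declare const assistant: Assistant

  // Only temperature enabled -> temperature is sent, top_p is undefined
  // Only top_p enabled       -> top_p is sent, temperature is undefined
  // Both enabled             -> temperature wins, top_p is dropped
  // Neither enabled          -> both undefined (provider defaults apply)
  assistant.settings = { ...assistant.settings, enableTemperature: true, enableTopP: true }
  getTemperature(assistant, claude45) // e.g. 0.7 (clamped to <= 1 for max-temperature-one models)
  getTopP(assistant, claude45) // undefined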
@@ -42,7 +42,8 @@ vi.mock('@renderer/utils/api', () => ({
   routeToEndpoint: vi.fn((host) => ({
     baseURL: host,
     endpoint: '/chat/completions'
-  }))
+  })),
+  isWithTrailingSharp: vi.fn((host) => host?.endsWith('#') || false)
 }))

 // Also mock @shared/api since formatProviderApiHost uses it directly
@@ -241,12 +242,19 @@ describe('CherryAI provider configuration', () => {
   // Mock the functions to simulate non-CherryAI provider
   vi.mocked(isCherryAIProvider).mockReturnValue(false)
   vi.mocked(getProviderByModel).mockReturnValue(provider)
+  // Mock isWithTrailingSharp to return false for this test
+  vi.mocked(formatApiHost as any).mockImplementation((host, isSupportedAPIVersion = true) => {
+    if (isSupportedAPIVersion === false) {
+      return host
+    }
+    return `${host}/v1`
+  })

   // Call getActualProvider
   const actualProvider = getActualProvider(model)

-  // Verify that formatApiHost was called with default parameters (true)
-  expect(formatApiHost).toHaveBeenCalledWith('https://api.openai.com')
+  // Verify that formatApiHost was called with appendApiVersion parameter
+  expect(formatApiHost).toHaveBeenCalledWith('https://api.openai.com', true)
   expect(actualProvider.apiHost).toBe('https://api.openai.com/v1')
 })
@@ -317,12 +325,19 @@ describe('Perplexity provider configuration', () => {
   vi.mocked(isCherryAIProvider).mockReturnValue(false)
   vi.mocked(isPerplexityProvider).mockReturnValue(false)
   vi.mocked(getProviderByModel).mockReturnValue(provider)
+  // Mock isWithTrailingSharp to return false for this test
+  vi.mocked(formatApiHost as any).mockImplementation((host, isSupportedAPIVersion = true) => {
+    if (isSupportedAPIVersion === false) {
+      return host
+    }
+    return `${host}/v1`
+  })

   // Call getActualProvider
   const actualProvider = getActualProvider(model)

-  // Verify that formatApiHost was called with default parameters (true)
-  expect(formatApiHost).toHaveBeenCalledWith('https://api.openai.com')
+  // Verify that formatApiHost was called with appendApiVersion parameter
+  expect(formatApiHost).toHaveBeenCalledWith('https://api.openai.com', true)
   expect(actualProvider.apiHost).toBe('https://api.openai.com/v1')
 })
@@ -14,7 +14,7 @@ import { convertImageToPng } from '@renderer/utils/image'
 import type { ImageProps as AntImageProps } from 'antd'
 import { Dropdown, Image as AntImage, Space } from 'antd'
 import { Base64 } from 'js-base64'
-import { DownloadIcon, ImageIcon } from 'lucide-react'
+import { DownloadIcon } from 'lucide-react'
 import mime from 'mime'
 import React from 'react'
 import { useTranslation } from 'react-i18next'
@@ -73,9 +73,15 @@ const ImageViewer: React.FC<ImageViewerProps> = ({ src, style, ...props }) => {
   const getContextMenuItems = (src: string, size: number = 14) => {
     return [
       {
-        key: 'copy-url',
+        key: 'copy-image',
         label: t('common.copy'),
         icon: <CopyIcon size={size} />,
+        onClick: () => handleCopyImage(src)
+      },
+      {
+        key: 'copy-url',
+        label: t('preview.copy.src'),
+        icon: <CopyIcon size={size} />,
         onClick: () => {
           navigator.clipboard.writeText(src)
           window.toast.success(t('message.copy.success'))
@@ -86,12 +92,6 @@ const ImageViewer: React.FC<ImageViewerProps> = ({ src, style, ...props }) => {
         label: t('common.download'),
         icon: <DownloadIcon size={size} />,
         onClick: () => download(src)
-      },
-      {
-        key: 'copy-image',
-        label: t('preview.copy.image'),
-        icon: <ImageIcon size={size} />,
-        onClick: () => handleCopyImage(src)
       }
     ]
   }
@@ -1,6 +1,7 @@
 import { loggerService } from '@logger'
 import { TopView } from '@renderer/components/TopView'
-import { handleSaveData } from '@renderer/store'
+import { handleSaveData, useAppDispatch } from '@renderer/store'
+import { setUpdateState } from '@renderer/store/runtime'
 import { Button, Modal } from 'antd'
 import type { ReleaseNoteInfo, UpdateInfo } from 'builder-util-runtime'
 import { useEffect, useState } from 'react'
@@ -22,6 +23,7 @@ const PopupContainer: React.FC<Props> = ({ releaseInfo, resolve }) => {
   const { t } = useTranslation()
   const [open, setOpen] = useState(true)
   const [isInstalling, setIsInstalling] = useState(false)
+  const dispatch = useAppDispatch()

   useEffect(() => {
     if (releaseInfo) {
@@ -50,6 +52,11 @@ const PopupContainer: React.FC<Props> = ({ releaseInfo, resolve }) => {
     resolve({})
   }

+  const onIgnore = () => {
+    dispatch(setUpdateState({ ignore: true }))
+    setOpen(false)
+  }

   UpdateDialogPopup.hide = onCancel

   const releaseNotes = releaseInfo?.releaseNotes
@@ -69,7 +76,7 @@ const PopupContainer: React.FC<Props> = ({ releaseInfo, resolve }) => {
       centered
       width={720}
       footer={[
-        <Button key="later" onClick={onCancel} disabled={isInstalling}>
+        <Button key="later" onClick={onIgnore} disabled={isInstalling}>
           {t('update.later')}
         </Button>,
         <Button key="install" type="primary" onClick={handleInstall} loading={isInstalling}>
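For context, a sketch of what the runtime slice's setUpdateState reducer might look like. The actual slice in '@renderer/store/runtime' is not part of this diff, so the UpdateState shape below (beyond the ignore flag implied by the dispatch above) and the merge-via-Partial behavior are assumptions, not the project's real implementation.

  import { createSlice, type PayloadAction } from '@reduxjs/toolkit'

  // Assumed shape; only the `ignore` field is implied by setUpdateState({ ignore: true }) above.
  interface UpdateState {
    ignore: boolean
    checking?: boolean
    downloading?: boolean
  }

  const initialState: { update: UpdateState } = {
    update: { ignore: false }
  }

  const runtimeSlice = createSlice({
    name: 'runtime',
    initialState,
    reducers: {
      // Merges a partial update into the update state, e.g. setUpdateState({ ignore: true })
      setUpdateState(state, action: PayloadAction<Partial<UpdateState>>) {
        state.update = { ...state.update, ...action.payload }
      }
    }
  })

  export const { setUpdateState } = runtimeSlice.actions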
@@ -1,5 +1,6 @@
 import { loggerService } from '@logger'
 import { ErrorBoundary } from '@renderer/components/ErrorBoundary'
+import { HelpTooltip } from '@renderer/components/TooltipIcons'
 import { TopView } from '@renderer/components/TopView'
 import { permissionModeCards } from '@renderer/config/agent'
 import { useAgents } from '@renderer/hooks/agents/useAgents'
@@ -340,9 +341,12 @@ const PopupContainer: React.FC<Props> = ({ agent, afterSubmit, resolve }) => {
       </FormRow>

       <FormItem>
+        <div className="flex items-center gap-2">
         <Label>
           {t('common.model')} <RequiredMark>*</RequiredMark>
         </Label>
+          <HelpTooltip title={t('agent.add.model.tooltip')} />
+        </div>
         <SelectAgentBaseModelButton
           agentBase={tempAgentBase}
           onSelect={handleModelSelect}
@@ -1016,7 +1016,7 @@ describe('Gemini Models', () => {
     provider: '',
     group: ''
   })
-).toBe(false)
+).toBe(true)
 expect(
   isSupportedThinkingTokenGeminiModel({
     id: 'gemini-3.0-flash-image-preview',
@@ -1224,7 +1224,7 @@
     provider: '',
     group: ''
   })
-).toBe(false)
+).toBe(true)
 expect(
   isGeminiReasoningModel({
     id: 'gemini-3.5-flash-image-preview',
@@ -26,11 +26,13 @@ import {
   isGenerateImageModels,
   isMaxTemperatureOneModel,
   isNotSupportSystemMessageModel,
-  isNotSupportTemperatureAndTopP,
   isNotSupportTextDeltaModel,
   isSupportedFlexServiceTier,
   isSupportedModel,
   isSupportFlexServiceTierModel,
+  isSupportTemperatureModel,
+  isSupportTopPModel,
+  isTemperatureTopPMutuallyExclusiveModel,
   isVisionModels,
   isZhipuModel
 } from '../utils'
@@ -303,27 +305,104 @@ describe('model utils', () => {
   })

 describe('Temperature and top-p support', () => {
-  describe('isNotSupportTemperatureAndTopP', () => {
-    it('returns true for reasoning models', () => {
+  describe('isSupportTemperatureModel', () => {
+    it('returns false for reasoning models (non-open weight)', () => {
       const model = createModel({ id: 'o1' })
       reasoningMock.mockReturnValue(true)
-      expect(isNotSupportTemperatureAndTopP(model)).toBe(true)
+      expect(isSupportTemperatureModel(model)).toBe(false)
     })

-    it('returns false for open weight models', () => {
+    it('returns true for open weight models', () => {
       const openWeight = createModel({ id: 'gpt-oss-debug' })
-      expect(isNotSupportTemperatureAndTopP(openWeight)).toBe(false)
+      expect(isSupportTemperatureModel(openWeight)).toBe(true)
     })

-    it('returns true for chat-only models without reasoning', () => {
+    it('returns false for chat-only models', () => {
       const chatOnly = createModel({ id: 'o1-preview' })
       reasoningMock.mockReturnValue(false)
-      expect(isNotSupportTemperatureAndTopP(chatOnly)).toBe(true)
+      expect(isSupportTemperatureModel(chatOnly)).toBe(false)
     })

-    it('returns true for Qwen MT models', () => {
+    it('returns false for Qwen MT models', () => {
       const qwenMt = createModel({ id: 'qwen-mt-large', provider: 'aliyun' })
-      expect(isNotSupportTemperatureAndTopP(qwenMt)).toBe(true)
+      expect(isSupportTemperatureModel(qwenMt)).toBe(false)
     })

+    it('returns false for null/undefined models', () => {
+      expect(isSupportTemperatureModel(null)).toBe(false)
+      expect(isSupportTemperatureModel(undefined)).toBe(false)
+    })
+
+    it('returns true for regular GPT models', () => {
+      const model = createModel({ id: 'gpt-4' })
+      expect(isSupportTemperatureModel(model)).toBe(true)
+    })
+  })
+
+  describe('isSupportTopPModel', () => {
+    it('returns false for reasoning models (non-open weight)', () => {
+      const model = createModel({ id: 'o1' })
+      reasoningMock.mockReturnValue(true)
+      expect(isSupportTopPModel(model)).toBe(false)
+    })
+
+    it('returns true for open weight models', () => {
+      const openWeight = createModel({ id: 'gpt-oss-debug' })
+      expect(isSupportTopPModel(openWeight)).toBe(true)
+    })
+
+    it('returns false for chat-only models', () => {
+      const chatOnly = createModel({ id: 'o1-preview' })
+      expect(isSupportTopPModel(chatOnly)).toBe(false)
+    })
+
+    it('returns false for Qwen MT models', () => {
+      const qwenMt = createModel({ id: 'qwen-mt-large', provider: 'aliyun' })
+      expect(isSupportTopPModel(qwenMt)).toBe(false)
+    })
+
+    it('returns false for null/undefined models', () => {
+      expect(isSupportTopPModel(null)).toBe(false)
+      expect(isSupportTopPModel(undefined)).toBe(false)
+    })
+
+    it('returns true for regular GPT models', () => {
+      const model = createModel({ id: 'gpt-4' })
+      expect(isSupportTopPModel(model)).toBe(true)
+    })
+  })
+
+  describe('isTemperatureTopPMutuallyExclusiveModel', () => {
+    it('returns true for Claude 4.5 reasoning models', () => {
+      const claude45Sonnet = createModel({ id: 'claude-sonnet-4.5-20250514' })
|
||||
expect(isTemperatureTopPMutuallyExclusiveModel(claude45Sonnet)).toBe(true)
|
||||
|
||||
const claude45Opus = createModel({ id: 'claude-opus-4.5-20250514' })
|
||||
expect(isTemperatureTopPMutuallyExclusiveModel(claude45Opus)).toBe(true)
|
||||
})
|
||||
|
||||
it('returns false for Claude 4 models', () => {
|
||||
const claude4Sonnet = createModel({ id: 'claude-sonnet-4-20250514' })
|
||||
expect(isTemperatureTopPMutuallyExclusiveModel(claude4Sonnet)).toBe(false)
|
||||
})
|
||||
|
||||
it('returns false for Claude 3.x models', () => {
|
||||
const claude35Sonnet = createModel({ id: 'claude-3-5-sonnet-20241022' })
|
||||
expect(isTemperatureTopPMutuallyExclusiveModel(claude35Sonnet)).toBe(false)
|
||||
|
||||
const claude3Opus = createModel({ id: 'claude-3-opus-20240229' })
|
||||
expect(isTemperatureTopPMutuallyExclusiveModel(claude3Opus)).toBe(false)
|
||||
})
|
||||
|
||||
it('returns false for other AI models', () => {
|
||||
expect(isTemperatureTopPMutuallyExclusiveModel(createModel({ id: 'gpt-4o' }))).toBe(false)
|
||||
expect(isTemperatureTopPMutuallyExclusiveModel(createModel({ id: 'o1' }))).toBe(false)
|
||||
expect(isTemperatureTopPMutuallyExclusiveModel(createModel({ id: 'gemini-2.0-flash' }))).toBe(false)
|
||||
expect(isTemperatureTopPMutuallyExclusiveModel(createModel({ id: 'qwen-max' }))).toBe(false)
|
||||
})
|
||||
|
||||
it('returns false for null/undefined models', () => {
|
||||
expect(isTemperatureTopPMutuallyExclusiveModel(null)).toBe(false)
|
||||
expect(isTemperatureTopPMutuallyExclusiveModel(undefined)).toBe(false)
|
||||
})
|
||||
})
|
||||
})
|
||||
|
||||
@ -240,47 +240,35 @@ export const SYSTEM_MODELS: Record<SystemProviderId | 'defaultModel', Model[]> =
|
||||
],
|
||||
|
||||
burncloud: [
|
||||
{ id: 'claude-3-7-sonnet-20250219-thinking', provider: 'burncloud', name: 'Claude 3.7 thinking', group: 'Claude' },
|
||||
{ id: 'claude-3-7-sonnet-20250219', provider: 'burncloud', name: 'Claude 3.7 Sonnet', group: 'Claude 3.7' },
|
||||
{ id: 'claude-3-5-sonnet-20241022', provider: 'burncloud', name: 'Claude 3.5 Sonnet', group: 'Claude 3.5' },
|
||||
{ id: 'claude-3-5-haiku-20241022', provider: 'burncloud', name: 'Claude 3.5 Haiku', group: 'Claude 3.5' },
|
||||
{ id: 'claude-opus-4-5-20251101', provider: 'burncloud', name: 'Claude 4.5 Opus', group: 'Claude 4.5' },
|
||||
{ id: 'claude-sonnet-4-5-20250929', provider: 'burncloud', name: 'Claude 4.5 Sonnet', group: 'Claude 4.5' },
|
||||
{ id: 'claude-haiku-4-5-20251001', provider: 'burncloud', name: 'Claude 4.5 Haiku', group: 'Claude 4.5' },
|
||||
|
||||
{ id: 'gpt-4.5-preview', provider: 'burncloud', name: 'gpt-4.5-preview', group: 'gpt-4.5' },
|
||||
{ id: 'gpt-4o', provider: 'burncloud', name: 'GPT-4o', group: 'GPT 4o' },
|
||||
{ id: 'gpt-4o-mini', provider: 'burncloud', name: 'GPT-4o-mini', group: 'GPT 4o' },
|
||||
{ id: 'o3', provider: 'burncloud', name: 'o3', group: 'o1' },
{ id: 'o3-mini', provider: 'burncloud', name: 'o3-mini', group: 'o1' },
{ id: 'o1-mini', provider: 'burncloud', name: 'GPT-o1-mini', group: 'o1' },
|
||||
{ id: 'gpt-5', provider: 'burncloud', name: 'GPT 5', group: 'GPT 5' },
|
||||
{ id: 'gpt-5.1', provider: 'burncloud', name: 'GPT 5.1', group: 'GPT 5.1' },
|
||||
|
||||
{ id: 'gemini-2.5-pro-preview-03-25', provider: 'burncloud', name: 'Gemini 2.5 Preview', group: 'Gemini 2.5' },
{ id: 'gemini-2.5-pro-exp-03-25', provider: 'burncloud', name: 'Gemini 2.5 Pro Exp', group: 'Gemini 2.5' },
{ id: 'gemini-2.0-flash-lite', provider: 'burncloud', name: 'Gemini 2.0 Flash Lite', group: 'Gemini 2.0' },
{ id: 'gemini-2.0-flash-exp', provider: 'burncloud', name: 'Gemini 2.0 Flash Exp', group: 'Gemini 2.0' },
{ id: 'gemini-2.0-flash', provider: 'burncloud', name: 'Gemini 2.0 Flash', group: 'Gemini 2.0' },
{ id: 'gemini-2.5-flash', provider: 'burncloud', name: 'Gemini 2.5 Flash', group: 'Gemini 2.5' },
|
||||
{ id: 'gemini-2.5-flash-image', provider: 'burncloud', name: 'Gemini 2.5 Flash Image', group: 'Gemini 2.5' },
|
||||
{ id: 'gemini-2.5-pro', provider: 'burncloud', name: 'Gemini 2.5 Pro', group: 'Gemini 2.5' },
|
||||
{ id: 'gemini-3-pro-preview', provider: 'burncloud', name: 'Gemini 3 Pro Preview', group: 'Gemini 3' },
|
||||
|
||||
{ id: 'deepseek-r1', name: 'DeepSeek-R1', provider: 'burncloud', group: 'deepseek-ai' },
|
||||
{ id: 'deepseek-v3', name: 'DeepSeek-V3', provider: 'burncloud', group: 'deepseek-ai' }
|
||||
{ id: 'deepseek-reasoner', name: 'DeepSeek Reasoner', provider: 'burncloud', group: 'deepseek-ai' },
|
||||
{ id: 'deepseek-chat', name: 'DeepSeek Chat', provider: 'burncloud', group: 'deepseek-ai' }
|
||||
],
|
||||
ovms: [],
|
||||
ollama: [],
|
||||
lmstudio: [],
|
||||
silicon: [
|
||||
{
|
||||
id: 'deepseek-ai/DeepSeek-R1',
|
||||
name: 'deepseek-ai/DeepSeek-R1',
|
||||
id: 'deepseek-ai/DeepSeek-V3.2',
|
||||
name: 'deepseek-ai/DeepSeek-V3.2',
|
||||
provider: 'silicon',
|
||||
group: 'deepseek-ai'
|
||||
},
|
||||
{
|
||||
id: 'deepseek-ai/DeepSeek-V3',
|
||||
name: 'deepseek-ai/DeepSeek-V3',
|
||||
id: 'Qwen/Qwen3-8B',
|
||||
name: 'Qwen/Qwen3-8B',
|
||||
provider: 'silicon',
|
||||
group: 'deepseek-ai'
|
||||
},
|
||||
{
|
||||
id: 'Qwen/Qwen2.5-7B-Instruct',
|
||||
provider: 'silicon',
|
||||
name: 'Qwen2.5-7B-Instruct',
|
||||
group: 'Qwen'
|
||||
},
|
||||
{
|
||||
@ -288,79 +276,31 @@ export const SYSTEM_MODELS: Record<SystemProviderId | 'defaultModel', Model[]> =
|
||||
name: 'BAAI/bge-m3',
|
||||
provider: 'silicon',
|
||||
group: 'BAAI'
|
||||
},
|
||||
{
|
||||
id: 'Qwen/Qwen3-8B',
|
||||
name: 'Qwen/Qwen3-8B',
|
||||
provider: 'silicon',
|
||||
group: 'Qwen'
|
||||
}
|
||||
],
|
||||
ppio: [
|
||||
{
|
||||
id: 'deepseek/deepseek-r1-0528',
|
||||
id: 'deepseek/deepseek-v3.2',
|
||||
provider: 'ppio',
|
||||
name: 'DeepSeek R1-0528',
|
||||
name: 'DeepSeek V3.2',
|
||||
group: 'deepseek'
|
||||
},
|
||||
{
|
||||
id: 'deepseek/deepseek-v3-0324',
|
||||
id: 'minimax/minimax-m2',
|
||||
provider: 'ppio',
|
||||
name: 'DeepSeek V3-0324',
|
||||
group: 'deepseek'
|
||||
},
|
||||
{
|
||||
id: 'deepseek/deepseek-r1-turbo',
|
||||
provider: 'ppio',
|
||||
name: 'DeepSeek R1 Turbo',
|
||||
group: 'deepseek'
|
||||
},
|
||||
{
|
||||
id: 'deepseek/deepseek-v3-turbo',
|
||||
provider: 'ppio',
|
||||
name: 'DeepSeek V3 Turbo',
|
||||
group: 'deepseek'
|
||||
},
|
||||
{
|
||||
id: 'deepseek/deepseek-r1/community',
|
||||
name: 'DeepSeek: DeepSeek R1 (Community)',
|
||||
provider: 'ppio',
|
||||
group: 'deepseek'
|
||||
},
|
||||
{
|
||||
id: 'deepseek/deepseek-v3/community',
|
||||
name: 'DeepSeek: DeepSeek V3 (Community)',
|
||||
provider: 'ppio',
|
||||
group: 'deepseek'
|
||||
},
|
||||
{
|
||||
id: 'minimaxai/minimax-m1-80k',
|
||||
provider: 'ppio',
|
||||
name: 'MiniMax M1-80K',
|
||||
name: 'MiniMax M2',
|
||||
group: 'minimaxai'
|
||||
},
|
||||
{
|
||||
id: 'qwen/qwen3-235b-a22b-fp8',
|
||||
id: 'qwen/qwen3-235b-a22b-instruct-2507',
|
||||
provider: 'ppio',
|
||||
name: 'Qwen3 235B',
|
||||
name: 'Qwen3-235b-a22b-instruct-2507',
|
||||
group: 'qwen'
|
||||
},
|
||||
{
|
||||
id: 'qwen/qwen3-32b-fp8',
|
||||
id: 'qwen/qwen3-vl-235b-a22b-instruct',
|
||||
provider: 'ppio',
|
||||
name: 'Qwen3 32B',
|
||||
group: 'qwen'
|
||||
},
|
||||
{
|
||||
id: 'qwen/qwen3-30b-a3b-fp8',
|
||||
provider: 'ppio',
|
||||
name: 'Qwen3 30B',
|
||||
group: 'qwen'
|
||||
},
|
||||
{
|
||||
id: 'qwen/qwen2.5-vl-72b-instruct',
|
||||
provider: 'ppio',
|
||||
name: 'Qwen2.5 VL 72B',
|
||||
name: 'Qwen3-vl-235b-a22b-instruct',
|
||||
group: 'qwen'
|
||||
},
|
||||
{
|
||||
@ -378,11 +318,13 @@ export const SYSTEM_MODELS: Record<SystemProviderId | 'defaultModel', Model[]> =
|
||||
],
|
||||
alayanew: [],
|
||||
openai: [
|
||||
{ id: 'gpt-4.5-preview', provider: 'openai', name: ' gpt-4.5-preview', group: 'gpt-4.5' },
|
||||
{ id: 'gpt-4o', provider: 'openai', name: ' GPT-4o', group: 'GPT 4o' },
|
||||
{ id: 'gpt-4o-mini', provider: 'openai', name: ' GPT-4o-mini', group: 'GPT 4o' },
|
||||
{ id: 'o1-mini', provider: 'openai', name: ' o1-mini', group: 'o1' },
|
||||
{ id: 'o1-preview', provider: 'openai', name: ' o1-preview', group: 'o1' }
|
||||
{ id: 'gpt-5.1', provider: 'openai', name: ' GPT 5.1', group: 'GPT 5.1' },
|
||||
{ id: 'gpt-5', provider: 'openai', name: ' GPT 5', group: 'GPT 5' },
|
||||
{ id: 'gpt-5-mini', provider: 'openai', name: ' GPT 5 Mini', group: 'GPT 5' },
|
||||
{ id: 'gpt-5-nano', provider: 'openai', name: ' GPT 5 Nano', group: 'GPT 5' },
|
||||
{ id: 'gpt-5-pro', provider: 'openai', name: ' GPT 5 Pro', group: 'GPT 5' },
|
||||
{ id: 'gpt-5-chat', provider: 'openai', name: ' GPT 5 Chat', group: 'GPT 5' },
|
||||
{ id: 'gpt-image-1', provider: 'openai', name: ' GPT Image 1', group: 'GPT Image' }
|
||||
],
|
||||
'azure-openai': [
|
||||
{
|
||||
@ -400,96 +342,54 @@ export const SYSTEM_MODELS: Record<SystemProviderId | 'defaultModel', Model[]> =
|
||||
],
|
||||
gemini: [
|
||||
{
|
||||
id: 'gemini-1.5-flash',
|
||||
id: 'gemini-2.5-flash',
|
||||
provider: 'gemini',
|
||||
name: 'Gemini 1.5 Flash',
|
||||
group: 'Gemini 1.5'
|
||||
name: 'Gemini 2.5 Flash',
|
||||
group: 'Gemini 2.5'
|
||||
},
|
||||
{
|
||||
id: 'gemini-1.5-flash-8b',
|
||||
id: 'gemini-2.5-pro',
|
||||
provider: 'gemini',
|
||||
name: 'Gemini 1.5 Flash (8B)',
|
||||
group: 'Gemini 1.5'
|
||||
},
|
||||
{
|
||||
id: 'gemini-1.5-pro',
|
||||
name: 'Gemini 1.5 Pro',
|
||||
provider: 'gemini',
|
||||
group: 'Gemini 1.5'
|
||||
},
|
||||
{
|
||||
id: 'gemini-2.0-flash',
|
||||
provider: 'gemini',
|
||||
name: 'Gemini 2.0 Flash',
|
||||
group: 'Gemini 2.0'
|
||||
name: 'Gemini 2.5 Pro',
|
||||
group: 'Gemini 2.5'
|
||||
},
|
||||
{
|
||||
id: 'gemini-2.5-flash-image-preview',
|
||||
provider: 'gemini',
|
||||
name: 'Gemini 2.5 Flash Image',
|
||||
group: 'Gemini 2.5'
|
||||
},
|
||||
{
|
||||
id: 'gemini-3-pro-image-preview',
|
||||
provider: 'gemini',
|
||||
name: 'Gemini 3 Pro Image Preview',
group: 'Gemini 3'
|
||||
},
|
||||
{
|
||||
id: 'gemini-3-pro-preview',
|
||||
provider: 'gemini',
|
||||
name: 'Gemini 3 Pro Preview',
|
||||
group: 'Gemini 3'
|
||||
}
|
||||
],
|
||||
anthropic: [
|
||||
{
|
||||
id: 'claude-haiku-4-5-20251001',
|
||||
provider: 'anthropic',
|
||||
name: 'Claude Haiku 4.5',
|
||||
group: 'Claude 4.5'
|
||||
},
|
||||
{
|
||||
id: 'claude-sonnet-4-5-20250929',
|
||||
id: 'claude-sonnet-4-5',
|
||||
provider: 'anthropic',
|
||||
name: 'Claude Sonnet 4.5',
|
||||
group: 'Claude 4.5'
|
||||
},
|
||||
{
|
||||
id: 'claude-sonnet-4-20250514',
|
||||
id: 'claude-haiku-4-5',
|
||||
provider: 'anthropic',
|
||||
name: 'Claude Sonnet 4',
|
||||
group: 'Claude 4'
|
||||
name: 'Claude Haiku 4.5',
|
||||
group: 'Claude 4.5'
|
||||
},
|
||||
{
|
||||
id: 'claude-opus-4-20250514',
|
||||
id: 'claude-opus-4-5',
|
||||
provider: 'anthropic',
|
||||
name: 'Claude Opus 4',
|
||||
group: 'Claude 4'
|
||||
},
|
||||
{
|
||||
id: 'claude-3-7-sonnet-20250219',
|
||||
provider: 'anthropic',
|
||||
name: 'Claude 3.7 Sonnet',
|
||||
group: 'Claude 3.7'
|
||||
},
|
||||
{
|
||||
id: 'claude-3-5-sonnet-20241022',
|
||||
provider: 'anthropic',
|
||||
name: 'Claude 3.5 Sonnet',
|
||||
group: 'Claude 3.5'
|
||||
},
|
||||
{
|
||||
id: 'claude-3-5-haiku-20241022',
|
||||
provider: 'anthropic',
|
||||
name: 'Claude 3.5 Haiku',
|
||||
group: 'Claude 3.5'
|
||||
},
|
||||
{
|
||||
id: 'claude-3-5-sonnet-20240620',
|
||||
provider: 'anthropic',
|
||||
name: 'Claude 3.5 Sonnet (Legacy)',
|
||||
group: 'Claude 3.5'
|
||||
},
|
||||
{
|
||||
id: 'claude-3-opus-20240229',
|
||||
provider: 'anthropic',
|
||||
name: 'Claude 3 Opus',
|
||||
group: 'Claude 3'
|
||||
},
|
||||
{
|
||||
id: 'claude-3-haiku-20240307',
|
||||
provider: 'anthropic',
|
||||
name: 'Claude 3 Haiku',
|
||||
group: 'Claude 3'
|
||||
name: 'Claude Opus 4.5',
|
||||
group: 'Claude 4.5'
|
||||
}
|
||||
],
|
||||
deepseek: [
|
||||
@ -1073,18 +973,6 @@ export const SYSTEM_MODELS: Record<SystemProviderId | 'defaultModel', Model[]> =
|
||||
provider: 'grok',
|
||||
name: 'Grok 3 Mini Fast',
|
||||
group: 'Grok'
|
||||
},
|
||||
{
|
||||
id: 'grok-2-vision-1212',
|
||||
provider: 'grok',
|
||||
name: 'Grok 2 Vision 1212',
|
||||
group: 'Grok'
|
||||
},
|
||||
{
|
||||
id: 'grok-2-1212',
|
||||
provider: 'grok',
|
||||
name: 'Grok 2 1212',
|
||||
group: 'Grok'
|
||||
}
|
||||
],
|
||||
mistral: [
|
||||
@ -1808,34 +1696,58 @@ export const SYSTEM_MODELS: Record<SystemProviderId | 'defaultModel', Model[]> =
|
||||
],
|
||||
aionly: [
|
||||
{
|
||||
id: 'claude-opus-4.1',
|
||||
name: 'claude-opus-4.1',
|
||||
id: 'claude-opus-4-5-20251101',
|
||||
name: 'Claude Opus 4.5',
|
||||
provider: 'aionly',
|
||||
group: 'claude'
|
||||
group: 'Anthropic'
|
||||
},
|
||||
{
|
||||
id: 'claude-sonnet4',
|
||||
name: 'claude-sonnet4',
|
||||
id: 'claude-haiku-4-5-20251001',
|
||||
name: 'Claude Haiku 4.5',
|
||||
provider: 'aionly',
|
||||
group: 'claude'
|
||||
group: 'Anthropic'
|
||||
},
|
||||
{
|
||||
id: 'claude-3.5-sonnet-v2',
|
||||
name: 'claude-3.5-sonnet-v2',
|
||||
id: 'claude-sonnet-4-5-20250929',
|
||||
name: 'Claude Sonnet 4.5',
|
||||
provider: 'aionly',
|
||||
group: 'claude'
|
||||
group: 'Anthropic'
|
||||
},
|
||||
{
|
||||
id: 'gpt-4.1',
|
||||
name: 'gpt-4.1',
|
||||
id: 'gpt-5.1',
|
||||
name: 'GPT-5.1',
|
||||
provider: 'aionly',
|
||||
group: 'gpt'
|
||||
group: 'OpenAI'
|
||||
},
|
||||
{
|
||||
id: 'gpt-5.1-chat',
|
||||
name: 'GPT-5.1 Chat',
|
||||
provider: 'aionly',
|
||||
group: 'OpenAI'
|
||||
},
|
||||
{
|
||||
id: 'gpt-5-pro',
|
||||
name: 'GPT 5 Pro',
|
||||
provider: 'aionly',
|
||||
group: 'OpenAI'
|
||||
},
|
||||
{
|
||||
id: 'gemini-3-pro-preview',
|
||||
name: 'Gemini 3 Pro Preview',
|
||||
provider: 'aionly',
|
||||
group: 'Google'
|
||||
},
|
||||
{
|
||||
id: 'gemini-2.5-pro',
|
||||
name: 'Gemini 2.5 Pro',
|
||||
provider: 'aionly',
|
||||
group: 'Google'
|
||||
},
|
||||
{
|
||||
id: 'gemini-2.5-flash',
|
||||
name: 'gemini-2.5-flash',
|
||||
name: 'Gemini 2.5 Flash',
|
||||
provider: 'aionly',
|
||||
group: 'gemini'
|
||||
group: 'Google'
|
||||
}
|
||||
],
|
||||
longcat: [
|
||||
|
||||
@ -277,6 +277,10 @@ export const GEMINI_THINKING_MODEL_REGEX =
|
||||
export const isSupportedThinkingTokenGeminiModel = (model: Model): boolean => {
|
||||
const modelId = getLowerBaseModelName(model.id, '/')
|
||||
if (GEMINI_THINKING_MODEL_REGEX.test(modelId)) {
|
||||
// ref: https://docs.cloud.google.com/vertex-ai/generative-ai/docs/models/gemini/3-pro-image
|
||||
if (modelId.includes('gemini-3-pro-image')) {
|
||||
return true
|
||||
}
|
||||
if (modelId.includes('image') || modelId.includes('tts')) {
|
||||
return false
|
||||
}
|
||||
|
||||
@ -15,6 +15,7 @@ import {
|
||||
isSupportVerbosityModel
|
||||
} from './openai'
|
||||
import { isQwenMTModel } from './qwen'
|
||||
import { isClaude45ReasoningModel } from './reasoning'
|
||||
import { isFunctionCallingModel } from './tooluse'
|
||||
import { isGenerateImageModel, isTextToImageModel, isVisionModel } from './vision'
|
||||
export const NOT_SUPPORTED_REGEX = /(?:^tts|whisper|speech)/i
|
||||
@ -44,20 +45,71 @@ export function isSupportedModel(model: OpenAI.Models.Model): boolean {
|
||||
return !NOT_SUPPORTED_REGEX.test(modelId)
|
||||
}
|
||||
|
||||
export function isNotSupportTemperatureAndTopP(model: Model): boolean {
|
||||
/**
|
||||
* Check if the model supports temperature parameter
|
||||
* @param model - The model to check
|
||||
* @returns true if the model supports temperature parameter
|
||||
*/
|
||||
export function isSupportTemperatureModel(model: Model | undefined | null): boolean {
|
||||
if (!model) {
|
||||
return true
|
||||
}
|
||||
|
||||
if (
|
||||
(isOpenAIReasoningModel(model) && !isOpenAIOpenWeightModel(model)) ||
|
||||
isOpenAIChatCompletionOnlyModel(model) ||
|
||||
isQwenMTModel(model)
|
||||
) {
|
||||
return true
|
||||
}
|
||||
|
||||
return false
|
||||
}
|
||||
|
||||
// OpenAI reasoning models (except open weight) don't support temperature
|
||||
if (isOpenAIReasoningModel(model) && !isOpenAIOpenWeightModel(model)) {
|
||||
return false
|
||||
}
|
||||
|
||||
// OpenAI chat completion only models don't support temperature
|
||||
if (isOpenAIChatCompletionOnlyModel(model)) {
|
||||
return false
|
||||
}
|
||||
|
||||
// Qwen MT models don't support temperature
|
||||
if (isQwenMTModel(model)) {
|
||||
return false
|
||||
}
|
||||
|
||||
return true
|
||||
}
|
||||
|
||||
/**
|
||||
* Check if the model supports top_p parameter
|
||||
* @param model - The model to check
|
||||
* @returns true if the model supports top_p parameter
|
||||
*/
|
||||
export function isSupportTopPModel(model: Model | undefined | null): boolean {
|
||||
if (!model) {
|
||||
return false
|
||||
}
|
||||
|
||||
// OpenAI reasoning models (except open weight) don't support top_p
|
||||
if (isOpenAIReasoningModel(model) && !isOpenAIOpenWeightModel(model)) {
|
||||
return false
|
||||
}
|
||||
|
||||
// OpenAI chat completion only models don't support top_p
|
||||
if (isOpenAIChatCompletionOnlyModel(model)) {
|
||||
return false
|
||||
}
|
||||
|
||||
// Qwen MT models don't support top_p
|
||||
if (isQwenMTModel(model)) {
|
||||
return false
|
||||
}
|
||||
|
||||
return true
|
||||
}
|
||||
|
||||
/**
|
||||
* Check if the model enforces mutual exclusivity between temperature and top_p parameters.
|
||||
* Currently only Claude 4.5 reasoning models require this constraint.
|
||||
* @param model - The model to check
|
||||
* @returns true if temperature and top_p are mutually exclusive for this model
|
||||
*/
|
||||
export function isTemperatureTopPMutuallyExclusiveModel(model: Model | undefined | null): boolean {
|
||||
if (!model) return false
|
||||
return isClaude45ReasoningModel(model)
|
||||
}
|
||||
|
||||
export function isGemmaModel(model?: Model): boolean {
|
||||
|
||||
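Taken together, these helpers replace the old negative check isNotSupportTemperatureAndTopP with positive capability queries. A minimal sketch of how a caller might combine them when building sampling parameters; buildSamplingParams, the SamplingSettings shape, and the import path are illustrative assumptions, only the three helpers themselves come from this change.

import type { Model } from '@renderer/types' // assumed location of the Model type

interface SamplingSettings {
  temperature?: number
  topP?: number
}

// Hypothetical caller: drop parameters the target model does not accept.
function buildSamplingParams(model: Model, settings: SamplingSettings): SamplingSettings {
  const params: SamplingSettings = {}
  if (isSupportTemperatureModel(model) && settings.temperature !== undefined) {
    params.temperature = settings.temperature
  }
  if (isSupportTopPModel(model) && settings.topP !== undefined) {
    params.topP = settings.topP
  }
  // Claude 4.5 reasoning models accept temperature or top_p, but not both in one request.
  if (isTemperatureTopPMutuallyExclusiveModel(model) && params.temperature !== undefined) {
    delete params.topP
  }
  return params
}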
@ -306,7 +306,7 @@ export const SEARCH_SUMMARY_PROMPT_KNOWLEDGE_ONLY = `
|
||||
**Use user's language to rephrase the question.**
|
||||
Follow these guidelines:
|
||||
1. If the question is a simple writing task, greeting (e.g., Hi, Hello, How are you), or does not require searching for information (unless the greeting contains a follow-up question), return 'not_needed' in the 'question' XML block. This indicates that no search is required.
|
||||
2. For knowledge, You need rewrite user query into 'rewrite' XML block with one alternative version while preserving the original intent and meaning. Also include the original question in the 'question' block.
|
||||
2. For knowledge queries, you need to rewrite the user query into the 'rewrite' XML block with one alternative version while preserving the original intent and meaning. Also include the rephrased or decomposed question(s) in the 'question' block.
3. Always return the rephrased question inside the 'question' XML block.
|
||||
4. Always wrap the rephrased question in the appropriate XML blocks: use <knowledge></knowledge> for queries that can be answered from a pre-existing knowledge base. Ensure that the rephrased question is always contained within a <question></question> block inside the wrapper.
|
||||
5. *use knowledge to rephrase the question*
|
||||
|
||||
@ -6,6 +6,9 @@
|
||||
"failed": "Failed to add a agent",
|
||||
"invalid_agent": "Invalid Agent"
|
||||
},
|
||||
"model": {
|
||||
"tooltip": "Currently, only models that support Anthropic endpoints are available for the Agent feature."
|
||||
},
|
||||
"title": "Add Agent",
|
||||
"type": {
|
||||
"placeholder": "Select an agent type"
|
||||
@ -2514,7 +2517,8 @@
|
||||
},
|
||||
"preview": {
|
||||
"copy": {
|
||||
"image": "Copy as image"
|
||||
"image": "Copy as image",
|
||||
"src": "Copy Image Source"
|
||||
},
|
||||
"dialog": "Open Dialog",
|
||||
"label": "Preview",
|
||||
@ -3908,6 +3912,7 @@
|
||||
"jsonSaveError": "Failed to save JSON configuration.",
|
||||
"jsonSaveSuccess": "JSON configuration has been saved.",
|
||||
"logoUrl": "Logo URL",
|
||||
"logs": "Logs",
|
||||
"longRunning": "Long Running Mode",
|
||||
"longRunningTooltip": "When enabled, the server supports long-running tasks. When receiving progress notifications, the timeout will be reset and the maximum execution time will be extended to 10 minutes.",
|
||||
"marketplaces": "Marketplaces",
|
||||
@ -3927,6 +3932,7 @@
|
||||
"name": "Name",
|
||||
"newServer": "MCP Server",
|
||||
"noDescriptionAvailable": "No description available",
|
||||
"noLogs": "No logs yet",
|
||||
"noServers": "No servers configured",
|
||||
"not_support": "Model not supported",
|
||||
"npx_list": {
|
||||
|
||||
@ -6,6 +6,9 @@
|
||||
"failed": "添加 Agent 失败",
|
||||
"invalid_agent": "无效的 Agent"
|
||||
},
|
||||
"model": {
|
||||
"tooltip": "目前,只有支持 Anthropic 端点的模型可用于 Agent 功能。"
|
||||
},
|
||||
"title": "添加 Agent",
|
||||
"type": {
|
||||
"placeholder": "选择 Agent 类型"
|
||||
@ -2514,7 +2517,8 @@
|
||||
},
|
||||
"preview": {
|
||||
"copy": {
|
||||
"image": "复制为图片"
|
||||
"image": "复制为图片",
|
||||
"src": "复制图片源"
|
||||
},
|
||||
"dialog": "打开预览窗口",
|
||||
"label": "预览",
|
||||
@ -3908,6 +3912,7 @@
|
||||
"jsonSaveError": "保存 JSON 配置失败",
|
||||
"jsonSaveSuccess": "JSON 配置已保存",
|
||||
"logoUrl": "标志网址",
|
||||
"logs": "日志",
|
||||
"longRunning": "长时间运行模式",
|
||||
"longRunningTooltip": "启用后,服务器支持长时间任务,接收到进度通知时会重置超时计时器,并延长最大超时时间至10分钟",
|
||||
"marketplaces": "市场",
|
||||
@ -3927,6 +3932,7 @@
|
||||
"name": "名称",
|
||||
"newServer": "MCP 服务器",
|
||||
"noDescriptionAvailable": "暂无描述",
|
||||
"noLogs": "暂无日志",
|
||||
"noServers": "未配置服务器",
|
||||
"not_support": "模型不支持",
|
||||
"npx_list": {
|
||||
|
||||
@ -6,6 +6,9 @@
|
||||
"failed": "無法新增代理人",
|
||||
"invalid_agent": "無效的 Agent"
|
||||
},
|
||||
"model": {
|
||||
"tooltip": "目前,僅支援 Anthropic 端點的模型可供代理功能使用。"
|
||||
},
|
||||
"title": "新增代理",
|
||||
"type": {
|
||||
"placeholder": "選擇 Agent 類型"
|
||||
@ -2220,8 +2223,8 @@
|
||||
"untitled_folder": "新資料夾",
|
||||
"untitled_note": "無標題筆記",
|
||||
"upload_failed": "筆記上傳失敗",
|
||||
"upload_files": "[to be translated]:Upload Files",
|
||||
"upload_folder": "[to be translated]:Upload Folder",
|
||||
"upload_files": "上傳檔案",
|
||||
"upload_folder": "上傳資料夾",
|
||||
"upload_success": "筆記上傳成功",
|
||||
"uploading_files": "正在上傳 {{count}} 個檔案..."
|
||||
},
|
||||
@ -2514,7 +2517,8 @@
|
||||
},
|
||||
"preview": {
|
||||
"copy": {
|
||||
"image": "複製為圖片"
|
||||
"image": "複製為圖片",
|
||||
"src": "複製圖片來源"
|
||||
},
|
||||
"dialog": "開啟預覽窗口",
|
||||
"label": "預覽",
|
||||
@ -3908,6 +3912,7 @@
|
||||
"jsonSaveError": "保存 JSON 配置失敗",
|
||||
"jsonSaveSuccess": "JSON 配置已儲存",
|
||||
"logoUrl": "標誌網址",
|
||||
"logs": "日誌",
|
||||
"longRunning": "長時間運行模式",
|
||||
"longRunningTooltip": "啟用後,伺服器支援長時間任務,接收到進度通知時會重置超時計時器,並延長最大超時時間至10分鐘",
|
||||
"marketplaces": "市場",
|
||||
@ -3927,6 +3932,7 @@
|
||||
"name": "名稱",
|
||||
"newServer": "MCP 伺服器",
|
||||
"noDescriptionAvailable": "描述不存在",
|
||||
"noLogs": "暫無日誌",
|
||||
"noServers": "未設定伺服器",
|
||||
"not_support": "不支援此模型",
|
||||
"npx_list": {
|
||||
|
||||
@ -6,6 +6,9 @@
|
||||
"failed": "Agent hinzufügen fehlgeschlagen",
|
||||
"invalid_agent": "Ungültiger Agent"
|
||||
},
|
||||
"model": {
|
||||
"tooltip": "Derzeit sind für die Agent-Funktion nur Modelle verfügbar, die Anthropic-Endpunkte unterstützen."
|
||||
},
|
||||
"title": "Agent hinzufügen",
|
||||
"type": {
|
||||
"placeholder": "Agent-Typ auswählen"
|
||||
@ -2220,10 +2223,10 @@
|
||||
"untitled_folder": "Neuer Ordner",
|
||||
"untitled_note": "Unbenannte Notiz",
|
||||
"upload_failed": "Notizen-Upload fehlgeschlagen",
|
||||
"upload_files": "[to be translated]:Upload Files",
|
||||
"upload_folder": "[to be translated]:Upload Folder",
|
||||
"upload_files": "Dateien hochladen",
|
||||
"upload_folder": "Ordner hochladen",
|
||||
"upload_success": "Notizen erfolgreich hochgeladen",
|
||||
"uploading_files": "[to be translated]:Uploading {{count}} files..."
|
||||
"uploading_files": "Lade {{count}} Dateien hoch..."
|
||||
},
|
||||
"notification": {
|
||||
"assistant": "Assistenten-Antwort",
|
||||
@ -2514,7 +2517,8 @@
|
||||
},
|
||||
"preview": {
|
||||
"copy": {
|
||||
"image": "Als Bild kopieren"
|
||||
"image": "Als Bild kopieren",
|
||||
"src": "Bildquelle kopieren"
|
||||
},
|
||||
"dialog": "Vorschaufenster öffnen",
|
||||
"label": "Vorschau",
|
||||
@ -3908,6 +3912,7 @@
|
||||
"jsonSaveError": "JSON-Konfiguration speichern fehlgeschlagen",
|
||||
"jsonSaveSuccess": "JSON-Konfiguration erfolgreich gespeichert",
|
||||
"logoUrl": "Logo-URL",
|
||||
"logs": "Protokolle",
|
||||
"longRunning": "Lang laufender Modus",
|
||||
"longRunningTooltip": "Nach Aktivierung unterstützt der Server lange Aufgaben. Wenn ein Fortschrittsbenachrichtigung empfangen wird, wird der Timeout-Timer zurückgesetzt und die maximale Timeout-Zeit auf 10 Minuten verlängert",
|
||||
"marketplaces": "Marktplätze",
|
||||
@ -3927,6 +3932,7 @@
|
||||
"name": "Name",
|
||||
"newServer": "MCP-Server",
|
||||
"noDescriptionAvailable": "Keine Beschreibung",
|
||||
"noLogs": "Noch keine Protokolle",
|
||||
"noServers": "Server nicht konfiguriert",
|
||||
"not_support": "Modell nicht unterstützt",
|
||||
"npx_list": {
|
||||
|
||||
@ -6,6 +6,9 @@
|
||||
"failed": "Αποτυχία προσθήκης πράκτορα",
|
||||
"invalid_agent": "Μη έγκυρος Agent"
|
||||
},
|
||||
"model": {
|
||||
"tooltip": "Προς το παρόν, μόνο μοντέλα που υποστηρίζουν τελικά σημεία Anthropic είναι διαθέσιμα για τη λειτουργία Agent."
|
||||
},
|
||||
"title": "Προσθήκη Agent",
|
||||
"type": {
|
||||
"placeholder": "Επιλέξτε τύπο Agent"
|
||||
@ -2220,10 +2223,10 @@
|
||||
"untitled_folder": "Νέος φάκελος",
|
||||
"untitled_note": "σημείωση χωρίς τίτλο",
|
||||
"upload_failed": "Η σημείωση δεν ανέβηκε",
|
||||
"upload_files": "[to be translated]:Upload Files",
|
||||
"upload_folder": "[to be translated]:Upload Folder",
|
||||
"upload_files": "Ανέβασμα Αρχείων",
|
||||
"upload_folder": "Ανέβασμα Φακέλου",
|
||||
"upload_success": "Οι σημειώσεις μεταφορτώθηκαν με επιτυχία",
|
||||
"uploading_files": "[to be translated]:Uploading {{count}} files..."
|
||||
"uploading_files": "Ανεβάζονται {{count}} αρχεία..."
|
||||
},
|
||||
"notification": {
|
||||
"assistant": "Απάντηση Βοηθού",
|
||||
@ -2514,7 +2517,8 @@
|
||||
},
|
||||
"preview": {
|
||||
"copy": {
|
||||
"image": "Αντιγραφή ως εικόνα"
|
||||
"image": "Αντιγραφή ως εικόνα",
|
||||
"src": "Αντιγραφή πηγής εικόνας"
|
||||
},
|
||||
"dialog": "Άνοιγμα παραθύρου προεπισκόπησης",
|
||||
"label": "Προεπισκόπηση",
|
||||
@ -3908,6 +3912,7 @@
|
||||
"jsonSaveError": "Αποτυχία αποθήκευσης της διαμορφωτικής ρύθμισης JSON",
|
||||
"jsonSaveSuccess": "Η διαμορφωτική ρύθμιση JSON αποθηκεύτηκε επιτυχώς",
|
||||
"logoUrl": "URL Λογότυπου",
|
||||
"logs": "Αρχεία καταγραφής",
|
||||
"longRunning": "Μακροχρόνια λειτουργία",
|
||||
"longRunningTooltip": "Όταν ενεργοποιηθεί, ο διακομιστής υποστηρίζει μακροχρόνιες εργασίες, επαναφέρει το χρονικό όριο μετά από λήψη ειδοποίησης προόδου και επεκτείνει το μέγιστο χρονικό όριο σε 10 λεπτά.",
|
||||
"marketplaces": "Αγορές",
|
||||
@ -3927,6 +3932,7 @@
|
||||
"name": "Όνομα",
|
||||
"newServer": "Διακομιστής MCP",
|
||||
"noDescriptionAvailable": "Δεν υπάρχει διαθέσιμη περιγραφή",
|
||||
"noLogs": "Δεν υπάρχουν αρχεία καταγραφής ακόμα",
|
||||
"noServers": "Δεν έχουν ρυθμιστεί διακομιστές",
|
||||
"not_support": "Το μοντέλο δεν υποστηρίζεται",
|
||||
"npx_list": {
|
||||
|
||||
@ -6,6 +6,9 @@
|
||||
"failed": "Error al añadir agente",
|
||||
"invalid_agent": "Agent inválido"
|
||||
},
|
||||
"model": {
|
||||
"tooltip": "Actualmente, solo los modelos que admiten puntos finales de Anthropic están disponibles para la función Agente."
|
||||
},
|
||||
"title": "Agregar Agente",
|
||||
"type": {
|
||||
"placeholder": "Seleccionar tipo de Agente"
|
||||
@ -2220,10 +2223,10 @@
|
||||
"untitled_folder": "Nueva carpeta",
|
||||
"untitled_note": "Nota sin título",
|
||||
"upload_failed": "Error al cargar la nota",
|
||||
"upload_files": "[to be translated]:Upload Files",
|
||||
"upload_folder": "[to be translated]:Upload Folder",
|
||||
"upload_files": "Subir archivos",
|
||||
"upload_folder": "Carpeta de subida",
|
||||
"upload_success": "Nota cargada con éxito",
|
||||
"uploading_files": "[to be translated]:Uploading {{count}} files..."
|
||||
"uploading_files": "Subiendo {{count}} archivos..."
|
||||
},
|
||||
"notification": {
|
||||
"assistant": "Respuesta del asistente",
|
||||
@ -2514,7 +2517,8 @@
|
||||
},
|
||||
"preview": {
|
||||
"copy": {
|
||||
"image": "Copiar como imagen"
|
||||
"image": "Copiar como imagen",
|
||||
"src": "Copia la fuente de la imagen"
|
||||
},
|
||||
"dialog": "Abrir la ventana de vista previa",
|
||||
"label": "Vista previa",
|
||||
@ -3908,6 +3912,7 @@
|
||||
"jsonSaveError": "Fallo al guardar la configuración JSON",
|
||||
"jsonSaveSuccess": "Configuración JSON guardada exitosamente",
|
||||
"logoUrl": "URL del logotipo",
|
||||
"logs": "Registros",
|
||||
"longRunning": "Modo de ejecución prolongada",
|
||||
"longRunningTooltip": "Una vez habilitado, el servidor admite tareas de larga duración, reinicia el temporizador de tiempo de espera al recibir notificaciones de progreso y amplía el tiempo máximo de espera hasta 10 minutos.",
|
||||
"marketplaces": "Mercados",
|
||||
@ -3927,6 +3932,7 @@
|
||||
"name": "Nombre",
|
||||
"newServer": "Servidor MCP",
|
||||
"noDescriptionAvailable": "Sin descripción disponible por ahora",
|
||||
"noLogs": "Aún no hay registros",
|
||||
"noServers": "No se han configurado servidores",
|
||||
"not_support": "El modelo no es compatible",
|
||||
"npx_list": {
|
||||
|
||||
@ -6,6 +6,9 @@
|
||||
"failed": "Échec de l'ajout de l'agent",
|
||||
"invalid_agent": "Agent invalide"
|
||||
},
|
||||
"model": {
|
||||
"tooltip": "Actuellement, seuls les modèles qui prennent en charge les points de terminaison Anthropic sont disponibles pour la fonctionnalité Agent."
|
||||
},
|
||||
"title": "Ajouter un agent",
|
||||
"type": {
|
||||
"placeholder": "Sélectionner le type d'Agent"
|
||||
@ -2220,10 +2223,10 @@
|
||||
"untitled_folder": "nouveau dossier",
|
||||
"untitled_note": "Note sans titre",
|
||||
"upload_failed": "Échec du téléchargement de la note",
|
||||
"upload_files": "[to be translated]:Upload Files",
|
||||
"upload_folder": "[to be translated]:Upload Folder",
|
||||
"upload_files": "Télécharger des fichiers",
|
||||
"upload_folder": "Puis dossier de téléchargement",
|
||||
"upload_success": "Note téléchargée avec succès",
|
||||
"uploading_files": "[to be translated]:Uploading {{count}} files..."
|
||||
"uploading_files": "Téléchargement de {{count}} fichiers..."
|
||||
},
|
||||
"notification": {
|
||||
"assistant": "Réponse de l'assistant",
|
||||
@ -2514,7 +2517,8 @@
|
||||
},
|
||||
"preview": {
|
||||
"copy": {
|
||||
"image": "Copier en tant qu'image"
|
||||
"image": "Copier en tant qu'image",
|
||||
"src": "Copier la source de l'image"
|
||||
},
|
||||
"dialog": "Ouvrir la fenêtre d'aperçu",
|
||||
"label": "Aperçu",
|
||||
@ -3908,6 +3912,7 @@
|
||||
"jsonSaveError": "Échec de la sauvegarde de la configuration JSON",
|
||||
"jsonSaveSuccess": "Configuration JSON sauvegardée",
|
||||
"logoUrl": "Адрес логотипа",
|
||||
"logs": "Journaux",
|
||||
"longRunning": "Mode d'exécution prolongée",
|
||||
"longRunningTooltip": "Une fois activé, le serveur prend en charge les tâches de longue durée, réinitialise le minuteur de temporisation à la réception des notifications de progression, et prolonge le délai d'expiration maximal à 10 minutes.",
|
||||
"marketplaces": "Places de marché",
|
||||
@ -3927,6 +3932,7 @@
|
||||
"name": "Nom",
|
||||
"newServer": "Сервер MCP",
|
||||
"noDescriptionAvailable": "Aucune description disponible pour le moment",
|
||||
"noLogs": "Aucun journal pour le moment",
|
||||
"noServers": "Aucun serveur configuré",
|
||||
"not_support": "Модель не поддерживается",
|
||||
"npx_list": {
|
||||
|
||||
@ -6,6 +6,9 @@
|
||||
"failed": "エージェントの追加に失敗しました",
|
||||
"invalid_agent": "無効なエージェント"
|
||||
},
|
||||
"model": {
|
||||
"tooltip": "現在、エージェント機能では、Anthropicエンドポイントをサポートするモデルのみが利用可能です。"
|
||||
},
|
||||
"title": "エージェントを追加",
|
||||
"type": {
|
||||
"placeholder": "エージェントタイプを選択"
|
||||
@ -2220,10 +2223,10 @@
|
||||
"untitled_folder": "新ファイル夹",
|
||||
"untitled_note": "無題のメモ",
|
||||
"upload_failed": "ノートのアップロードに失敗しました",
|
||||
"upload_files": "[to be translated]:Upload Files",
|
||||
"upload_folder": "[to be translated]:Upload Folder",
|
||||
"upload_files": "ファイルをアップロード",
|
||||
"upload_folder": "アップロードフォルダ",
|
||||
"upload_success": "ノートのアップロードが成功しました",
|
||||
"uploading_files": "[to be translated]:Uploading {{count}} files..."
|
||||
"uploading_files": "{{count}} 個のファイルをアップロード中..."
|
||||
},
|
||||
"notification": {
|
||||
"assistant": "助手回應",
|
||||
@ -2514,7 +2517,8 @@
|
||||
},
|
||||
"preview": {
|
||||
"copy": {
|
||||
"image": "画像としてコピー"
|
||||
"image": "画像としてコピー",
|
||||
"src": "画像ソースをコピー"
|
||||
},
|
||||
"dialog": "ダイアログを開く",
|
||||
"label": "プレビュー",
|
||||
@ -3908,6 +3912,7 @@
|
||||
"jsonSaveError": "JSON設定の保存に失敗しました",
|
||||
"jsonSaveSuccess": "JSON設定が保存されました。",
|
||||
"logoUrl": "ロゴURL",
|
||||
"logs": "ログ",
|
||||
"longRunning": "長時間運行モード",
|
||||
"longRunningTooltip": "このオプションを有効にすると、サーバーは長時間のタスクをサポートします。進行状況通知を受信すると、タイムアウトがリセットされ、最大実行時間が10分に延長されます。",
|
||||
"marketplaces": "マーケットプレイス",
|
||||
@ -3927,6 +3932,7 @@
|
||||
"name": "名前",
|
||||
"newServer": "MCP サーバー",
|
||||
"noDescriptionAvailable": "説明がありません",
|
||||
"noLogs": "ログはまだありません",
|
||||
"noServers": "サーバーが設定されていません",
|
||||
"not_support": "モデルはサポートされていません",
|
||||
"npx_list": {
|
||||
|
||||
@ -6,6 +6,9 @@
|
||||
"failed": "Falha ao adicionar agente",
|
||||
"invalid_agent": "Agent inválido"
|
||||
},
|
||||
"model": {
|
||||
"tooltip": "Atualmente, apenas modelos que suportam endpoints da Anthropic estão disponíveis para o recurso Agente."
|
||||
},
|
||||
"title": "Adicionar Agente",
|
||||
"type": {
|
||||
"placeholder": "Selecionar tipo de Agente"
|
||||
@ -2220,10 +2223,10 @@
|
||||
"untitled_folder": "Nova pasta",
|
||||
"untitled_note": "Nota sem título",
|
||||
"upload_failed": "Falha ao carregar a nota",
|
||||
"upload_files": "[to be translated]:Upload Files",
|
||||
"upload_folder": "[to be translated]:Upload Folder",
|
||||
"upload_files": "Carregar Ficheiros",
|
||||
"upload_folder": "Carregar Pasta",
|
||||
"upload_success": "Nota carregada com sucesso",
|
||||
"uploading_files": "[to be translated]:Uploading {{count}} files..."
|
||||
"uploading_files": "A enviar {{count}} ficheiros..."
|
||||
},
|
||||
"notification": {
|
||||
"assistant": "Resposta do assistente",
|
||||
@ -2514,7 +2517,8 @@
|
||||
},
|
||||
"preview": {
|
||||
"copy": {
|
||||
"image": "Copiar como imagem"
|
||||
"image": "Copiar como imagem",
|
||||
"src": "Copiar Origem da Imagem"
|
||||
},
|
||||
"dialog": "Abrir janela de pré-visualização",
|
||||
"label": "Pré-visualização",
|
||||
@ -3908,6 +3912,7 @@
|
||||
"jsonSaveError": "Falha ao salvar configuração JSON",
|
||||
"jsonSaveSuccess": "Configuração JSON salva com sucesso",
|
||||
"logoUrl": "URL do Logotipo",
|
||||
"logs": "Registros",
|
||||
"longRunning": "Modo de execução prolongada",
|
||||
"longRunningTooltip": "Quando ativado, o servidor suporta tarefas de longa duração, redefinindo o temporizador de tempo limite ao receber notificações de progresso e estendendo o tempo máximo de tempo limite para 10 minutos.",
|
||||
"marketplaces": "Mercados",
|
||||
@ -3927,6 +3932,7 @@
|
||||
"name": "Nome",
|
||||
"newServer": "Servidor MCP",
|
||||
"noDescriptionAvailable": "Nenhuma descrição disponível no momento",
|
||||
"noLogs": "Ainda sem registos",
|
||||
"noServers": "Nenhum servidor configurado",
|
||||
"not_support": "Modelo Não Suportado",
|
||||
"npx_list": {
|
||||
|
||||
@ -6,6 +6,9 @@
|
||||
"failed": "Не удалось добавить агента",
|
||||
"invalid_agent": "Недействительный агент"
|
||||
},
|
||||
"model": {
|
||||
"tooltip": "В настоящее время для функции агента доступны только модели, поддерживающие конечные точки Anthropic."
|
||||
},
|
||||
"title": "Добавить агента",
|
||||
"type": {
|
||||
"placeholder": "Выбор типа агента"
|
||||
@ -2220,10 +2223,10 @@
|
||||
"untitled_folder": "Новая папка",
|
||||
"untitled_note": "Незаглавленная заметка",
|
||||
"upload_failed": "Не удалось загрузить заметку",
|
||||
"upload_files": "[to be translated]:Upload Files",
|
||||
"upload_folder": "[to be translated]:Upload Folder",
|
||||
"upload_files": "Загрузить файлы",
|
||||
"upload_folder": "Загрузить папку",
|
||||
"upload_success": "Заметка успешно загружена",
|
||||
"uploading_files": "[to be translated]:Uploading {{count}} files..."
|
||||
"uploading_files": "Загрузка {{count}} файлов..."
|
||||
},
|
||||
"notification": {
|
||||
"assistant": "Ответ ассистента",
|
||||
@ -2514,7 +2517,8 @@
|
||||
},
|
||||
"preview": {
|
||||
"copy": {
|
||||
"image": "Скопировать как изображение"
|
||||
"image": "Скопировать как изображение",
|
||||
"src": "Копировать источник изображения"
|
||||
},
|
||||
"dialog": "Открыть диалог",
|
||||
"label": "Предварительный просмотр",
|
||||
@ -3908,6 +3912,7 @@
|
||||
"jsonSaveError": "Не удалось сохранить конфигурацию JSON",
|
||||
"jsonSaveSuccess": "JSON конфигурация сохранена",
|
||||
"logoUrl": "URL логотипа",
|
||||
"logs": "Журналы",
|
||||
"longRunning": "Длительный режим работы",
|
||||
"longRunningTooltip": "Включив эту опцию, сервер будет поддерживать длительные задачи. При получении уведомлений о ходе выполнения будет сброшен тайм-аут и максимальное время выполнения будет увеличено до 10 минут.",
|
||||
"marketplaces": "Торговые площадки",
|
||||
@ -3927,6 +3932,7 @@
|
||||
"name": "Имя",
|
||||
"newServer": "MCP сервер",
|
||||
"noDescriptionAvailable": "Описание отсутствует",
|
||||
"noLogs": "Логов пока нет",
|
||||
"noServers": "Серверы не настроены",
|
||||
"not_support": "Модель не поддерживается",
|
||||
"npx_list": {
|
||||
|
||||
@ -93,7 +93,7 @@ const ThinkingButton: FC<Props> = ({ quickPanel, model, assistantId }): ReactEle
|
||||
level: option,
|
||||
label: getReasoningEffortOptionsLabel(option),
|
||||
description: '',
|
||||
icon: ThinkingIcon(option),
|
||||
icon: ThinkingIcon({ option }),
|
||||
isSelected: currentReasoningEffort === option,
|
||||
action: () => onThinkingChange(option)
|
||||
}))
|
||||
@ -135,7 +135,7 @@ const ThinkingButton: FC<Props> = ({ quickPanel, model, assistantId }): ReactEle
|
||||
{
|
||||
label: t('assistants.settings.reasoning_effort.label'),
|
||||
description: '',
|
||||
icon: ThinkingIcon(currentReasoningEffort),
|
||||
icon: ThinkingIcon({ option: currentReasoningEffort }),
|
||||
isMenu: true,
|
||||
action: () => openQuickPanel()
|
||||
}
|
||||
@ -163,16 +163,18 @@ const ThinkingButton: FC<Props> = ({ quickPanel, model, assistantId }): ReactEle
|
||||
aria-label={ariaLabel}
|
||||
aria-pressed={currentReasoningEffort !== 'none'}
|
||||
style={isFixedReasoning ? { cursor: 'default' } : undefined}>
|
||||
{ThinkingIcon(currentReasoningEffort)}
|
||||
{ThinkingIcon({ option: currentReasoningEffort, isFixedReasoning })}
|
||||
</ActionIconButton>
|
||||
</Tooltip>
|
||||
)
|
||||
}
|
||||
|
||||
const ThinkingIcon = (option?: ThinkingOption) => {
|
||||
const ThinkingIcon = (props: { option?: ThinkingOption; isFixedReasoning?: boolean }) => {
|
||||
let IconComponent: React.FC<React.SVGProps<SVGSVGElement>> | null = null
|
||||
|
||||
switch (option) {
|
||||
if (props.isFixedReasoning) {
|
||||
IconComponent = MdiLightbulbAutoOutline
|
||||
} else {
|
||||
switch (props.option) {
|
||||
case 'minimal':
|
||||
IconComponent = MdiLightbulbOn30
|
||||
break
|
||||
@ -195,6 +197,7 @@ const ThinkingIcon = (option?: ThinkingOption) => {
|
||||
IconComponent = MdiLightbulbOffOutline
|
||||
break
|
||||
}
|
||||
}
|
||||
|
||||
return <IconComponent className="icon" width={18} height={18} style={{ marginTop: -2 }} />
|
||||
}
|
||||
|
||||
@ -20,6 +20,10 @@ const UpdateAppButton: FC = () => {
|
||||
return null
|
||||
}
|
||||
|
||||
if (update.ignore) {
|
||||
return null
|
||||
}
|
||||
|
||||
const handleOpenUpdateDialog = () => {
|
||||
UpdateDialogPopup.show({ releaseInfo: update.info || null })
|
||||
}
|
||||
@ -30,7 +34,7 @@ const UpdateAppButton: FC = () => {
|
||||
className="nodrag"
|
||||
onClick={handleOpenUpdateDialog}
|
||||
icon={<SyncOutlined />}
|
||||
color="orange"
|
||||
color="primary"
|
||||
variant="outlined"
|
||||
size="small">
|
||||
{t('button.update_available')}
|
||||
|
||||
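The early return on update.ignore works together with the new onIgnore handler in UpdateDialogPopup, which dispatches setUpdateState({ ignore: true }). A rough sketch of the runtime update state this implies; only the info and ignore fields are taken from usage in this commit, the interface name and everything else about the slice are assumptions.

import type { UpdateInfo } from 'builder-util-runtime'

// Assumed shape, inferred from update.info and update.ignore usage above.
interface UpdateRuntimeState {
  info?: UpdateInfo | null
  // Set by onIgnore in UpdateDialogPopup; hides UpdateAppButton until the state is reset.
  ignore?: boolean
}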
@ -31,6 +31,8 @@ import { getErrorMessage, uuid } from '@renderer/utils'
|
||||
import { isNewApiProvider } from '@renderer/utils/provider'
|
||||
import { Avatar, Button, Empty, InputNumber, Segmented, Select, Upload } from 'antd'
|
||||
import TextArea from 'antd/es/input/TextArea'
|
||||
import type { RcFile } from 'antd/es/upload'
|
||||
import type { UploadFile } from 'antd/es/upload/interface'
|
||||
import type { FC } from 'react'
|
||||
import React from 'react'
|
||||
import { useCallback, useEffect, useMemo, useRef, useState } from 'react'
|
||||
@ -553,7 +555,31 @@ const NewApiPage: FC<{ Options: string[] }> = ({ Options }) => {
|
||||
maxCount={16}
|
||||
showUploadList={true}
|
||||
listType="picture"
|
||||
beforeUpload={handleImageUpload}>
|
||||
beforeUpload={handleImageUpload}
|
||||
fileList={editImageFiles.map((file, idx): UploadFile<any> => {
|
||||
const rcFile: RcFile = {
|
||||
...file,
|
||||
uid: String(idx),
|
||||
lastModifiedDate: file.lastModified ? new Date(file.lastModified) : new Date()
|
||||
}
|
||||
return {
|
||||
uid: rcFile.uid,
|
||||
name: rcFile.name || `image_${idx + 1}.png`,
|
||||
status: 'done',
|
||||
url: URL.createObjectURL(file),
|
||||
originFileObj: rcFile,
|
||||
lastModifiedDate: rcFile.lastModifiedDate
|
||||
}
|
||||
})}
|
||||
onRemove={(file) => {
|
||||
setEditImageFiles((prev) =>
|
||||
prev.filter((f) => {
|
||||
const idx = prev.indexOf(f)
|
||||
return String(idx) !== file.uid
|
||||
})
|
||||
)
|
||||
return true
|
||||
}}>
|
||||
<ImagePlaceholder>
|
||||
<ImageSizeImage src={IcImageUp} theme={theme} />
|
||||
</ImagePlaceholder>
|
||||
|
||||
@ -64,7 +64,7 @@ export const AccessibleDirsSetting = ({ base, update }: AccessibleDirsSettingPro
|
||||
return (
|
||||
<SettingsItem>
|
||||
<SettingsTitle
|
||||
actions={
|
||||
contentAfter={
|
||||
<Tooltip title={t('agent.session.accessible_paths.add')}>
|
||||
<Button type="text" icon={<Plus size={16} />} shape="circle" onClick={addAccessiblePath} />
|
||||
</Tooltip>
|
||||
|
||||
@ -69,7 +69,7 @@ export const AdvancedSettings: React.FC<AdvancedSettingsProps> = ({ agentBase, u
|
||||
<SettingsContainer>
|
||||
<SettingsItem divider={false}>
|
||||
<SettingsTitle
|
||||
actions={
|
||||
contentAfter={
|
||||
<Tooltip title={t('agent.settings.advance.maxTurns.description')} placement="left">
|
||||
<Info size={16} className="text-foreground-400" />
|
||||
</Tooltip>
|
||||
|
||||
@ -1,3 +1,4 @@
|
||||
import { HelpTooltip } from '@renderer/components/TooltipIcons'
|
||||
import SelectAgentBaseModelButton from '@renderer/pages/home/components/SelectAgentBaseModelButton'
|
||||
import type { AgentBaseWithId, ApiModel, UpdateAgentFunctionUnion } from '@renderer/types'
|
||||
import { useTranslation } from 'react-i18next'
|
||||
@ -22,7 +23,9 @@ export const ModelSetting = ({ base, update, isDisabled }: ModelSettingProps) =>
|
||||
|
||||
return (
|
||||
<SettingsItem inline>
|
||||
<SettingsTitle id="model">{t('common.model')}</SettingsTitle>
|
||||
<SettingsTitle id="model" contentAfter={<HelpTooltip title={t('agent.add.model.tooltip')} />}>
|
||||
{t('common.model')}
|
||||
</SettingsTitle>
|
||||
<SelectAgentBaseModelButton
|
||||
agentBase={base}
|
||||
onSelect={async (model) => {
|
||||
|
||||
@ -10,14 +10,14 @@ import styled from 'styled-components'
|
||||
import { SettingDivider } from '..'
|
||||
|
||||
export interface SettingsTitleProps extends React.ComponentPropsWithRef<'div'> {
|
||||
actions?: ReactNode
|
||||
contentAfter?: ReactNode
|
||||
}
|
||||
|
||||
export const SettingsTitle: React.FC<SettingsTitleProps> = ({ children, actions }) => {
|
||||
export const SettingsTitle: React.FC<SettingsTitleProps> = ({ children, contentAfter }) => {
|
||||
return (
|
||||
<div className={cn(actions ? 'justify-between' : undefined, 'mb-1 flex items-center gap-2')}>
|
||||
<div className={cn(contentAfter ? 'justify-between' : undefined, 'mb-1 flex items-center gap-2')}>
|
||||
<span className="flex items-center gap-1 font-bold">{children}</span>
|
||||
{actions !== undefined && actions}
|
||||
{contentAfter !== undefined && contentAfter}
|
||||
</div>
|
||||
)
|
||||
}
|
||||
|
||||
@ -9,8 +9,9 @@ import MCPDescription from '@renderer/pages/settings/MCPSettings/McpDescription'
|
||||
import type { MCPPrompt, MCPResource, MCPServer, MCPTool } from '@renderer/types'
|
||||
import { parseKeyValueString } from '@renderer/utils/env'
|
||||
import { formatMcpError } from '@renderer/utils/error'
|
||||
import type { MCPServerLogEntry } from '@shared/config/types'
|
||||
import type { TabsProps } from 'antd'
|
||||
import { Badge, Button, Flex, Form, Input, Radio, Select, Switch, Tabs } from 'antd'
|
||||
import { Badge, Button, Flex, Form, Input, Modal, Radio, Select, Switch, Tabs, Tag, Typography } from 'antd'
|
||||
import TextArea from 'antd/es/input/TextArea'
|
||||
import { ChevronDown, SaveIcon } from 'lucide-react'
|
||||
import React, { useCallback, useEffect, useState } from 'react'
|
||||
@ -88,8 +89,11 @@ const McpSettings: React.FC = () => {
|
||||
|
||||
const [showAdvanced, setShowAdvanced] = useState(false)
|
||||
const [serverVersion, setServerVersion] = useState<string | null>(null)
|
||||
const [logModalOpen, setLogModalOpen] = useState(false)
|
||||
const [logs, setLogs] = useState<(MCPServerLogEntry & { serverId?: string })[]>([])
|
||||
|
||||
const { theme } = useTheme()
|
||||
const { Text } = Typography
|
||||
|
||||
const navigate = useNavigate()
|
||||
|
||||
@ -234,12 +238,43 @@ const McpSettings: React.FC = () => {
|
||||
}
|
||||
}
|
||||
|
||||
const fetchServerLogs = async () => {
|
||||
try {
|
||||
const history = await window.api.mcp.getServerLogs(server)
|
||||
setLogs(history)
|
||||
} catch (error) {
|
||||
logger.warn('Failed to load server logs', error as Error)
|
||||
}
|
||||
}
|
||||
|
||||
useEffect(() => {
|
||||
const unsubscribe = window.api.mcp.onServerLog((log) => {
|
||||
if (log.serverId && log.serverId !== server.id) return
|
||||
setLogs((prev) => {
|
||||
const merged = [...prev, log]
|
||||
if (merged.length > 200) {
|
||||
return merged.slice(merged.length - 200)
|
||||
}
|
||||
return merged
|
||||
})
|
||||
})
|
||||
|
||||
return () => {
|
||||
unsubscribe?.()
|
||||
}
|
||||
}, [server.id])
|
||||
|
||||
useEffect(() => {
|
||||
setLogs([])
|
||||
}, [server.id])
|
||||
|
||||
useEffect(() => {
|
||||
if (server.isActive) {
|
||||
fetchTools()
|
||||
fetchPrompts()
|
||||
fetchResources()
|
||||
fetchServerVersion()
|
||||
fetchServerLogs()
|
||||
}
|
||||
// eslint-disable-next-line react-hooks/exhaustive-deps
|
||||
}, [server.id, server.isActive])
|
||||
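fetchServerLogs and the onServerLog subscription above rely on a preload bridge exposed as window.api.mcp. A minimal sketch of the surface these calls assume, inferred from the renderer usage rather than from the preload source; the interface name is illustrative.

import type { MCPServer } from '@renderer/types' // assumed import path
import type { MCPServerLogEntry } from '@shared/config/types'

// Inferred shape; the real bridge may expose more methods.
interface McpLogBridge {
  // Resolves the buffered log history for the given server.
  getServerLogs(server: MCPServer): Promise<(MCPServerLogEntry & { serverId?: string })[]>
  // Subscribes to live log events; the returned function unsubscribes.
  onServerLog(listener: (log: MCPServerLogEntry & { serverId?: string }) => void): () => void
}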
@ -736,6 +771,9 @@ const McpSettings: React.FC = () => {
|
||||
<ServerName className="text-nowrap">{server?.name}</ServerName>
|
||||
{serverVersion && <VersionBadge count={serverVersion} color="blue" />}
|
||||
</Flex>
|
||||
<Button size="small" onClick={() => setLogModalOpen(true)}>
|
||||
{t('settings.mcp.logs', 'View Logs')}
|
||||
</Button>
|
||||
<Button
|
||||
danger
|
||||
icon={<DeleteIcon size={14} className="lucide-custom" />}
|
||||
@ -770,6 +808,37 @@ const McpSettings: React.FC = () => {
|
||||
/>
|
||||
</SettingGroup>
|
||||
</SettingContainer>
|
||||
|
||||
<Modal
|
||||
title={t('settings.mcp.logs', 'Server Logs')}
|
||||
open={logModalOpen}
|
||||
onCancel={() => setLogModalOpen(false)}
|
||||
footer={null}
|
||||
width={720}
|
||||
centered
|
||||
transitionName="animation-move-down"
|
||||
bodyStyle={{ maxHeight: '60vh', minHeight: '40vh', overflowY: 'auto' }}
|
||||
afterOpenChange={(open) => {
|
||||
if (open) {
|
||||
fetchServerLogs()
|
||||
}
|
||||
}}>
|
||||
<LogList>
|
||||
{logs.length === 0 && <Text type="secondary">{t('settings.mcp.noLogs', 'No logs yet')}</Text>}
|
||||
{logs.map((log, idx) => (
|
||||
<LogItem key={`${log.timestamp}-${idx}`}>
|
||||
<Flex gap={8} align="baseline">
|
||||
<Timestamp>{new Date(log.timestamp).toLocaleTimeString()}</Timestamp>
|
||||
<Tag color={mapLogLevelColor(log.level)}>{log.level}</Tag>
|
||||
<Text>{log.message}</Text>
|
||||
</Flex>
|
||||
{log.data && (
|
||||
<PreBlock>{typeof log.data === 'string' ? log.data : JSON.stringify(log.data, null, 2)}</PreBlock>
|
||||
)}
|
||||
</LogItem>
|
||||
))}
|
||||
</LogList>
|
||||
</Modal>
|
||||
</Container>
|
||||
)
|
||||
}
|
||||
@ -792,6 +861,52 @@ const AdvancedSettingsButton = styled.div`
|
||||
align-items: center;
|
||||
`
|
||||
|
||||
const LogList = styled.div`
|
||||
display: flex;
|
||||
flex-direction: column;
|
||||
gap: 12px;
|
||||
`
|
||||
|
||||
const LogItem = styled.div`
|
||||
background: var(--color-bg-2, #1f1f1f);
|
||||
color: var(--color-text-1, #e6e6e6);
|
||||
border-radius: 8px;
|
||||
padding: 10px 12px;
|
||||
border: 1px solid var(--color-border, rgba(255, 255, 255, 0.08));
|
||||
`
|
||||
|
||||
const Timestamp = styled.span`
|
||||
color: var(--color-text-3, #9aa2b1);
|
||||
font-size: 12px;
|
||||
`
|
||||
|
||||
const PreBlock = styled.pre`
|
||||
margin: 6px 0 0;
|
||||
padding: 8px;
|
||||
background: var(--color-bg-3, #111418);
|
||||
color: var(--color-text-1, #e6e6e6);
|
||||
border-radius: 6px;
|
||||
font-size: 12px;
|
||||
white-space: pre-wrap;
|
||||
word-break: break-word;
|
||||
border: 1px solid var(--color-border, rgba(255, 255, 255, 0.08));
|
||||
`
|
||||
|
||||
function mapLogLevelColor(level: MCPServerLogEntry['level']) {
|
||||
switch (level) {
|
||||
case 'error':
|
||||
case 'stderr':
|
||||
return 'red'
|
||||
case 'warn':
|
||||
return 'orange'
|
||||
case 'info':
|
||||
case 'stdout':
|
||||
return 'blue'
|
||||
default:
|
||||
return 'default'
|
||||
}
|
||||
}
|
||||
|
||||
const VersionBadge = styled(Badge)`
|
||||
.ant-badge-count {
|
||||
background-color: var(--color-primary);
|
||||
|
||||
@ -0,0 +1,74 @@
|
||||
import { useProvider } from '@renderer/hooks/useProvider'
|
||||
import { Select } from 'antd'
|
||||
import type { FC } from 'react'
|
||||
import { useCallback, useMemo } from 'react'
|
||||
import { useTranslation } from 'react-i18next'
|
||||
|
||||
interface CherryINSettingsProps {
|
||||
providerId: string
|
||||
apiHost: string
|
||||
setApiHost: (host: string) => void
|
||||
}
|
||||
|
||||
const API_HOST_OPTIONS = [
|
||||
{
|
||||
value: 'https://open.cherryin.cc',
|
||||
labelKey: '加速域名',
|
||||
description: 'open.cherryin.cc'
|
||||
},
|
||||
{
|
||||
value: 'https://open.cherryin.net',
|
||||
labelKey: '国际域名',
|
||||
description: 'open.cherryin.net'
|
||||
},
|
||||
{
|
||||
value: 'https://open.cherryin.ai',
|
||||
labelKey: '备用域名',
|
||||
description: 'open.cherryin.ai'
|
||||
}
|
||||
]
|
||||
|
||||
const CherryINSettings: FC<CherryINSettingsProps> = ({ providerId, apiHost, setApiHost }) => {
|
||||
const { updateProvider } = useProvider(providerId)
|
||||
const { t } = useTranslation()
|
||||
|
||||
const getCurrentHost = useMemo(() => {
|
||||
const matchedOption = API_HOST_OPTIONS.find((option) => apiHost?.includes(option.value.replace('https://', '')))
|
||||
return matchedOption?.value ?? API_HOST_OPTIONS[0].value
|
||||
}, [apiHost])
|
||||
|
||||
const handleHostChange = useCallback(
|
||||
(value: string) => {
|
||||
setApiHost(value)
|
||||
updateProvider({ apiHost: value, anthropicApiHost: value })
|
||||
},
|
||||
[setApiHost, updateProvider]
|
||||
)
|
||||
|
||||
const options = useMemo(
|
||||
() =>
|
||||
API_HOST_OPTIONS.map((option) => ({
|
||||
value: option.value,
|
||||
label: (
|
||||
<div className="flex flex-col gap-0.5">
|
||||
<span>{t(option.labelKey)}</span>
|
||||
<span className="text-[var(--color-text-3)] text-xs">{t(option.description)}</span>
|
||||
</div>
|
||||
)
|
||||
})),
|
||||
[t]
|
||||
)
|
||||
|
||||
return (
|
||||
<Select
|
||||
value={getCurrentHost}
|
||||
onChange={handleHostChange}
|
||||
options={options}
|
||||
style={{ width: '100%', marginTop: 5 }}
|
||||
optionLabelProp="label"
|
||||
labelRender={(option) => option.value}
|
||||
/>
|
||||
)
|
||||
}
|
||||
|
||||
export default CherryINSettings
|
||||
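The getCurrentHost memo matches the stored apiHost against the preset domains by substring, so a host that carries an extra path still snaps to one of the three options and anything unrecognized falls back to the first entry. The resolutions below are illustrative; the example hosts are not taken from this change.

// 'https://open.cherryin.net/v1'  -> contains 'open.cherryin.net' -> 国际域名 option
// 'https://open.cherryin.ai'      -> contains 'open.cherryin.ai'  -> 备用域名 option
// 'https://example.com/other'     -> no match                     -> falls back to 加速域名 (first option)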
@ -10,7 +10,6 @@ import { PROVIDER_URLS } from '@renderer/config/providers'
import { useTheme } from '@renderer/context/ThemeProvider'
import { useAllProviders, useProvider, useProviders } from '@renderer/hooks/useProvider'
import { useTimer } from '@renderer/hooks/useTimer'
import i18n from '@renderer/i18n'
import AnthropicSettings from '@renderer/pages/settings/ProviderSettings/AnthropicSettings'
import { ModelList } from '@renderer/pages/settings/ProviderSettings/ModelList'
import { checkApi } from '@renderer/services/ApiService'
@ -53,6 +52,7 @@ import {
} from '..'
import ApiOptionsSettingsPopup from './ApiOptionsSettings/ApiOptionsSettingsPopup'
import AwsBedrockSettings from './AwsBedrockSettings'
import CherryINSettings from './CherryINSettings'
import CustomHeaderPopup from './CustomHeaderPopup'
import DMXAPISettings from './DMXAPISettings'
import GithubCopilotSettings from './GithubCopilotSettings'
@ -100,13 +100,15 @@ const ProviderSetting: FC<Props> = ({ providerId }) => {
  const [anthropicApiHost, setAnthropicHost] = useState<string | undefined>(provider.anthropicApiHost)
  const [apiVersion, setApiVersion] = useState(provider.apiVersion)
  const [activeHostField, setActiveHostField] = useState<HostField>('apiHost')
  const { t } = useTranslation()
  const { t, i18n } = useTranslation()
  const { theme } = useTheme()
  const { setTimeoutTimer } = useTimer()
  const dispatch = useAppDispatch()

  const isAzureOpenAI = isAzureOpenAIProvider(provider)
  const isDmxapi = provider.id === 'dmxapi'
  const isCherryIN = provider.id === 'cherryin'
  const isChineseUser = i18n.language.startsWith('zh')
  const noAPIInputProviders = ['aws-bedrock'] as const satisfies SystemProviderId[]
  const hideApiInput = noAPIInputProviders.some((id) => id === provider.id)
  const noAPIKeyInputProviders = ['copilot', 'vertexai'] as const satisfies SystemProviderId[]
@ -339,13 +341,16 @@ const ProviderSetting: FC<Props> = ({ providerId }) => {
  }, [provider.anthropicApiHost])

  const canConfigureAnthropicHost = useMemo(() => {
    if (isCherryIN) {
      return false
    }
    if (isNewApiProvider(provider)) {
      return true
    }
    return (
      provider.type !== 'anthropic' && isSystemProviderId(provider.id) && isAnthropicCompatibleProviderId(provider.id)
    )
  }, [provider])
  }, [isCherryIN, provider])

  const anthropicHostPreview = useMemo(() => {
    const rawHost = anthropicApiHost ?? provider.anthropicApiHost
@ -514,6 +519,9 @@ const ProviderSetting: FC<Props> = ({ providerId }) => {
          </SettingSubtitle>
          {activeHostField === 'apiHost' && (
            <>
              {isCherryIN && isChineseUser ? (
                <CherryINSettings providerId={provider.id} apiHost={apiHost} setApiHost={setApiHost} />
              ) : (
                <Space.Compact style={{ width: '100%', marginTop: 5 }}>
                  <Input
                    value={apiHost}
@ -527,6 +535,7 @@ const ProviderSetting: FC<Props> = ({ providerId }) => {
                  </Button>
                )}
              </Space.Compact>
            )}
          {isVertexProvider(provider) && (
            <SettingHelpTextRow>
              <SettingHelpText>{t('settings.provider.vertex_ai.api_host_help')}</SettingHelpText>

@ -7,6 +7,7 @@ import type { AssistantPreset } from '@renderer/types'
import { getLeadingEmoji } from '@renderer/utils'
import { Button, Dropdown } from 'antd'
import { t } from 'i18next'
import { isArray } from 'lodash'
import { ArrowDownAZ, Ellipsis, PlusIcon, SquareArrowOutUpRight } from 'lucide-react'
import { type FC, memo, useCallback, useEffect, useRef, useState } from 'react'
import styled from 'styled-components'
@ -142,7 +143,7 @@ const AssistantPresetCard: FC<Props> = ({ preset, onClick, activegroup, getLocal
            {getLocalizedGroupName('我的')}
          </CustomTag>
        )}
        {!!preset.group?.length &&
        {isArray(preset.group) &&
          preset.group.map((group) => (
            <CustomTag key={group} color="#A0A0A0" size={11}>
              {getLocalizedGroupName(group)}

@ -7,7 +7,6 @@ import {
  UNLIMITED_CONTEXT_COUNT
} from '@renderer/config/constant'
import { isQwenMTModel } from '@renderer/config/models/qwen'
import { CHERRYAI_PROVIDER } from '@renderer/config/providers'
import { UNKNOWN } from '@renderer/config/translate'
import { getStoreProviders } from '@renderer/hooks/useStore'
import i18n from '@renderer/i18n'
@ -27,7 +26,7 @@ import { uuid } from '@renderer/utils'

const logger = loggerService.withContext('AssistantService')

export const DEFAULT_ASSISTANT_SETTINGS: AssistantSettings = {
export const DEFAULT_ASSISTANT_SETTINGS = {
  temperature: DEFAULT_TEMPERATURE,
  enableTemperature: true,
  contextCount: DEFAULT_CONTEXTCOUNT,
@ -39,7 +38,7 @@ export const DEFAULT_ASSISTANT_SETTINGS: AssistantSettings = {
  // It would gracefully fallback to prompt if not supported by model.
  toolUseMode: 'function',
  customParameters: []
} as const
} as const satisfies AssistantSettings

export function getDefaultAssistant(): Assistant {
  return {

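Editor's note: the hunk above swaps an explicit `: AssistantSettings` annotation for `as const satisfies AssistantSettings`. The object is still checked against the interface, but each field keeps its narrow literal type instead of widening to the interface's unions. A minimal illustration of the difference, using a stand-in interface rather than the real AssistantSettings:

interface Settings {
  toolUseMode: 'function' | 'prompt'
  temperature: number
}

// Annotated: type-checks, but `annotated.toolUseMode` has type 'function' | 'prompt'.
const annotated: Settings = { toolUseMode: 'function', temperature: 1 }

// `as const satisfies`: still validated against Settings, and
// `defaults.toolUseMode` keeps the literal type 'function'.
const defaults = { toolUseMode: 'function', temperature: 1 } as const satisfies Settings
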
@ -142,7 +141,7 @@ export function getProviderByModel(model?: Model): Provider {

  if (!provider) {
    const defaultProvider = providers.find((p) => p.id === getDefaultModel()?.provider)
    return defaultProvider || CHERRYAI_PROVIDER || providers[0]
    return defaultProvider || providers[0]
  }

  return provider

@ -162,7 +162,7 @@ export const searchKnowledgeBase = async (

  const searchResults: KnowledgeSearchResult[] = await window.api.knowledgeBase.search(
    {
      search: rewrite || query,
      search: query || rewrite || '',
      base: baseParams
    },
    currentSpan?.spanContext()

@ -45,7 +45,7 @@ function normalizeModels<T>(models: T[], transformer: (entry: T) => Model | null
}

function adaptSdkModel(provider: Provider, model: SdkModel): Model | null {
  const id = pickPreferredString([(model as any)?.id, (model as any)?.modelId])
  const id = pickPreferredString([(model as any)?.id, (model as any)?.modelId, (model as any)?.name])
  const name = pickPreferredString([
    (model as any)?.display_name,
    (model as any)?.displayName,

@ -67,7 +67,7 @@ const persistedReducer = persistReducer(
  {
    key: 'cherry-studio',
    storage,
    version: 182,
    version: 183,
    blacklist: ['runtime', 'messages', 'messageBlocks', 'tabs', 'toolPermissions'],
    migrate
  },

@ -2981,6 +2981,22 @@ const migrateConfig = {
      logger.error('migrate 182 error', error as Error)
      return state
    }
  },
  '183': (state: RootState) => {
    try {
      state.llm.providers.forEach((provider) => {
        if (provider.id === SystemProviderIds.cherryin) {
          provider.apiHost = 'https://open.cherryin.cc'
          provider.anthropicApiHost = 'https://open.cherryin.cc'
        }
      })
      state.llm.providers = moveProvider(state.llm.providers, SystemProviderIds.poe, 10)
      logger.info('migrate 183 success')
      return state
    } catch (error) {
      logger.error('migrate 183 error', error as Error)
      return state
    }
  }
}

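Editor's note: migration '183' above only runs because the persist `version` was bumped from 182 to 183 in the same change. redux-persist compares the stored state's version with the configured one and replays every numbered migration in between before rehydrating. A rough sketch of that wiring, assuming the `migrate` option shown earlier is built with redux-persist's standard createMigrate helper; the repo's actual setup may differ:

import { createMigrate } from 'redux-persist'

// State persisted at version 182 is passed through migrateConfig['183']
// (pinning the CherryIN hosts and reordering the poe provider) on next launch.
const migrate = createMigrate(migrateConfig, { debug: false })
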
@ -34,6 +34,7 @@ export interface UpdateState {
  downloaded: boolean
  downloadProgress: number
  available: boolean
  ignore: boolean
}

export interface RuntimeState {
@ -79,7 +80,8 @@ const initialState: RuntimeState = {
    downloading: false,
    downloaded: false,
    downloadProgress: 0,
    available: false
    available: false,
    ignore: false
  },
  export: {
    isExporting: false

@ -10,6 +10,7 @@ import {
  formatVertexApiHost,
  getTrailingApiVersion,
  hasAPIVersion,
  isWithTrailingSharp,
  maskApiKey,
  routeToEndpoint,
  splitApiKeyString,
@ -439,6 +440,43 @@ describe('api', () => {
    it('returns undefined for empty string', () => {
      expect(getTrailingApiVersion('')).toBeUndefined()
    })

    it('returns undefined when URL ends with # regardless of version', () => {
      expect(getTrailingApiVersion('https://api.example.com/v1#')).toBeUndefined()
      expect(getTrailingApiVersion('https://api.example.com/v2beta#')).toBeUndefined()
      expect(getTrailingApiVersion('https://gateway.ai.cloudflare.com/v1#')).toBeUndefined()
      expect(getTrailingApiVersion('https://api.example.com/service/v1#')).toBeUndefined()
    })

    it('handles URLs with # and trailing slash correctly', () => {
      expect(getTrailingApiVersion('https://api.example.com/v1/#')).toBeUndefined()
      expect(getTrailingApiVersion('https://api.example.com/v2beta/#')).toBeUndefined()
    })

    it('handles URLs with version followed by # and additional path', () => {
      expect(getTrailingApiVersion('https://api.example.com/v1#endpoint')).toBeUndefined()
      expect(getTrailingApiVersion('https://api.example.com/v2beta#chat/completions')).toBeUndefined()
    })

    it('handles complex URLs with multiple # characters', () => {
      expect(getTrailingApiVersion('https://api.example.com/v1#path#')).toBeUndefined()
      expect(getTrailingApiVersion('https://gateway.ai.cloudflare.com/v1/xxx/v2beta#')).toBeUndefined()
    })

    it('handles URLs ending with # when version is not at the end', () => {
      expect(getTrailingApiVersion('https://api.example.com/v1/service#')).toBeUndefined()
      expect(getTrailingApiVersion('https://api.example.com/v1/api/chat#')).toBeUndefined()
    })

    it('distinguishes between URLs with and without trailing #', () => {
      // Without # - should extract version
      expect(getTrailingApiVersion('https://api.example.com/v1')).toBe('v1')
      expect(getTrailingApiVersion('https://api.example.com/v2beta')).toBe('v2beta')

      // With # - should return undefined
      expect(getTrailingApiVersion('https://api.example.com/v1#')).toBeUndefined()
      expect(getTrailingApiVersion('https://api.example.com/v2beta#')).toBeUndefined()
    })
  })

  describe('withoutTrailingApiVersion', () => {
@ -484,6 +522,70 @@ describe('api', () => {
    })
  })

  describe('isWithTrailingSharp', () => {
    it('returns true when URL ends with #', () => {
      expect(isWithTrailingSharp('https://api.example.com#')).toBe(true)
      expect(isWithTrailingSharp('http://localhost:3000#')).toBe(true)
      expect(isWithTrailingSharp('#')).toBe(true)
    })

    it('returns false when URL does not end with #', () => {
      expect(isWithTrailingSharp('https://api.example.com')).toBe(false)
      expect(isWithTrailingSharp('http://localhost:3000')).toBe(false)
      expect(isWithTrailingSharp('')).toBe(false)
    })

    it('returns false when URL has # in the middle but not at the end', () => {
      expect(isWithTrailingSharp('https://api.example.com#path')).toBe(false)
      expect(isWithTrailingSharp('https://api.example.com#section/path')).toBe(false)
      expect(isWithTrailingSharp('https://api.example.com#path#other')).toBe(false)
    })

    it('handles URLs with multiple # characters', () => {
      expect(isWithTrailingSharp('https://api.example.com##')).toBe(true)
      expect(isWithTrailingSharp('https://api.example.com#path#')).toBe(true)
      expect(isWithTrailingSharp('https://api.example.com###')).toBe(true)
    })

    it('handles URLs with trailing whitespace after #', () => {
      expect(isWithTrailingSharp('https://api.example.com# ')).toBe(false)
      expect(isWithTrailingSharp('https://api.example.com#\t')).toBe(false)
      expect(isWithTrailingSharp('https://api.example.com#\n')).toBe(false)
    })

    it('handles URLs with whitespace before trailing #', () => {
      expect(isWithTrailingSharp(' https://api.example.com#')).toBe(true)
      expect(isWithTrailingSharp('\thttps://localhost:3000#')).toBe(true)
    })

    it('preserves type safety with generic parameter', () => {
      const url1: string = 'https://api.example.com#'
      const url2 = 'https://example.com' as const

      expect(isWithTrailingSharp(url1)).toBe(true)
      expect(isWithTrailingSharp(url2)).toBe(false)
    })

    it('handles complex real-world URLs', () => {
      expect(isWithTrailingSharp('https://open.cherryin.net/v1/chat/completions#')).toBe(true)
      expect(isWithTrailingSharp('https://api.openai.com/v1/engines/gpt-4#')).toBe(true)
      expect(isWithTrailingSharp('https://gateway.ai.cloudflare.com/v1/xxx/v1beta#')).toBe(true)

      expect(isWithTrailingSharp('https://open.cherryin.net/v1/chat/completions')).toBe(false)
      expect(isWithTrailingSharp('https://api.openai.com/v1/engines/gpt-4')).toBe(false)
      expect(isWithTrailingSharp('https://gateway.ai.cloudflare.com/v1/xxx/v1beta')).toBe(false)
    })

    it('handles edge cases', () => {
      expect(isWithTrailingSharp('#')).toBe(true)
      expect(isWithTrailingSharp(' #')).toBe(true)
      expect(isWithTrailingSharp('# ')).toBe(false)
      expect(isWithTrailingSharp('path#')).toBe(true)
      expect(isWithTrailingSharp('/path/with/trailing/#')).toBe(true)
      expect(isWithTrailingSharp('/path/without/trailing/')).toBe(false)
    })
  })

  describe('withoutTrailingSharp', () => {
    it('removes trailing # from URL', () => {
      expect(withoutTrailingSharp('https://api.example.com#')).toBe('https://api.example.com')

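Editor's note: the new tests pin down two URL helpers. isWithTrailingSharp is true exactly when the string's last character is '#' (trailing whitespace after the '#' makes it false, leading whitespace does not), and getTrailingApiVersion refuses to extract a version from any URL carrying that trailing '#'. A minimal sketch consistent with the tests above, not necessarily the repository's implementation in the api utils:

// True when the URL's final character is '#'.
export function isWithTrailingSharp<T extends string>(url: T): boolean {
  return url.endsWith('#')
}

// Strip a single trailing '#' if present.
export function withoutTrailingSharp(url: string): string {
  return isWithTrailingSharp(url) ? url.slice(0, -1) : url
}
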
@ -1,4 +1,4 @@
import { isMac } from '@renderer/config/constant'
import { isMac, isWin } from '@renderer/config/constant'
import { useSelectionAssistant } from '@renderer/hooks/useSelectionAssistant'
import { useSettings } from '@renderer/hooks/useSettings'
import i18n from '@renderer/i18n'
@ -8,11 +8,14 @@ import { IpcChannel } from '@shared/IpcChannel'
import { Button, Slider, Tooltip } from 'antd'
import { Droplet, Minus, Pin, X } from 'lucide-react'
import { DynamicIcon } from 'lucide-react/dynamic'
import type { FC } from 'react'
import type { FC, MouseEvent as ReactMouseEvent } from 'react'
import { useCallback, useEffect, useRef, useState } from 'react'
import { useTranslation } from 'react-i18next'
import styled from 'styled-components'

// [Windows only] Electron bug workaround type - can be removed once https://github.com/electron/electron/issues/48554 is fixed
type ResizeDirection = 'n' | 's' | 'e' | 'w' | 'ne' | 'nw' | 'se' | 'sw'

import ActionGeneral from './components/ActionGeneral'
import ActionTranslate from './components/ActionTranslate'

@ -185,11 +188,62 @@ const SelectionActionApp: FC = () => {
    }
  }

  /**
   * [Windows only] Manual window resize handler
   *
   * ELECTRON BUG WORKAROUND:
   * In Electron, when using `frame: false` + `transparent: true`, the native window
   * resize functionality is broken on Windows. This is a known Electron bug.
   * See: https://github.com/electron/electron/issues/48554
   *
   * This custom resize implementation can be removed once the Electron bug is fixed.
   */
  const handleResizeStart = useCallback((e: ReactMouseEvent, direction: ResizeDirection) => {
    e.preventDefault()
    e.stopPropagation()

    let lastX = e.screenX
    let lastY = e.screenY

    const handleMouseMove = (moveEvent: MouseEvent) => {
      const deltaX = moveEvent.screenX - lastX
      const deltaY = moveEvent.screenY - lastY

      if (deltaX !== 0 || deltaY !== 0) {
        window.api.selection.resizeActionWindow(deltaX, deltaY, direction)
        lastX = moveEvent.screenX
        lastY = moveEvent.screenY
      }
    }

    const handleMouseUp = () => {
      window.removeEventListener('mousemove', handleMouseMove)
      window.removeEventListener('mouseup', handleMouseUp)
    }

    window.addEventListener('mousemove', handleMouseMove)
    window.addEventListener('mouseup', handleMouseUp)
  }, [])

  //we don't need to render the component if action is not set
  if (!action) return null

  return (
    <WindowFrame $opacity={opacity / 100}>
      {/* [Windows only] Custom resize handles - Electron bug workaround, can be removed once fixed */}
      {isWin && (
        <>
          <ResizeHandle $direction="n" onMouseDown={(e) => handleResizeStart(e, 'n')} />
          <ResizeHandle $direction="s" onMouseDown={(e) => handleResizeStart(e, 's')} />
          <ResizeHandle $direction="e" onMouseDown={(e) => handleResizeStart(e, 'e')} />
          <ResizeHandle $direction="w" onMouseDown={(e) => handleResizeStart(e, 'w')} />
          <ResizeHandle $direction="ne" onMouseDown={(e) => handleResizeStart(e, 'ne')} />
          <ResizeHandle $direction="nw" onMouseDown={(e) => handleResizeStart(e, 'nw')} />
          <ResizeHandle $direction="se" onMouseDown={(e) => handleResizeStart(e, 'se')} />
          <ResizeHandle $direction="sw" onMouseDown={(e) => handleResizeStart(e, 'sw')} />
        </>
      )}

      <TitleBar $isWindowFocus={isWindowFocus} style={isMac ? { paddingLeft: '70px' } : {}}>
        {action.icon && (
          <TitleBarIcon>
@ -431,4 +485,90 @@ const OpacitySlider = styled.div`
  }
`

/**
 * [Windows only] Custom resize handle styled component
 *
 * ELECTRON BUG WORKAROUND:
 * This component can be removed once https://github.com/electron/electron/issues/48554 is fixed.
 */
const ResizeHandle = styled.div<{ $direction: ResizeDirection }>`
  position: absolute;
  -webkit-app-region: no-drag;
  z-index: 10;

  ${({ $direction }) => {
    const edgeSize = '6px'
    const cornerSize = '12px'

    switch ($direction) {
      case 'n':
        return `
          top: 0;
          left: ${cornerSize};
          right: ${cornerSize};
          height: ${edgeSize};
          cursor: ns-resize;
        `
      case 's':
        return `
          bottom: 0;
          left: ${cornerSize};
          right: ${cornerSize};
          height: ${edgeSize};
          cursor: ns-resize;
        `
      case 'e':
        return `
          right: 0;
          top: ${cornerSize};
          bottom: ${cornerSize};
          width: ${edgeSize};
          cursor: ew-resize;
        `
      case 'w':
        return `
          left: 0;
          top: ${cornerSize};
          bottom: ${cornerSize};
          width: ${edgeSize};
          cursor: ew-resize;
        `
      case 'ne':
        return `
          top: 0;
          right: 0;
          width: ${cornerSize};
          height: ${cornerSize};
          cursor: nesw-resize;
        `
      case 'nw':
        return `
          top: 0;
          left: 0;
          width: ${cornerSize};
          height: ${cornerSize};
          cursor: nwse-resize;
        `
      case 'se':
        return `
          bottom: 0;
          right: 0;
          width: ${cornerSize};
          height: ${cornerSize};
          cursor: nwse-resize;
        `
      case 'sw':
        return `
          bottom: 0;
          left: 0;
          width: ${cornerSize};
          height: ${cornerSize};
          cursor: nesw-resize;
        `
      default:
        return ''
    }
  }}
`

export default SelectionActionApp

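Editor's note: in the renderer, handleResizeStart only tracks screen-space mouse deltas and forwards them through window.api.selection.resizeActionWindow(deltaX, deltaY, direction); the window bounds themselves must be adjusted in the main process, because native resizing is broken for frameless transparent windows on Windows (electron/electron#48554). A rough sketch of what the receiving side could look like, using Electron's standard BrowserWindow APIs; the channel name and handler are assumptions for illustration, not code from this commit:

import { BrowserWindow, ipcMain } from 'electron'

// Hypothetical IPC handler: grow/shrink the action window by the reported deltas.
ipcMain.handle('selection:resize-action-window', (event, deltaX: number, deltaY: number, direction: string) => {
  const win = BrowserWindow.fromWebContents(event.sender)
  if (!win) return

  const bounds = win.getBounds()
  // Edges containing 'e'/'s' grow the window; 'w'/'n' also shift the origin.
  if (direction.includes('e')) bounds.width += deltaX
  if (direction.includes('s')) bounds.height += deltaY
  if (direction.includes('w')) {
    bounds.x += deltaX
    bounds.width -= deltaX
  }
  if (direction.includes('n')) {
    bounds.y += deltaY
    bounds.height -= deltaY
  }
  win.setBounds(bounds)
})
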
85
yarn.lock
85
yarn.lock
@ -503,9 +503,9 @@ __metadata:
  languageName: node
  linkType: hard

"@anthropic-ai/claude-agent-sdk@npm:0.1.53":
  version: 0.1.53
  resolution: "@anthropic-ai/claude-agent-sdk@npm:0.1.53"
"@anthropic-ai/claude-agent-sdk@npm:0.1.62":
  version: 0.1.62
  resolution: "@anthropic-ai/claude-agent-sdk@npm:0.1.62"
  dependencies:
    "@img/sharp-darwin-arm64": "npm:^0.33.5"
    "@img/sharp-darwin-x64": "npm:^0.33.5"
@ -534,13 +534,13 @@ __metadata:
      optional: true
    "@img/sharp-win32-x64":
      optional: true
  checksum: 10c0/9b8e444f113e1f6a425d87287c653a5a441836c6100e954fdc33ce9149c8d87ca1a7d495563f4fac583cbaf14946fe18c321eb555b3f0e44a5de8433ba06bdaf
  checksum: 10c0/bca0978651cd28798cd71a0071618eca37253905841fa0e20ec59f69ac4865e2c6c4e5fec034bc7b85a5748df5c3c37e3193d6adbd1cad73668f112d049390a3
  languageName: node
  linkType: hard

"@anthropic-ai/claude-agent-sdk@patch:@anthropic-ai/claude-agent-sdk@npm%3A0.1.53#~/.yarn/patches/@anthropic-ai-claude-agent-sdk-npm-0.1.53-4b77f4cf29.patch":
  version: 0.1.53
  resolution: "@anthropic-ai/claude-agent-sdk@patch:@anthropic-ai/claude-agent-sdk@npm%3A0.1.53#~/.yarn/patches/@anthropic-ai-claude-agent-sdk-npm-0.1.53-4b77f4cf29.patch::version=0.1.53&hash=b05505"
"@anthropic-ai/claude-agent-sdk@patch:@anthropic-ai/claude-agent-sdk@npm%3A0.1.62#~/.yarn/patches/@anthropic-ai-claude-agent-sdk-npm-0.1.62-23ae56f8c8.patch":
  version: 0.1.62
  resolution: "@anthropic-ai/claude-agent-sdk@patch:@anthropic-ai/claude-agent-sdk@npm%3A0.1.62#~/.yarn/patches/@anthropic-ai-claude-agent-sdk-npm-0.1.62-23ae56f8c8.patch::version=0.1.62&hash=b8fdbe"
  dependencies:
    "@img/sharp-darwin-arm64": "npm:^0.33.5"
    "@img/sharp-darwin-x64": "npm:^0.33.5"
@ -569,7 +569,7 @@ __metadata:
      optional: true
    "@img/sharp-win32-x64":
      optional: true
  checksum: 10c0/54abfc37ca1e1617503b1a70d31a165b95cb898e6192637d3ab450be081bc8c89933714d1b150f5c3ef3948b3c481f81b9dfaf45fa1edff745477edf3e3c58e5
  checksum: 10c0/6c59cfc3d3b7d903d946c5da6e0c2ad6798ae837b67c2a9e679df2803d7577823f8feec26e48fa9f815b9ff19612c66e2682fdd182be0344b60febb6e64ac85e
  languageName: node
  linkType: hard

@ -10046,7 +10046,7 @@ __metadata:
    "@ai-sdk/perplexity": "npm:^2.0.20"
    "@ai-sdk/test-server": "npm:^0.0.1"
    "@ant-design/v5-patch-for-react-19": "npm:^1.0.3"
    "@anthropic-ai/claude-agent-sdk": "patch:@anthropic-ai/claude-agent-sdk@npm%3A0.1.53#~/.yarn/patches/@anthropic-ai-claude-agent-sdk-npm-0.1.53-4b77f4cf29.patch"
    "@anthropic-ai/claude-agent-sdk": "patch:@anthropic-ai/claude-agent-sdk@npm%3A0.1.62#~/.yarn/patches/@anthropic-ai-claude-agent-sdk-npm-0.1.62-23ae56f8c8.patch"
    "@anthropic-ai/sdk": "npm:^0.41.0"
    "@anthropic-ai/vertex-sdk": "patch:@anthropic-ai/vertex-sdk@npm%3A0.11.4#~/.yarn/patches/@anthropic-ai-vertex-sdk-npm-0.11.4-c19cb41edb.patch"
    "@aws-sdk/client-bedrock": "npm:^3.910.0"
@ -10187,7 +10187,6 @@ __metadata:
    clsx: "npm:^2.1.1"
    code-inspector-plugin: "npm:^0.20.14"
    color: "npm:^5.0.0"
    concurrently: "npm:^9.2.1"
    country-flag-emoji-polyfill: "npm:0.1.8"
    dayjs: "npm:^1.11.11"
    dexie: "npm:^4.0.8"
@ -11504,16 +11503,6 @@ __metadata:
  languageName: node
  linkType: hard

"chalk@npm:4.1.2, chalk@npm:^4.0.0, chalk@npm:^4.0.2, chalk@npm:^4.1.0, chalk@npm:^4.1.1, chalk@npm:^4.1.2":
  version: 4.1.2
  resolution: "chalk@npm:4.1.2"
  dependencies:
    ansi-styles: "npm:^4.1.0"
    supports-color: "npm:^7.1.0"
  checksum: 10c0/4a3fef5cc34975c898ffe77141450f679721df9dde00f6c304353fa9c8b571929123b26a0e4617bde5018977eb655b31970c297b91b63ee83bb82aeb04666880
  languageName: node
  linkType: hard

"chalk@npm:^3.0.0":
  version: 3.0.0
  resolution: "chalk@npm:3.0.0"
@ -11524,6 +11513,16 @@ __metadata:
  languageName: node
  linkType: hard

"chalk@npm:^4.0.0, chalk@npm:^4.0.2, chalk@npm:^4.1.0, chalk@npm:^4.1.1, chalk@npm:^4.1.2":
  version: 4.1.2
  resolution: "chalk@npm:4.1.2"
  dependencies:
    ansi-styles: "npm:^4.1.0"
    supports-color: "npm:^7.1.0"
  checksum: 10c0/4a3fef5cc34975c898ffe77141450f679721df9dde00f6c304353fa9c8b571929123b26a0e4617bde5018977eb655b31970c297b91b63ee83bb82aeb04666880
  languageName: node
  linkType: hard

"chalk@npm:^5.4.1":
  version: 5.4.1
  resolution: "chalk@npm:5.4.1"
@ -12146,23 +12145,6 @@ __metadata:
  languageName: node
  linkType: hard

"concurrently@npm:^9.2.1":
  version: 9.2.1
  resolution: "concurrently@npm:9.2.1"
  dependencies:
    chalk: "npm:4.1.2"
    rxjs: "npm:7.8.2"
    shell-quote: "npm:1.8.3"
    supports-color: "npm:8.1.1"
    tree-kill: "npm:1.2.2"
    yargs: "npm:17.7.2"
  bin:
    conc: dist/bin/concurrently.js
    concurrently: dist/bin/concurrently.js
  checksum: 10c0/da37f239f82eb7ac24f5ddb56259861e5f1d6da2ade7602b6ea7ad3101b13b5ccec02a77b7001402d1028ff2fdc38eed55644b32853ad5abf30e057002a963aa
  languageName: node
  linkType: hard

"conf@npm:^10.2.0":
  version: 10.2.0
  resolution: "conf@npm:10.2.0"
@ -23026,15 +23008,6 @@ __metadata:
  languageName: node
  linkType: hard

"rxjs@npm:7.8.2":
  version: 7.8.2
  resolution: "rxjs@npm:7.8.2"
  dependencies:
    tslib: "npm:^2.1.0"
  checksum: 10c0/1fcd33d2066ada98ba8f21fcbbcaee9f0b271de1d38dc7f4e256bfbc6ffcdde68c8bfb69093de7eeb46f24b1fb820620bf0223706cff26b4ab99a7ff7b2e2c45
  languageName: node
  linkType: hard

"safe-buffer@npm:5.2.1, safe-buffer@npm:^5.0.1, safe-buffer@npm:^5.1.1, safe-buffer@npm:~5.2.0":
  version: 5.2.1
  resolution: "safe-buffer@npm:5.2.1"
@ -23367,13 +23340,6 @@ __metadata:
  languageName: node
  linkType: hard

"shell-quote@npm:1.8.3":
  version: 1.8.3
  resolution: "shell-quote@npm:1.8.3"
  checksum: 10c0/bee87c34e1e986cfb4c30846b8e6327d18874f10b535699866f368ade11ea4ee45433d97bf5eada22c4320c27df79c3a6a7eb1bf3ecfc47f2c997d9e5e2672fd
  languageName: node
  linkType: hard

"shiki@npm:3.12.0, shiki@npm:^3.12.0":
  version: 3.12.0
  resolution: "shiki@npm:3.12.0"
@ -24099,15 +24065,6 @@ __metadata:
  languageName: node
  linkType: hard

"supports-color@npm:8.1.1":
  version: 8.1.1
  resolution: "supports-color@npm:8.1.1"
  dependencies:
    has-flag: "npm:^4.0.0"
  checksum: 10c0/ea1d3c275dd604c974670f63943ed9bd83623edc102430c05adb8efc56ba492746b6e95386e7831b872ec3807fd89dd8eb43f735195f37b5ec343e4234cc7e89
  languageName: node
  linkType: hard

"supports-color@npm:^7.1.0":
  version: 7.2.0
  resolution: "supports-color@npm:7.2.0"
@ -24645,7 +24602,7 @@ __metadata:
  languageName: node
  linkType: hard

"tree-kill@npm:1.2.2, tree-kill@npm:^1.2.2":
"tree-kill@npm:^1.2.2":
  version: 1.2.2
  resolution: "tree-kill@npm:1.2.2"
  bin:
@ -26314,7 +26271,7 @@ __metadata:
  languageName: node
  linkType: hard

"yargs@npm:17.7.2, yargs@npm:^17.0.1, yargs@npm:^17.5.1, yargs@npm:^17.6.2, yargs@npm:^17.7.2":
"yargs@npm:^17.0.1, yargs@npm:^17.5.1, yargs@npm:^17.6.2, yargs@npm:^17.7.2":
  version: 17.7.2
  resolution: "yargs@npm:17.7.2"
  dependencies: