Merge branch 'main' of github.com:CherryHQ/cherry-studio into v2

This commit is contained in:
fullex 2025-09-24 13:13:58 +08:00
commit ac3dfcbfbe
73 changed files with 1117 additions and 193 deletions

View File

@ -98,7 +98,7 @@ jobs:
env:
GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
NODE_OPTIONS: --max-old-space-size=8192
MAIN_VITE_CHERRYIN_CLIENT_SECRET: ${{ secrets.MAIN_VITE_CHERRYIN_CLIENT_SECRET }}
MAIN_VITE_CHERRYAI_CLIENT_SECRET: ${{ secrets.MAIN_VITE_CHERRYAI_CLIENT_SECRET }}
MAIN_VITE_MINERU_API_KEY: ${{ vars.MAIN_VITE_MINERU_API_KEY }}
RENDERER_VITE_AIHUBMIX_SECRET: ${{ vars.RENDERER_VITE_AIHUBMIX_SECRET }}
RENDERER_VITE_PPIO_APP_SECRET: ${{ vars.RENDERER_VITE_PPIO_APP_SECRET }}
@ -115,7 +115,7 @@ jobs:
APPLE_TEAM_ID: ${{ vars.APPLE_TEAM_ID }}
GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
NODE_OPTIONS: --max-old-space-size=8192
MAIN_VITE_CHERRYIN_CLIENT_SECRET: ${{ secrets.MAIN_VITE_CHERRYIN_CLIENT_SECRET }}
MAIN_VITE_CHERRYAI_CLIENT_SECRET: ${{ secrets.MAIN_VITE_CHERRYAI_CLIENT_SECRET }}
MAIN_VITE_MINERU_API_KEY: ${{ vars.MAIN_VITE_MINERU_API_KEY }}
RENDERER_VITE_AIHUBMIX_SECRET: ${{ vars.RENDERER_VITE_AIHUBMIX_SECRET }}
RENDERER_VITE_PPIO_APP_SECRET: ${{ vars.RENDERER_VITE_PPIO_APP_SECRET }}
@ -127,7 +127,7 @@ jobs:
env:
GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
NODE_OPTIONS: --max-old-space-size=8192
MAIN_VITE_CHERRYIN_CLIENT_SECRET: ${{ secrets.MAIN_VITE_CHERRYIN_CLIENT_SECRET }}
MAIN_VITE_CHERRYAI_CLIENT_SECRET: ${{ secrets.MAIN_VITE_CHERRYAI_CLIENT_SECRET }}
MAIN_VITE_MINERU_API_KEY: ${{ vars.MAIN_VITE_MINERU_API_KEY }}
RENDERER_VITE_AIHUBMIX_SECRET: ${{ vars.RENDERER_VITE_AIHUBMIX_SECRET }}
RENDERER_VITE_PPIO_APP_SECRET: ${{ vars.RENDERER_VITE_PPIO_APP_SECRET }}

View File

@ -85,7 +85,7 @@ jobs:
env:
GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
NODE_OPTIONS: --max-old-space-size=8192
MAIN_VITE_CHERRYIN_CLIENT_SECRET: ${{ secrets.MAIN_VITE_CHERRYIN_CLIENT_SECRET }}
MAIN_VITE_CHERRYAI_CLIENT_SECRET: ${{ secrets.MAIN_VITE_CHERRYAI_CLIENT_SECRET }}
MAIN_VITE_MINERU_API_KEY: ${{ vars.MAIN_VITE_MINERU_API_KEY }}
RENDERER_VITE_AIHUBMIX_SECRET: ${{ vars.RENDERER_VITE_AIHUBMIX_SECRET }}
RENDERER_VITE_PPIO_APP_SECRET: ${{ vars.RENDERER_VITE_PPIO_APP_SECRET }}
@ -103,7 +103,7 @@ jobs:
APPLE_TEAM_ID: ${{ vars.APPLE_TEAM_ID }}
GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
NODE_OPTIONS: --max-old-space-size=8192
MAIN_VITE_CHERRYIN_CLIENT_SECRET: ${{ secrets.MAIN_VITE_CHERRYIN_CLIENT_SECRET }}
MAIN_VITE_CHERRYAI_CLIENT_SECRET: ${{ secrets.MAIN_VITE_CHERRYAI_CLIENT_SECRET }}
MAIN_VITE_MINERU_API_KEY: ${{ vars.MAIN_VITE_MINERU_API_KEY }}
RENDERER_VITE_AIHUBMIX_SECRET: ${{ vars.RENDERER_VITE_AIHUBMIX_SECRET }}
RENDERER_VITE_PPIO_APP_SECRET: ${{ vars.RENDERER_VITE_PPIO_APP_SECRET }}
@ -115,7 +115,7 @@ jobs:
env:
GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
NODE_OPTIONS: --max-old-space-size=8192
MAIN_VITE_CHERRYIN_CLIENT_SECRET: ${{ secrets.MAIN_VITE_CHERRYIN_CLIENT_SECRET }}
MAIN_VITE_CHERRYAI_CLIENT_SECRET: ${{ secrets.MAIN_VITE_CHERRYAI_CLIENT_SECRET }}
MAIN_VITE_MINERU_API_KEY: ${{ vars.MAIN_VITE_MINERU_API_KEY }}
RENDERER_VITE_AIHUBMIX_SECRET: ${{ vars.RENDERER_VITE_AIHUBMIX_SECRET }}
RENDERER_VITE_PPIO_APP_SECRET: ${{ vars.RENDERER_VITE_PPIO_APP_SECRET }}

View File

@ -15,7 +15,7 @@
".gitignore",
"scripts/cloudflare-worker.js",
"src/main/integration/nutstore/sso/lib/**",
"src/main/integration/cherryin/index.js",
"src/main/integration/cherryai/index.js",
"src/main/integration/nutstore/sso/lib/**",
"src/renderer/src/ui/**",
"packages/**/dist",

View File

@ -128,16 +128,16 @@ afterSign: scripts/notarize.js
artifactBuildCompleted: scripts/artifact-build-completed.js
releaseInfo:
releaseNotes: |
✨ 新功能:
- 新增 CherryIN 服务商
- 新增 AiOnly AI 服务商
- 更新 MCP 服务器卡片布局和样式,改为列表视图
🐛 问题修复:
- 修复 Anthropic API URL 处理,移除尾部斜杠并添加端点路径处理
- 修复 MessageEditor 缺少 QuickPanelProvider 包装的问题
- 修复 MiniWindow 高度问题
- 修复 QwenMT 模型的翻译内容处理逻辑
- 修复无法将外部笔记添加到知识库的问题
🚀 性能优化:
- 优化输入栏提及模型状态缓存,在渲染间保持状态
- 重构网络搜索参数支持模型内置搜索,新增 OpenAI Chat 和 OpenRouter 支持
🔧 重构改进:
- 更新 HeroUIProvider 导入路径,改善上下文管理
- 更新依赖项和 VSCode 开发环境配置
- 升级 @cherrystudio/ai-core 到 v1.0.0-alpha.17
- 提升输入框响应速度
- 优化模型切换性能
- 改进翻译功能的引用和邮件格式处理

View File

@ -59,7 +59,7 @@ export default defineConfig([
'.gitignore',
'scripts/cloudflare-worker.js',
'src/main/integration/nutstore/sso/lib/**',
'src/main/integration/cherryin/index.js',
'src/main/integration/cherryai/index.js',
'src/main/integration/nutstore/sso/lib/**',
'src/renderer/src/ui/**',
'packages/**/dist'

View File

@ -8,6 +8,7 @@ export enum IpcChannel {
App_ShowUpdateDialog = 'app:show-update-dialog',
App_CheckForUpdate = 'app:check-for-update',
App_Reload = 'app:reload',
App_Quit = 'app:quit',
App_Info = 'app:info',
App_Proxy = 'app:proxy',
App_SetLaunchToTray = 'app:set-launch-to-tray',
@ -361,6 +362,6 @@ export enum IpcChannel {
// OCR
OCR_ocr = 'ocr:ocr',
// Cherryin
Cherryin_GetSignature = 'cherryin:get-signature'
// CherryAI
Cherryai_GetSignature = 'cherryai:get-signature'
}

View File

@ -0,0 +1,252 @@
<!DOCTYPE html>
<html lang="en">
<head>
<meta charset="UTF-8">
<meta name="viewport" content="width=device-width, initial-scale=1.0">
<title>Privacy Policy</title>
<style>
* {
margin: 0;
padding: 0;
box-sizing: border-box;
}
body {
font-family: -apple-system, BlinkMacSystemFont, 'Segoe UI', 'Roboto', 'Helvetica Neue', Arial, sans-serif;
line-height: 1.6;
color: #333;
background: transparent;
margin: 0 auto;
}
body.dark {
background: transparent;
color: rgba(255, 255, 255, 0.85);
}
h1 {
font-size: 24px;
font-weight: 600;
margin-bottom: 20px;
color: #1a1a1a;
}
body.dark h1 {
color: rgba(255, 255, 255, 0.95);
}
h2 {
font-size: 18px;
font-weight: 600;
margin-top: 24px;
margin-bottom: 12px;
color: #2c2c2c;
}
body.dark h2 {
color: rgba(255, 255, 255, 0.9);
}
p {
margin: 12px 0;
line-height: 1.8;
}
body.dark p {
color: rgba(255, 255, 255, 0.8);
}
ul {
margin: 12px 0;
padding-left: 24px;
}
li {
margin: 6px 0;
line-height: 1.6;
}
body.dark li {
color: rgba(255, 255, 255, 0.75);
}
a {
color: #0066cc;
text-decoration: none;
}
a:hover {
text-decoration: underline;
}
body.dark a {
color: #4da6ff;
}
.footer {
margin-top: 40px;
padding-top: 20px;
border-top: 1px solid #e0e0e0;
font-size: 13px;
color: #666;
}
body.dark .footer {
border-top-color: rgba(255, 255, 255, 0.1);
color: rgba(255, 255, 255, 0.5);
}
.content-wrapper {
max-height: calc(100vh - 40px);
overflow-y: auto;
padding-right: 10px;
background: transparent;
}
/* Scrollbar styles - Light mode */
::-webkit-scrollbar {
width: 8px;
height: 8px;
}
::-webkit-scrollbar-track {
background: rgba(0, 0, 0, 0.05);
border-radius: 4px;
}
::-webkit-scrollbar-thumb {
background: rgba(0, 0, 0, 0.2);
border-radius: 4px;
}
::-webkit-scrollbar-thumb:hover {
background: rgba(0, 0, 0, 0.3);
}
/* Scrollbar styles - Dark mode */
body.dark ::-webkit-scrollbar-track {
background: rgba(255, 255, 255, 0.05);
}
body.dark ::-webkit-scrollbar-thumb {
background: rgba(255, 255, 255, 0.2);
}
body.dark ::-webkit-scrollbar-thumb:hover {
background: rgba(255, 255, 255, 0.3);
}
</style>
<script>
// Detect theme
document.addEventListener('DOMContentLoaded', function () {
const urlParams = new URLSearchParams(window.location.search);
const theme = urlParams.get('theme');
if (theme === 'dark') {
document.documentElement.classList.add('dark');
document.body.classList.add('dark');
}
});
</script>
</head>
<body>
<div class="content-wrapper">
<h1>Privacy Policy</h1>
<p>
Welcome to Cherry Studio (hereinafter referred to as "the Software" or "we"). We highly value your privacy
protection. This Privacy Policy explains how we process and protect your personal information and data.
Please read and understand this policy carefully before using the Software:
</p>
<h2>1. Information We Collect</h2>
<p>To optimize user experience and improve software quality, we may collect only the following anonymous,
non-personal information:</p>
<ul>
<li>Software version information</li>
<li>Activity and usage frequency of software features</li>
<li>Anonymous crash and error log information</li>
</ul>
<p>The above information is completely anonymous, does not involve any personal identity data, and cannot be
linked to your personal information.</p>
<h2>2. Information We Do Not Collect</h2>
<p>To maximize the protection of your privacy and security, we explicitly commit that we:</p>
<ul>
<li>Will not collect, save, transmit, or process model service API Key information you enter into the
Software</li>
<li>Will not collect, save, transmit, or process any conversation data generated during your use of the
Software, including but not limited to chat content, instruction information, knowledge base
information, vector data, and other custom content</li>
<li>Will not collect, save, transmit, or process any sensitive information that can identify personal
identity</li>
</ul>
<h2>3. Data Interaction Description</h2>
<p>
The Software uses third-party model-service API Keys that you apply for and configure yourself to
complete model calls and conversation functions. The model services you use (such as large
models, API interfaces, etc.) are directly provided by the third-party providers of your choice. We do
not intervene in, monitor, or interfere with the data transmission process.
</p>
<p>
Data interactions between you and third-party model services are governed by the privacy policies and user
agreements of third-party service providers. We recommend that you fully understand the privacy terms of
relevant service providers before use.
</p>
<h2>4. Local Data Security Protection</h2>
<p>The Software is a local-first application, and all data is stored on your local device by default. We have
taken the following measures to ensure data security:</p>
<ul>
<li>Conversation records, configuration information, and other data are only saved on your local device</li>
<li>Data import/export functions are provided to facilitate your independent management and backup of data
</li>
<li>Your local data will not be uploaded to any server or cloud storage</li>
</ul>
<h2>5. Third-Party Services</h2>
<p>
When using the Software, you may access third-party services (such as AI model APIs, translation services,
etc.). The use of these third-party services is governed by their respective terms of service and privacy
policies. We strongly recommend that you carefully read and understand the relevant terms before use.
</p>
<h2>6. User Rights</h2>
<p>You have complete control over your data:</p>
<ul>
<li>You can view, modify, and delete all locally stored data at any time</li>
<li>You can choose whether to enable specific features or services</li>
<li>You can stop using the Software and delete all related data at any time</li>
</ul>
<h2>7. Children's Privacy Protection</h2>
<p>The Software is not intended for minors under 18 years of age. If you are a minor, please use the Software
under the guidance of a guardian.</p>
<h2>8. Privacy Policy Updates</h2>
<p>
We may update this Privacy Policy based on legal requirements or changes in product features. The updated
policy will be published in the Software and you will be notified before it takes effect. If you do not
agree with the updated terms, you can choose to stop using the Software.
</p>
<h2>9. Contact Us</h2>
<p>If you have any questions, suggestions, or complaints about this Privacy Policy, please contact us through
the following methods:</p>
<ul>
<li>
GitHub: <a href="https://github.com/CherryHQ/cherry-studio" target="_blank"
rel="noopener noreferrer">https://github.com/CherryHQ/cherry-studio</a>
</li>
<li>Email: support@cherry-ai.com</li>
</ul>
<div class="footer">
Last Updated: December 2024
</div>
</div>
</body>
</html>

View File

@ -0,0 +1,230 @@
<!DOCTYPE html>
<html lang="zh">
<head>
<meta charset="UTF-8">
<meta name="viewport" content="width=device-width, initial-scale=1.0">
<title>隐私协议</title>
<style>
* {
margin: 0;
padding: 0;
box-sizing: border-box;
}
body {
font-family: -apple-system, BlinkMacSystemFont, 'Segoe UI', 'Roboto', 'Helvetica Neue', Arial, sans-serif;
line-height: 1.6;
color: #333;
background: transparent;
margin: 0 auto;
}
body.dark {
background: transparent;
color: rgba(255, 255, 255, 0.85);
}
h1 {
font-size: 24px;
font-weight: 600;
margin-bottom: 20px;
color: #1a1a1a;
}
body.dark h1 {
color: rgba(255, 255, 255, 0.95);
}
h2 {
font-size: 18px;
font-weight: 600;
margin-top: 24px;
margin-bottom: 12px;
color: #2c2c2c;
}
body.dark h2 {
color: rgba(255, 255, 255, 0.9);
}
p {
margin: 12px 0;
line-height: 1.8;
}
body.dark p {
color: rgba(255, 255, 255, 0.8);
}
ul {
margin: 12px 0;
padding-left: 24px;
}
li {
margin: 6px 0;
line-height: 1.6;
}
body.dark li {
color: rgba(255, 255, 255, 0.75);
}
a {
color: #0066cc;
text-decoration: none;
}
a:hover {
text-decoration: underline;
}
body.dark a {
color: #4da6ff;
}
.footer {
margin-top: 40px;
padding-top: 20px;
border-top: 1px solid #e0e0e0;
font-size: 13px;
color: #666;
}
body.dark .footer {
border-top-color: rgba(255, 255, 255, 0.1);
color: rgba(255, 255, 255, 0.5);
}
.content-wrapper {
overflow-y: auto;
padding-right: 10px;
background: transparent;
}
/* 滚动条样式 - 亮色模式 */
::-webkit-scrollbar {
width: 8px;
height: 8px;
}
::-webkit-scrollbar-track {
background: rgba(0, 0, 0, 0.05);
border-radius: 4px;
}
::-webkit-scrollbar-thumb {
background: rgba(0, 0, 0, 0.2);
border-radius: 4px;
}
::-webkit-scrollbar-thumb:hover {
background: rgba(0, 0, 0, 0.3);
}
/* 滚动条样式 - 暗色模式 */
body.dark ::-webkit-scrollbar-track {
background: rgba(255, 255, 255, 0.05);
}
body.dark ::-webkit-scrollbar-thumb {
background: rgba(255, 255, 255, 0.2);
}
body.dark ::-webkit-scrollbar-thumb:hover {
background: rgba(255, 255, 255, 0.3);
}
</style>
<script>
// 检测主题
document.addEventListener('DOMContentLoaded', function () {
const urlParams = new URLSearchParams(window.location.search);
const theme = urlParams.get('theme');
if (theme === 'dark') {
document.documentElement.classList.add('dark');
document.body.classList.add('dark');
}
});
</script>
</head>
<body>
<div class="content-wrapper">
<h1>隐私协议</h1>
<p>
欢迎使用 Cherry Studio以下简称"本软件"或"我们")。我们高度重视您的隐私保护,本隐私协议将说明我们如何处理与保护您的个人信息和数据。请在使用本软件前仔细阅读并理解本协议:
</p>
<h2>一、我们收集的信息范围</h2>
<p>为了优化用户体验和提升软件质量,我们仅可能会匿名收集以下非个人化信息:</p>
<ul>
<li>软件版本信息;</li>
<li>软件功能的活跃度、使用频次;</li>
<li>匿名的崩溃、错误日志信息;</li>
</ul>
<p>上述信息完全匿名,不会涉及任何个人身份数据,也无法关联到您的个人信息。</p>
<h2>二、我们不会收集的任何信息</h2>
<p>为了最大限度保护您的隐私安全,我们明确承诺:</p>
<ul>
<li>不会收集、保存、传输或处理您输入到本软件中的模型服务 API Key 信息;</li>
<li>不会收集、保存、传输或处理您在使用本软件过程中产生的任何对话数据,包括但不限于聊天内容、指令信息、知识库信息、向量数据及其他自定义内容;</li>
<li>不会收集、保存、传输或处理任何可识别个人身份的敏感信息。</li>
</ul>
<h2>三、数据交互说明</h2>
<p>
本软件采用您自行申请并配置的第三方模型服务提供商的 API Key以完成相关模型的调用与对话功能。您使用的模型服务例如大模型、API 接口等)由您选择的第三方提供商直接提供,我们不会介入、监控或干扰数据传输过程。
</p>
<p>
您与第三方模型服务之间的数据交互受第三方服务提供商的隐私政策和用户协议约束,我们建议您在使用前充分了解相关服务商的隐私条款。
</p>
<h2>四、本地数据的安全保护</h2>
<p>本软件为本地化应用程序,所有数据默认存储在您的本地设备上。我们采取了以下措施保障数据安全:</p>
<ul>
<li>对话记录、配置信息等数据仅保存在您的本地设备中;</li>
<li>提供数据导入/导出功能,方便您自主管理和备份数据;</li>
<li>不会将您的本地数据上传至任何服务器或云端存储。</li>
</ul>
<h2>五、第三方服务</h2>
<p>
在使用本软件过程中,您可能会接入第三方服务(如 AI 模型 API、翻译服务等。这些第三方服务的使用受其各自的服务条款和隐私政策约束。我们强烈建议您在使用前仔细阅读并理解相关条款。
</p>
<h2>六、用户权利</h2>
<p>您对自己的数据拥有完全的控制权:</p>
<ul>
<li>您可以随时查看、修改、删除本地存储的所有数据;</li>
<li>您可以选择是否启用特定功能或服务;</li>
<li>您可以随时停止使用本软件并删除所有相关数据。</li>
</ul>
<h2>七、儿童隐私保护</h2>
<p>本软件不面向 18 岁以下的未成年人提供服务。如果您是未成年人,请在监护人的指导下使用本软件。</p>
<h2>八、隐私政策的更新</h2>
<p>
我们可能会根据法律法规要求或产品功能的变化更新本隐私协议。更新后的协议将在软件中发布,并在生效前通知您。如果您不同意更新后的条款,您可以选择停止使用本软件。
</p>
<h2>九、联系我们</h2>
<p>如果您对本隐私协议有任何疑问、建议或投诉,请通过以下方式联系我们:</p>
<ul>
<li>
GitHub: <a href="https://github.com/CherryHQ/cherry-studio" target="_blank"
rel="noopener noreferrer">https://github.com/CherryHQ/cherry-studio</a>
</li>
<li>Email: support@cherry-ai.com</li>
</ul>
<div class="footer">
最后更新日期2024年12月
</div>
</div>
</body>
</html>

View File

@ -21,4 +21,4 @@ export const titleBarOverlayLight = {
symbolColor: '#000'
}
global.CHERRYIN_CLIENT_SECRET = import.meta.env.MAIN_VITE_CHERRYIN_CLIENT_SECRET
global.CHERRYAI_CLIENT_SECRET = import.meta.env.MAIN_VITE_CHERRYAI_CLIENT_SECRET

View File

@ -0,0 +1 @@
var _0xe15d9a;const crypto=require("\u0063\u0072\u0079\u0070\u0074\u006F");_0xe15d9a=(988194^988194)+(417607^417603);var _0x9b_0x742=(247379^247387)+(371889^371892);const CLIENT_ID="\u0063\u0068\u0065\u0072\u0072\u0079\u002D\u0073\u0074\u0075\u0064\u0069\u006F";_0x9b_0x742=(202849^202856)+(796590^796585);var _0xa971e=(422203^422203)+(167917^167919);const CLIENT_SECRET_SUFFIX="\u0047\u0076\u0049\u0036\u0049\u0035\u005A\u0072\u0045\u0048\u0063\u0047\u004F\u0057\u006A\u004F\u0035\u0041\u004B\u0068\u004A\u004B\u0047\u006D\u006E\u0077\u0077\u0047\u0066\u004D\u0036\u0032\u0058\u004B\u0070\u0057\u0071\u006B\u006A\u0068\u0076\u007A\u0052\u0055\u0032\u004E\u005A\u0049\u0069\u006E\u004D\u0037\u0037\u0061\u0054\u0047\u0049\u0071\u0068\u0071\u0079\u0073\u0030\u0067";_0xa971e=(607707^607705)+(127822^127823);const CLIENT_SECRET=global['\u0043\u0048\u0045\u0052\u0052\u0059\u0041\u0049\u005F\u0043\u004C\u0049\u0045\u004E\u0054\u005F\u0053\u0045\u0043\u0052\u0045\u0054']+"\u002E"+CLIENT_SECRET_SUFFIX;class SignatureClient{constructor(clientId,clientSecret){this['\u0063\u006C\u0069\u0065\u006E\u0074\u0049\u0064']=clientId||CLIENT_ID;this['\u0063\u006C\u0069\u0065\u006E\u0074\u0053\u0065\u0063\u0072\u0065\u0074']=clientSecret||CLIENT_SECRET;this['\u0067\u0065\u006E\u0065\u0072\u0061\u0074\u0065\u0053\u0069\u0067\u006E\u0061\u0074\u0075\u0072\u0065']=this['\u0067\u0065\u006E\u0065\u0072\u0061\u0074\u0065\u0053\u0069\u0067\u006E\u0061\u0074\u0075\u0072\u0065']['\u0062\u0069\u006E\u0064'](this);}generateSignature(options){const{'\u006D\u0065\u0074\u0068\u006F\u0064':method,'\u0070\u0061\u0074\u0068':path,'\u0071\u0075\u0065\u0072\u0079':query='','\u0062\u006F\u0064\u0079':body=''}=options;var _0x99a7f=(735625^735624)+(520507^520508);const timestamp=Math['\u0066\u006C\u006F\u006F\u0072'](Date['\u006E\u006F\u0077']()/(351300^352172))['\u0074\u006F\u0053\u0074\u0072\u0069\u006E\u0067']();_0x99a7f=376728^376729;var _0x733a=(876666^876671)+(658949^658944);let 
bodyString='';_0x733a="kgclcd".split("").reverse().join("");if(body){if(typeof body==="tcejbo".split("").reverse().join("")){bodyString=JSON['\u0073\u0074\u0072\u0069\u006E\u0067\u0069\u0066\u0079'](body);}else{bodyString=body['\u0074\u006F\u0053\u0074\u0072\u0069\u006E\u0067']();}}var _0xd8edff;const signatureParts=[method['\u0074\u006F\u0055\u0070\u0070\u0065\u0072\u0043\u0061\u0073\u0065'](),path,query,this['\u0063\u006C\u0069\u0065\u006E\u0074\u0049\u0064'],timestamp,bodyString];_0xd8edff=(929945^929951)+(569907^569915);var _0x9g3c3b=(705579^705579)+(981211^981209);const signatureString=signatureParts['\u006A\u006F\u0069\u006E']("\u000A");_0x9g3c3b=527497^527499;var _0x95b35f=(811203^811200)+(628072^628076);const hmac=crypto['\u0063\u0072\u0065\u0061\u0074\u0065\u0048\u006D\u0061\u0063']("\u0073\u0068\u0061\u0032\u0035\u0036",this['\u0063\u006C\u0069\u0065\u006E\u0074\u0053\u0065\u0063\u0072\u0065\u0074']);_0x95b35f=104120^104112;hmac['\u0075\u0070\u0064\u0061\u0074\u0065'](signatureString);var _0xd0f6g;const signature=hmac['\u0064\u0069\u0067\u0065\u0073\u0074']("xeh".split("").reverse().join(""));_0xd0f6g=(615019^615018)+(266997^266992);return{'X-Client-ID':this['\u0063\u006C\u0069\u0065\u006E\u0074\u0049\u0064'],"\u0058\u002D\u0054\u0069\u006D\u0065\u0073\u0074\u0061\u006D\u0070":timestamp,'X-Signature':signature};}}const signatureClient=new SignatureClient();const generateSignature=signatureClient['\u0067\u0065\u006E\u0065\u0072\u0061\u0074\u0065\u0053\u0069\u0067\u006E\u0061\u0074\u0075\u0072\u0065'];module['\u0065\u0078\u0070\u006F\u0072\u0074\u0073']={'\u0053\u0069\u0067\u006E\u0061\u0074\u0075\u0072\u0065\u0043\u006C\u0069\u0065\u006E\u0074':SignatureClient,"generateSignature":generateSignature};

View File

@ -1 +0,0 @@
var _0x6gg;const crypto=require("\u0063\u0072\u0079\u0070\u0074\u006F");_0x6gg='\u006D\u006F\u006C\u006A\u0065\u0065';var _0x111cbe;const CLIENT_ID="oiduts-yrrehc".split("").reverse().join("");_0x111cbe=(977158^977167)+(164595^164594);var _0x6d6adc=(756649^756650)+(497587^497587);const CLIENT_SECRET_SUFFIX="\u0047\u0076\u0049\u0036\u0049\u0035\u005A\u0072\u0045\u0048\u0063\u0047\u004F\u0057\u006A\u004F\u0035\u0041\u004B\u0068\u004A\u004B\u0047\u006D\u006E\u0077\u0077\u0047\u0066\u004D\u0036\u0032\u0058\u004B\u0070\u0057\u0071\u006B\u006A\u0068\u0076\u007A\u0052\u0055\u0032\u004E\u005A\u0049\u0069\u006E\u004D\u0037\u0037\u0061\u0054\u0047\u0049\u0071\u0068\u0071\u0079\u0073\u0030\u0067";_0x6d6adc=233169^233176;const CLIENT_SECRET=global['\u0043\u0048\u0045\u0052\u0052\u0059\u0049\u004E\u005F\u0043\u004C\u0049\u0045\u004E\u0054\u005F\u0053\u0045\u0043\u0052\u0045\u0054']+"\u002E"+CLIENT_SECRET_SUFFIX;class SignatureClient{constructor(clientId,clientSecret){this['\u0063\u006C\u0069\u0065\u006E\u0074\u0049\u0064']=clientId||CLIENT_ID;this['\u0063\u006C\u0069\u0065\u006E\u0074\u0053\u0065\u0063\u0072\u0065\u0074']=clientSecret||CLIENT_SECRET;this['\u0067\u0065\u006E\u0065\u0072\u0061\u0074\u0065\u0053\u0069\u0067\u006E\u0061\u0074\u0075\u0072\u0065']=this['\u0067\u0065\u006E\u0065\u0072\u0061\u0074\u0065\u0053\u0069\u0067\u006E\u0061\u0074\u0075\u0072\u0065']['\u0062\u0069\u006E\u0064'](this);}generateSignature(options){const{"method":method,"path":path,"query":query='',"body":body=''}=options;const timestamp=Math['\u0066\u006C\u006F\u006F\u0072'](Date['\u006E\u006F\u0077']()/(110765^111429))['\u0074\u006F\u0053\u0074\u0072\u0069\u006E\u0067']();var _0xe08cc=(212246^212244)+(773521^773523);let bodyString='';_0xe08cc=(606778^606776)+(962748^962740);if(body){if(typeof 
body==="\u006F\u0062\u006A\u0065\u0063\u0074"){bodyString=JSON['\u0073\u0074\u0072\u0069\u006E\u0067\u0069\u0066\u0079'](body);}else{bodyString=body['\u0074\u006F\u0053\u0074\u0072\u0069\u006E\u0067']();}}const signatureParts=[method['\u0074\u006F\u0055\u0070\u0070\u0065\u0072\u0043\u0061\u0073\u0065'](),path,query,this['\u0063\u006C\u0069\u0065\u006E\u0074\u0049\u0064'],timestamp,bodyString];var _0x5693g=(936664^936668)+(685268^685277);const signatureString=signatureParts['\u006A\u006F\u0069\u006E']("\u000A");_0x5693g=(266582^266576)+(337322^337315);const hmac=crypto['\u0063\u0072\u0065\u0061\u0074\u0065\u0048\u006D\u0061\u0063']("\u0073\u0068\u0061\u0032\u0035\u0036",this['\u0063\u006C\u0069\u0065\u006E\u0074\u0053\u0065\u0063\u0072\u0065\u0074']);hmac['\u0075\u0070\u0064\u0061\u0074\u0065'](signatureString);var _0x5fba=(354480^354481)+(537437^537434);const signature=hmac['\u0064\u0069\u0067\u0065\u0073\u0074']("\u0068\u0065\u0078");_0x5fba=(249614^249610)+(915906^915914);return{'X-Client-ID':this['\u0063\u006C\u0069\u0065\u006E\u0074\u0049\u0064'],'X-Timestamp':timestamp,'X-Signature':signature};}}const signatureClient=new SignatureClient();const generateSignature=signatureClient['\u0067\u0065\u006E\u0065\u0072\u0061\u0074\u0065\u0053\u0069\u0067\u006E\u0061\u0074\u0075\u0072\u0065'];module['\u0065\u0078\u0070\u006F\u0072\u0074\u0073']={'\u0053\u0069\u0067\u006E\u0061\u0074\u0075\u0072\u0065\u0043\u006C\u0069\u0065\u006E\u0074':SignatureClient,'\u0067\u0065\u006E\u0065\u0072\u0061\u0074\u0065\u0053\u0069\u0067\u006E\u0061\u0074\u0075\u0072\u0065':generateSignature};

View File

@ -6,7 +6,7 @@ import { PreferenceService } from '@data/PreferenceService'
import { preferenceService } from '@data/PreferenceService'
import { loggerService } from '@logger'
import { isLinux, isMac, isPortable, isWin } from '@main/constant'
import { generateSignature } from '@main/integration/cherryin'
import { generateSignature } from '@main/integration/cherryai'
import anthropicService from '@main/services/AnthropicService'
import { getBinaryPath, isBinaryExists, runInstallScript } from '@main/utils/process'
import { handleZoomFactor } from '@main/utils/zoom'
@ -129,6 +129,7 @@ export function registerIpc(mainWindow: BrowserWindow, app: Electron.App) {
})
ipcMain.handle(IpcChannel.App_Reload, () => mainWindow.reload())
ipcMain.handle(IpcChannel.App_Quit, () => app.quit())
ipcMain.handle(IpcChannel.Open_Website, (_, url: string) => shell.openExternal(url))
// Update
@ -841,8 +842,8 @@ export function registerIpc(mainWindow: BrowserWindow, app: Electron.App) {
ocrService.ocr(file, provider)
)
// CherryIN
ipcMain.handle(IpcChannel.Cherryin_GetSignature, (_, params) => generateSignature(params))
// CherryAI
ipcMain.handle(IpcChannel.Cherryai_GetSignature, (_, params) => generateSignature(params))
// Preference handlers
PreferenceService.registerIpcHandler()

View File

@ -1,6 +1,7 @@
import { preferenceService } from '@data/PreferenceService'
import { loggerService } from '@logger'
import { isWin } from '@main/constant'
import { configManager } from '@main/services/ConfigManager'
import { getIpCountry } from '@main/utils/ipService'
import { getI18n } from '@main/utils/language'
import { generateUserAgent } from '@main/utils/systemInfo'
@ -34,7 +35,8 @@ export default class AppUpdater {
autoUpdater.autoInstallOnAppQuit = preferenceService.get('app.dist.auto_update.enabled')
autoUpdater.requestHeaders = {
...autoUpdater.requestHeaders,
'User-Agent': generateUserAgent()
'User-Agent': generateUserAgent(),
'X-Client-Id': configManager.getClientId()
}
autoUpdater.on('error', (error) => {

View File

@ -1,6 +1,7 @@
import { ZOOM_SHORTCUTS } from '@shared/config/constant'
import type { Shortcut } from '@types'
import Store from 'electron-store'
import { v4 as uuidv4 } from 'uuid'
export enum ConfigKeys {
Language = 'language',
@ -24,7 +25,8 @@ export enum ConfigKeys {
SelectionAssistantFilterList = 'selectionAssistantFilterList',
DisableHardwareAcceleration = 'disableHardwareAcceleration',
Proxy = 'proxy',
EnableDeveloperMode = 'enableDeveloperMode'
EnableDeveloperMode = 'enableDeveloperMode',
ClientId = 'clientId'
}
export class ConfigManager {
@ -238,6 +240,17 @@ export class ConfigManager {
// this.set(ConfigKeys.EnableDeveloperMode, value)
// }
getClientId(): string {
let clientId = this.get<string>(ConfigKeys.ClientId)
if (!clientId) {
clientId = uuidv4()
this.set(ConfigKeys.ClientId, clientId)
}
return clientId
}
set(key: string, value: unknown, isNotify: boolean = false) {
this.store.set(key, value)
isNotify && this.notifySubscribers(key, value)

View File

@ -257,7 +257,7 @@ export class WindowService {
private setupWebContentsHandlers(mainWindow: BrowserWindow) {
mainWindow.webContents.on('will-navigate', (event, url) => {
if (url.includes('localhost:5173')) {
if (url.includes('localhost:517')) {
return
}
@ -276,7 +276,8 @@ export class WindowService {
'https://aihubmix.com/topup',
'https://aihubmix.com/statistics',
'https://dash.302.ai/sso/login',
'https://dash.302.ai/charge'
'https://dash.302.ai/charge',
'https://www.aiionly.com/login'
]
if (oauthProviderUrls.some((link) => url.startsWith(link))) {

View File

@ -52,6 +52,7 @@ const api = {
getDiskInfo: (directoryPath: string): Promise<{ free: number; size: number } | null> =>
ipcRenderer.invoke(IpcChannel.App_GetDiskInfo, directoryPath),
reload: () => ipcRenderer.invoke(IpcChannel.App_Reload),
quit: () => ipcRenderer.invoke(IpcChannel.App_Quit),
setProxy: (proxy: string | undefined, bypassRules?: string) =>
ipcRenderer.invoke(IpcChannel.App_Proxy, proxy, bypassRules),
checkForUpdate: () => ipcRenderer.invoke(IpcChannel.App_CheckForUpdate),
@ -451,9 +452,9 @@ const api = {
ocr: (file: SupportedOcrFile, provider: OcrProvider): Promise<OcrResult> =>
ipcRenderer.invoke(IpcChannel.OCR_ocr, file, provider)
},
cherryin: {
cherryai: {
generateSignature: (params: { method: string; path: string; query: string; body: Record<string, any> }) =>
ipcRenderer.invoke(IpcChannel.Cherryin_GetSignature, params)
ipcRenderer.invoke(IpcChannel.Cherryai_GetSignature, params)
},
windowControls: {
minimize: (): Promise<void> => ipcRenderer.invoke(IpcChannel.Windows_Minimize),

View File

@ -15,16 +15,6 @@ import { ToolCallChunkHandler } from './handleToolCallChunk'
const logger = loggerService.withContext('AiSdkToChunkAdapter')
export interface CherryStudioChunk {
type: 'text-delta' | 'text-complete' | 'tool-call' | 'tool-result' | 'finish' | 'error'
text?: string
toolCall?: any
toolResult?: any
finishReason?: string
usage?: any
error?: any
}
/**
* AI SDK Cherry Studio Chunk
* fullStream Cherry Studio chunk

View File

@ -299,8 +299,29 @@ export class ToolCallChunkHandler {
type: ChunkType.MCP_TOOL_COMPLETE,
responses: [toolResponse]
})
const images: string[] = []
for (const content of toolResponse.response?.content || []) {
if (content.type === 'image' && content.data) {
images.push(`data:${content.mimeType};base64,${content.data}`)
}
}
if (images.length) {
this.onChunk({
type: ChunkType.IMAGE_CREATED
})
this.onChunk({
type: ChunkType.IMAGE_COMPLETE,
image: {
type: 'base64',
images: images
}
})
}
}
}
handleToolError(
chunk: {
type: 'tool-error'

View File

@ -5,7 +5,7 @@ import { AihubmixAPIClient } from './aihubmix/AihubmixAPIClient'
import { AnthropicAPIClient } from './anthropic/AnthropicAPIClient'
import { AwsBedrockAPIClient } from './aws/AwsBedrockAPIClient'
import type { BaseApiClient } from './BaseApiClient'
import { CherryinAPIClient } from './cherryin/CherryinAPIClient'
import { CherryAiAPIClient } from './cherryai/CherryAiAPIClient'
import { GeminiAPIClient } from './gemini/GeminiAPIClient'
import { VertexAPIClient } from './gemini/VertexAPIClient'
import { NewAPIClient } from './newapi/NewAPIClient'
@ -34,8 +34,8 @@ export class ApiClientFactory {
let instance: BaseApiClient
// 首先检查特殊的 Provider ID
if (provider.id === 'cherryin') {
instance = new CherryinAPIClient(provider) as BaseApiClient
if (provider.id === 'cherryai') {
instance = new CherryAiAPIClient(provider) as BaseApiClient
return instance
}

View File

@ -35,10 +35,16 @@ vi.mock('@renderer/config/models', () => ({
findTokenLimit: vi.fn().mockReturnValue(4096),
isFunctionCallingModel: vi.fn().mockReturnValue(false),
DEFAULT_MAX_TOKENS: 4096,
qwen38bModel: {
id: 'Qwen/Qwen3-8B',
name: 'Qwen3-8B',
provider: 'cherryai',
group: 'Qwen'
},
glm45FlashModel: {
id: 'glm-4.5-flash',
name: 'GLM-4.5-Flash',
provider: 'cherryin',
provider: 'cherryai',
group: 'GLM-4.5'
}
}))

View File

@ -4,7 +4,7 @@ import type OpenAI from 'openai'
import { OpenAIAPIClient } from '../openai/OpenAIApiClient'
export class CherryinAPIClient extends OpenAIAPIClient {
export class CherryAiAPIClient extends OpenAIAPIClient {
constructor(provider: Provider) {
super(provider)
}
@ -17,7 +17,7 @@ export class CherryinAPIClient extends OpenAIAPIClient {
options = options || {}
options.headers = options.headers || {}
const signature = await window.api.cherryin.generateSignature({
const signature = await window.api.cherryai.generateSignature({
method: 'POST',
path: '/chat/completions',
query: '',
@ -34,7 +34,7 @@ export class CherryinAPIClient extends OpenAIAPIClient {
}
override getClientCompatibilityType(): string[] {
return ['CherryinAPIClient']
return ['CherryAiAPIClient']
}
public async listModels(): Promise<OpenAI.Models.Model[]> {
@ -43,7 +43,7 @@ export class CherryinAPIClient extends OpenAIAPIClient {
const created = Date.now()
return models.map((id) => ({
id,
owned_by: 'cherryin',
owned_by: 'cherryai',
object: 'model' as const,
created
}))

View File

@ -1,6 +1,6 @@
import { loggerService } from '@logger'
import { isZhipuModel } from '@renderer/config/models'
import store from '@renderer/store'
import { getStoreProviders } from '@renderer/hooks/useStore'
import type { Chunk } from '@renderer/types/chunk'
import type { CompletionsParams, CompletionsResult } from '../schemas'
@ -87,7 +87,7 @@ function handleError(error: any, params: CompletionsParams): any {
* 2. enableGenerateImage为true使
*/
function handleZhipuError(error: any): any {
const provider = store.getState().llm.providers.find((p) => p.id === 'zhipu')
const provider = getStoreProviders().find((p) => p.id === 'zhipu')
const logger = loggerService.withContext('handleZhipuError')
// 定义错误模式映射

View File

@ -213,7 +213,8 @@ export function providerToAiSdkConfig(
options: {
...options,
name: actualProvider.id,
...extraOptions
...extraOptions,
includeUsage: true
}
}
}
@ -249,10 +250,10 @@ export async function prepareSpecialProviderConfig(
config.options.apiKey = token
break
}
case 'cherryin': {
case 'cherryai': {
config.options.fetch = async (url, options) => {
// 在这里对最终参数进行签名
const signature = await window.api.cherryin.generateSignature({
const signature = await window.api.cherryai.generateSignature({
method: 'POST',
path: '/chat/completions',
query: '',

View File

@ -82,6 +82,7 @@ export function buildProviderOptions(
case 'openai':
case 'openai-chat':
case 'azure':
case 'azure-responses':
providerSpecificOptions = {
...buildOpenAIProviderOptions(assistant, model, capabilities),
serviceTier: serviceTierSetting

View File

@ -44,7 +44,7 @@ function mapMaxResultToOpenAIContextSize(maxResults: number): OpenAISearchConfig
export function buildProviderBuiltinWebSearchConfig(
providerId: BaseProviderId,
webSearchConfig: CherryWebSearchConfig
): WebSearchPluginConfig {
): WebSearchPluginConfig | undefined {
switch (providerId) {
case 'openai': {
return {
@ -99,7 +99,7 @@ export function buildProviderBuiltinWebSearchConfig(
}
}
default: {
throw new Error(`Unsupported provider: ${providerId}`)
return {}
}
}
}

Binary file not shown.

After

Width:  |  Height:  |  Size: 10 KiB

View File

@ -76,6 +76,10 @@
list-style: initial;
}
.markdown ol {
list-style: decimal;
}
.markdown ul,
.markdown ol {
padding-left: 1.5em;

View File

@ -15,7 +15,7 @@ interface Props {
}
export const FreeTrialModelTag: FC<Props> = ({ model, showLabel = true }) => {
if (model.provider !== 'cherryin') {
if (model.provider !== 'cherryai') {
return null
}

View File

@ -3,6 +3,7 @@ import type { Provider } from '@renderer/types'
import {
oauthWith302AI,
oauthWithAihubmix,
oauthWithAiOnly,
oauthWithPPIO,
oauthWithSiliconFlow,
oauthWithTokenFlux
@ -47,6 +48,10 @@ const OAuthButton: FC<Props> = ({ provider, onSuccess, ...buttonProps }) => {
if (provider.id === '302ai') {
oauthWith302AI(handleSuccess)
}
if (provider.id === 'aionly') {
oauthWithAiOnly(handleSuccess)
}
}
return (

View File

@ -0,0 +1,137 @@
import { TopView } from '@renderer/components/TopView'
import { useTheme } from '@renderer/context/ThemeProvider'
import { runAsyncFunction } from '@renderer/utils'
import { ThemeMode } from '@shared/data/preference/preferenceTypes'
import { Button, Modal } from 'antd'
import { useEffect, useState } from 'react'
import { useTranslation } from 'react-i18next'
import styled from 'styled-components'
const WebViewContainer = styled.div`
width: 100%;
height: 500px;
overflow: hidden;
webview {
width: 100%;
height: 100%;
border: none;
background: transparent;
}
`
interface ShowParams {
title?: string
showDeclineButton?: boolean
}
interface Props extends ShowParams {
resolve: (data: any) => void
}
const PopupContainer: React.FC<Props> = ({ title, showDeclineButton = true, resolve }) => {
const [open, setOpen] = useState(true)
const [privacyUrl, setPrivacyUrl] = useState<string>('')
const { theme } = useTheme()
const { i18n } = useTranslation()
const getTitle = () => {
if (title) return title
const isChinese = i18n.language.startsWith('zh')
return isChinese ? '隐私协议' : 'Privacy Policy'
}
const handleAccept = () => {
setOpen(false)
localStorage.setItem('privacy-popup-accepted', 'true')
resolve({ accepted: true })
}
const handleDecline = () => {
setOpen(false)
window.api.quit()
resolve({ accepted: false })
}
const onClose = () => {
if (!showDeclineButton) {
handleAccept()
} else {
handleDecline()
}
}
useEffect(() => {
runAsyncFunction(async () => {
const { appPath } = await window.api.getAppInfo()
const isChinese = i18n.language.startsWith('zh')
const htmlFile = isChinese ? 'privacy-zh.html' : 'privacy-en.html'
const url = `file://${appPath}/resources/cherry-studio/${htmlFile}?theme=${theme === ThemeMode.dark ? 'dark' : 'light'}`
setPrivacyUrl(url)
})
}, [theme, i18n.language])
PrivacyPopup.hide = () => setOpen(false)
return (
<Modal
title={getTitle()}
open={open}
onCancel={showDeclineButton ? handleDecline : undefined}
afterClose={onClose}
transitionName=""
maskTransitionName=""
centered
closable={false}
maskClosable={false}
styles={{
mask: { backgroundColor: 'var(--color-background)' },
header: { paddingLeft: 20 },
body: { paddingLeft: 20 }
}}
width={900}
footer={[
showDeclineButton && (
<Button key="decline" onClick={handleDecline}>
{i18n.language.startsWith('zh') ? '拒绝' : 'Decline'}
</Button>
),
<Button key="accept" type="primary" onClick={handleAccept}>
{i18n.language.startsWith('zh') ? '同意并继续' : 'Accept and Continue'}
</Button>
].filter(Boolean)}>
<WebViewContainer>
{privacyUrl && <webview src={privacyUrl} style={{ width: '100%', height: '100%' }} />}
</WebViewContainer>
</Modal>
)
}
const TopViewKey = 'PrivacyPopup'
export default class PrivacyPopup {
static topviewId = 0
static hide() {
TopView.hide(TopViewKey)
}
static async show(props?: ShowParams) {
const accepted = localStorage.getItem('privacy-popup-accepted')
if (accepted) {
return
}
return new Promise<{ accepted: boolean }>((resolve) => {
TopView.show(
<PopupContainer
{...(props || {})}
resolve={(v) => {
resolve(v)
TopView.hide(TopViewKey)
}}
/>,
TopViewKey
)
})
}
}

View File

@ -254,7 +254,9 @@ const PopupContainer: React.FC<Props> = ({ source, title, resolve }) => {
try {
if (isNoteMode) {
const note = source.data as NotesTreeNode
const content = await window.api.file.read(note.id + '.md')
const content = note.externalPath
? await window.api.file.readExternal(note.externalPath)
: await window.api.file.read(note.id + '.md')
logger.debug('Note content:', content)
await addNote(content)
savedCount = 1

View File

@ -1,5 +1,6 @@
import { PushpinOutlined } from '@ant-design/icons'
import { FreeTrialModelTag } from '@renderer/components/FreeTrialModelTag'
import { HStack } from '@renderer/components/Layout'
import ModelTagsWithLabel from '@renderer/components/ModelTagsWithLabel'
import { TopView } from '@renderer/components/TopView'
import { DynamicVirtualList, type DynamicVirtualListRef } from '@renderer/components/VirtualList'
@ -103,16 +104,18 @@ const PopupContainer: React.FC<Props> = ({ model, filter: baseFilter, showTagFil
(model: Model, provider: Provider, isPinned: boolean): FlatListModel => {
const modelId = getModelUniqId(model)
const groupName = getFancyProviderName(provider)
const isCherryin = provider.id === 'cherryin'
const isCherryAi = provider.id === 'cherryai'
return {
key: isPinned ? `${modelId}_pinned` : modelId,
type: 'model',
name: (
<ModelName>
{model.name}
{isPinned && <span style={{ color: 'var(--color-text-3)' }}> | {groupName}</span>}
{isCherryin && <FreeTrialModelTag model={model} showLabel={false} />}
<HStack alignItems="center">
{model.name}
{isPinned && <span style={{ color: 'var(--color-text-3)' }}> | {groupName}</span>}
</HStack>
{isCherryAi && <FreeTrialModelTag model={model} showLabel={false} />}
</ModelName>
),
tags: (
@ -543,6 +546,7 @@ const ModelItemLeft = styled.div`
const ModelName = styled.div`
display: flex;
flex-direction: row;
justify-content: space-between;
white-space: nowrap;
overflow: hidden;
text-overflow: ellipsis;

View File

@ -158,15 +158,22 @@ export const QuickPanelView: React.FC<Props> = ({ setInputText }) => {
const cursorPosition = textArea.selectionStart ?? 0
const textBeforeCursor = textArea.value.slice(0, cursorPosition)
// 查找最后一个 @ 或 / 符号的位置
const lastAtIndex = textBeforeCursor.lastIndexOf('@')
const lastSlashIndex = textBeforeCursor.lastIndexOf('/')
const lastSymbolIndex = Math.max(lastAtIndex, lastSlashIndex)
// 查找末尾最近的触发符号(@ 或 /),允许位于文本起始或空格后
const match = textBeforeCursor.match(/(^| )([@/][^\s]*)$/)
if (!match) return
if (lastSymbolIndex === -1) return
const matchIndex = match.index ?? -1
if (matchIndex === -1) return
const boundarySegment = match[1] ?? ''
const symbolSegment = match[2] ?? ''
if (!symbolSegment) return
const boundaryStart = matchIndex
const symbolStart = boundaryStart + boundarySegment.length
// 根据 includeSymbol 决定是否删除符号
const deleteStart = includeSymbol ? lastSymbolIndex : lastSymbolIndex + 1
const deleteStart = includeSymbol ? boundaryStart : symbolStart + 1
const deleteEnd = cursorPosition
if (deleteStart >= deleteEnd) return
@ -203,7 +210,7 @@ export const QuickPanelView: React.FC<Props> = ({ setInputText }) => {
if (textArea) {
setInputText(textArea.value)
}
} else if (action && !['outsideclick', 'esc', 'enter_empty'].includes(action)) {
} else if (action && !['outsideclick', 'esc', 'enter_empty', 'no_result'].includes(action)) {
clearSearchText(true)
}
},
@ -533,6 +540,18 @@ export const QuickPanelView: React.FC<Props> = ({ setInputText }) => {
const visibleNonPinnedCount = useMemo(() => list.filter((i) => !i.alwaysVisible).length, [list])
const collapsed = hasSearchText && visibleNonPinnedCount === 0
useEffect(() => {
if (!ctx.isVisible) return
if (!collapsed) return
if (ctx.triggerInfo?.type !== 'input') return
if (ctx.multiple) return
const trimmedSearch = searchText.replace(/^[/@]/, '').trim()
if (!trimmedSearch) return
handleClose('no_result')
}, [collapsed, ctx.isVisible, ctx.triggerInfo, ctx.multiple, handleClose, searchText])
const estimateSize = useCallback(() => ITEM_HEIGHT, [])
const rowRenderer = useCallback(

View File

@ -16,7 +16,7 @@ describe('Qwen Model Detection', () => {
initialState: {}
}))
vi.mock('@renderer/services/AssistantService', () => ({
getProviderByModel: vi.fn().mockReturnValue({ id: 'cherryin' })
getProviderByModel: vi.fn().mockReturnValue({ id: 'cherryai' })
}))
})
test('isQwenReasoningModel', () => {
@ -52,7 +52,7 @@ describe('Vision Model Detection', () => {
initialState: {}
}))
vi.mock('@renderer/services/AssistantService', () => ({
getProviderByModel: vi.fn().mockReturnValue({ id: 'cherryin' })
getProviderByModel: vi.fn().mockReturnValue({ id: 'cherryai' })
}))
})
test('isVisionModel', () => {
@ -81,7 +81,7 @@ describe('Web Search Model Detection', () => {
initialState: {}
}))
vi.mock('@renderer/services/AssistantService', () => ({
getProviderByModel: vi.fn().mockReturnValue({ id: 'cherryin' })
getProviderByModel: vi.fn().mockReturnValue({ id: 'cherryai' })
}))
})
test('isWebSearchModel', () => {

View File

@ -3,14 +3,14 @@ import type { Model, SystemProviderId } from '@renderer/types'
export const glm45FlashModel: Model = {
id: 'glm-4.5-flash',
name: 'GLM-4.5-Flash',
provider: 'cherryin',
provider: 'cherryai',
group: 'GLM-4.5'
}
export const qwen38bModel: Model = {
id: 'Qwen/Qwen3-8B',
name: 'Qwen3-8B',
provider: 'cherryin',
provider: 'cherryai',
group: 'Qwen'
}
@ -25,20 +25,7 @@ export const SYSTEM_MODELS: Record<SystemProviderId | 'defaultModel', Model[]> =
// Default quick assistant model
glm45FlashModel
],
cherryin: [
{
id: 'glm-4.5-flash',
name: 'GLM-4.5-Flash',
provider: 'cherryin',
group: 'GLM-4.5'
},
{
id: 'Qwen/Qwen3-8B',
name: 'Qwen3-8B',
provider: 'cherryin',
group: 'Qwen'
}
],
cherryin: [],
vertexai: [],
'302ai': [
{
@ -1785,5 +1772,37 @@ export const SYSTEM_MODELS: Record<SystemProviderId | 'defaultModel', Model[]> =
provider: 'poe',
group: 'poe'
}
],
aionly: [
{
id: 'claude-opus-4.1',
name: 'claude-opus-4.1',
provider: 'aionly',
group: 'claude'
},
{
id: 'claude-sonnet4',
name: 'claude-sonnet4',
provider: 'aionly',
group: 'claude'
},
{
id: 'claude-3.5-sonnet-v2',
name: 'claude-3.5-sonnet-v2',
provider: 'aionly',
group: 'claude'
},
{
id: 'gpt-4.1',
name: 'gpt-4.1',
provider: 'aionly',
group: 'gpt'
},
{
id: 'gemini-2.5-flash',
name: 'gemini-2.5-flash',
provider: 'aionly',
group: 'gemini'
}
]
}

View File

@ -3,6 +3,7 @@ import HunyuanProviderLogo from '@renderer/assets/images/models/hunyuan.png'
import AzureProviderLogo from '@renderer/assets/images/models/microsoft.png'
import Ai302ProviderLogo from '@renderer/assets/images/providers/302ai.webp'
import AiHubMixProviderLogo from '@renderer/assets/images/providers/aihubmix.webp'
import AiOnlyProviderLogo from '@renderer/assets/images/providers/aiOnly.webp'
import AlayaNewProviderLogo from '@renderer/assets/images/providers/alayanew.webp'
import AnthropicProviderLogo from '@renderer/assets/images/providers/anthropic.png'
import AwsProviderLogo from '@renderer/assets/images/providers/aws-bedrock.webp'
@ -56,7 +57,18 @@ import type { AtLeast, Provider, ProviderType, SystemProvider, SystemProviderId
import { isSystemProvider, OpenAIServiceTiers } from '@renderer/types'
import { TOKENFLUX_HOST } from './constant'
import { SYSTEM_MODELS } from './models'
import { glm45FlashModel, qwen38bModel, SYSTEM_MODELS } from './models'
export const CHERRYAI_PROVIDER: SystemProvider = {
id: 'cherryai' as SystemProviderId,
name: 'CherryAI',
type: 'openai',
apiKey: '',
apiHost: 'https://api.cherry-ai.com/',
models: [glm45FlashModel, qwen38bModel],
isSystem: true,
enabled: true
}
export const SYSTEM_PROVIDERS_CONFIG: Record<SystemProviderId, SystemProvider> = {
cherryin: {
@ -64,8 +76,8 @@ export const SYSTEM_PROVIDERS_CONFIG: Record<SystemProviderId, SystemProvider> =
name: 'CherryIN',
type: 'openai',
apiKey: '',
apiHost: 'https://api.cherry-ai.com/',
models: SYSTEM_MODELS.cherryin,
apiHost: 'https://open.cherryin.ai',
models: [],
isSystem: true,
enabled: true
},
@ -593,6 +605,16 @@ export const SYSTEM_PROVIDERS_CONFIG: Record<SystemProviderId, SystemProvider> =
models: SYSTEM_MODELS['poe'],
isSystem: true,
enabled: false
},
aionly: {
id: 'aionly',
name: 'AIOnly',
type: 'openai',
apiKey: '',
apiHost: 'https://api.aiionly.com',
models: SYSTEM_MODELS.aionly,
isSystem: true,
enabled: false
}
} as const
@ -654,7 +676,8 @@ export const PROVIDER_LOGO_MAP: AtLeast<SystemProviderId, string> = {
vertexai: VertexAIProviderLogo,
'new-api': NewAPIProviderLogo,
'aws-bedrock': AwsProviderLogo,
poe: 'poe' // use svg icon component
poe: 'poe', // use svg icon component
aionly: AiOnlyProviderLogo
} as const
export function getProviderLogo(providerId: string) {
@ -680,12 +703,13 @@ type ProviderUrls = {
export const PROVIDER_URLS: Record<SystemProviderId, ProviderUrls> = {
cherryin: {
api: {
url: 'https://api.cherry-ai.com'
url: 'https://open.cherryin.ai'
},
websites: {
official: 'https://cherry-ai.com',
docs: 'https://docs.cherry-ai.com',
models: 'https://docs.cherry-ai.com/pre-basic/providers/cherryin'
official: 'https://open.cherryin.ai',
apiKey: 'https://open.cherryin.ai/console/token',
docs: 'https://open.cherryin.ai',
models: 'https://open.cherryin.ai/pricing'
}
},
ph8: {
@ -1248,6 +1272,17 @@ export const PROVIDER_URLS: Record<SystemProviderId, ProviderUrls> = {
docs: 'https://creator.poe.com/docs/external-applications/openai-compatible-api',
models: 'https://poe.com/'
}
},
aionly: {
api: {
url: 'https://api.aiionly.com'
},
websites: {
official: 'https://www.aiionly.com',
apiKey: 'https://www.aiionly.com/keyApi',
docs: 'https://www.aiionly.com/document',
models: 'https://www.aiionly.com'
}
}
}

View File

@ -172,7 +172,10 @@ export function useAssistant(id: string) {
(model: Model) => assistant && dispatch(setModel({ assistantId: assistant?.id, model })),
[assistant, dispatch]
),
updateAssistant: useCallback((assistant: Partial<Assistant>) => dispatch(updateAssistant(assistant)), [dispatch]),
updateAssistant: useCallback(
(update: Partial<Omit<Assistant, 'id'>>) => dispatch(updateAssistant({ id, ...update })),
[dispatch, id]
),
updateAssistantSettings
}
}

View File

@ -60,7 +60,8 @@ export const useKnowledge = (baseId: string) => {
// 添加笔记
const addNote = async (content: string) => {
await dispatch(addNoteThunk(baseId, content))
checkAllBases()
// 确保数据库写入完成后再触发队列检查
setTimeout(() => KnowledgeQueue.checkAllBases(), 100)
}
// 添加URL

View File

@ -1,8 +1,8 @@
import { cacheService } from '@data/CacheService'
import i18n from '@renderer/i18n'
import store from '@renderer/store'
import { useProviders } from './useProvider'
import { getStoreProviders } from './useStore'
export function useModel(id?: string, providerId?: string) {
const { providers } = useProviders()
@ -17,7 +17,7 @@ export function useModel(id?: string, providerId?: string) {
}
export function getModel(id?: string, providerId?: string) {
const providers = store.getState().llm.providers
const providers = getStoreProviders()
const allModels = providers.map((p) => p.models).flat()
return allModels.find((m) => {
if (providerId) {

View File

@ -1,4 +1,5 @@
import { createSelector } from '@reduxjs/toolkit'
import { CHERRYAI_PROVIDER } from '@renderer/config/providers'
import { getDefaultProvider } from '@renderer/services/AssistantService'
import { useAppDispatch, useAppSelector } from '@renderer/store'
import {
@ -17,7 +18,7 @@ import { useDefaultModel } from './useAssistant'
const selectEnabledProviders = createSelector(
(state) => state.llm.providers,
(providers) => providers.filter((p) => p.enabled)
(providers) => providers.filter((p) => p.enabled).concat(CHERRYAI_PROVIDER)
)
export function useProviders() {
@ -25,7 +26,7 @@ export function useProviders() {
const dispatch = useAppDispatch()
return {
providers: providers || {},
providers: providers || [],
addProvider: (provider: Provider) => dispatch(addProvider(provider)),
removeProvider: (provider: Provider) => dispatch(removeProvider(provider)),
updateProvider: (updates: Partial<Provider> & { id: string }) => dispatch(updateProvider(updates)),
@ -46,7 +47,9 @@ export function useAllProviders() {
}
export function useProvider(id: string) {
const provider = useAppSelector((state) => state.llm.providers.find((p) => p.id === id)) || getDefaultProvider()
const provider =
useAppSelector((state) => state.llm.providers.concat([CHERRYAI_PROVIDER]).find((p) => p.id === id)) ||
getDefaultProvider()
const dispatch = useAppDispatch()
return {

View File

@ -1,6 +1,7 @@
//FIXME 这个文件有必要存在吗? fullex@data refactor
//FIXME @deprecated this file will be removed after data refactor
import { usePreference } from '@data/hooks/usePreference'
import { CHERRYAI_PROVIDER } from '@renderer/config/providers'
import store from '@renderer/store'
export function useShowAssistants() {
const [showAssistants, setShowAssistants] = usePreference('assistant.tab.show')
@ -30,3 +31,7 @@ export function useAssistantsTabSortType() {
setAssistantsTabSortType
}
}
export function getStoreProviders() {
return store.getState().llm.providers.concat([CHERRYAI_PROVIDER])
}

View File

@ -81,7 +81,8 @@ const providerKeyMap = {
yi: 'provider.yi',
zhinao: 'provider.zhinao',
zhipu: 'provider.zhipu',
poe: 'provider.poe'
poe: 'provider.poe',
aionly: 'provider.aionly'
} as const
/**

View File

@ -332,7 +332,8 @@
},
"new_topic": "New Topic {{Command}}",
"pause": "Pause",
"placeholder": "Type your message here, press {{key}} to send...",
"placeholder": "Type your message here, press {{key}} to send - @ to Select Model, / to Include Tools",
"placeholder_without_triggers": "Type your message here, press {{key}} to send",
"send": "Send",
"settings": "Settings",
"thinking": {
@ -2017,6 +2018,7 @@
"provider": {
"302ai": "302.AI",
"aihubmix": "AiHubMix",
"aionly": "AiOnly",
"alayanew": "Alaya NeW",
"anthropic": "Anthropic",
"aws-bedrock": "AWS Bedrock",

View File

@ -332,7 +332,8 @@
},
"new_topic": "新话题 {{Command}}",
"pause": "暂停",
"placeholder": "在这里输入消息,按 {{key}} 发送...",
"placeholder": "在这里输入消息,按 {{key}} 发送 - @ 选择模型, / 选择工具",
"placeholder_without_triggers": "在这里输入消息,按 {{key}} 发送",
"send": "发送",
"settings": "设置",
"thinking": {
@ -2017,6 +2018,7 @@
"provider": {
"302ai": "302.AI",
"aihubmix": "AiHubMix",
"aionly": "唯一AI (AiOnly)",
"alayanew": "Alaya NeW",
"anthropic": "Anthropic",
"aws-bedrock": "AWS Bedrock",

View File

@ -332,7 +332,8 @@
},
"new_topic": "新話題 {{Command}}",
"pause": "暫停",
"placeholder": "在此輸入您的訊息,按 {{key}} 傳送...",
"placeholder": "在此輸入您的訊息,按 {{key}} 傳送 - @ 選擇模型,/ 包含工具",
"placeholder_without_triggers": "在此輸入您的訊息,按 {{key}} 傳送",
"send": "傳送",
"settings": "設定",
"thinking": {
@ -2017,6 +2018,7 @@
"provider": {
"302ai": "302.AI",
"aihubmix": "AiHubMix",
"aionly": "唯一AI (AiOnly)",
"alayanew": "Alaya NeW",
"anthropic": "Anthropic",
"aws-bedrock": "AWS Bedrock",

View File

@ -2017,6 +2017,7 @@
"provider": {
"302ai": "302.AI",
"aihubmix": "AiHubMix",
"aionly": "AiOnly",
"alayanew": "Alaya NeW",
"anthropic": "Anthropic",
"aws-bedrock": "AWS Bedrock",

View File

@ -2017,6 +2017,7 @@
"provider": {
"302ai": "302.AI",
"aihubmix": "AiHubMix",
"aionly": "AiOnly",
"alayanew": "Alaya NeW",
"anthropic": "Antropológico",
"aws-bedrock": "AWS Bedrock",

View File

@ -2017,6 +2017,7 @@
"provider": {
"302ai": "302.AI",
"aihubmix": "AiHubMix",
"aionly": "AiOnly",
"alayanew": "Alaya NeW",
"anthropic": "Anthropic",
"aws-bedrock": "AWS Bedrock",

View File

@ -2017,6 +2017,7 @@
"provider": {
"302ai": "302.AI",
"aihubmix": "AiHubMix",
"aionly": "AiOnly",
"alayanew": "Alaya NeW",
"anthropic": "Anthropic",
"aws-bedrock": "AWS Bedrock",

View File

@ -2017,6 +2017,7 @@
"provider": {
"302ai": "302.AI",
"aihubmix": "AiHubMix",
"aionly": "AiOnly",
"alayanew": "Alaya NeW",
"anthropic": "Antropológico",
"aws-bedrock": "AWS Bedrock",

View File

@ -2017,6 +2017,7 @@
"provider": {
"302ai": "302.AI",
"aihubmix": "AiHubMix",
"aionly": "AiOnly",
"alayanew": "Alaya NeW",
"anthropic": "Anthropic",
"aws-bedrock": "AWS Bedrock",

View File

@ -72,7 +72,7 @@ const CodeToolsPage: FC = () => {
if (isEmbeddingModel(m) || isRerankModel(m) || isTextToImageModel(m)) {
return false
}
if (m.provider === 'cherryin') {
if (m.provider === 'cherryai') {
return false
}
if (selectedCliTool === codeTools.claudeCode) {

View File

@ -165,6 +165,7 @@ const Inputbar: FC<Props> = ({ assistant: _assistant, setActiveTopic, topic }) =
const [tokenCount, setTokenCount] = useState(0)
const inputbarToolsRef = useRef<InputbarToolsRef>(null)
const prevTextRef = useRef(text)
// eslint-disable-next-line react-hooks/exhaustive-deps
const debouncedEstimate = useCallback(
@ -181,8 +182,21 @@ const Inputbar: FC<Props> = ({ assistant: _assistant, setActiveTopic, topic }) =
debouncedEstimate(text)
}, [text, debouncedEstimate])
useEffect(() => {
prevTextRef.current = text
}, [text])
const inputTokenCount = showInputEstimatedTokens ? tokenCount : 0
const placeholderText = enableQuickPanelTriggers
? t('chat.input.placeholder', { key: getSendMessageShortcutLabel(sendMessageShortcut) })
: t('chat.input.placeholder_without_triggers', {
key: getSendMessageShortcutLabel(sendMessageShortcut),
defaultValue: t('chat.input.placeholder', {
key: getSendMessageShortcutLabel(sendMessageShortcut)
})
})
const inputEmpty = isEmpty(text.trim()) && files.length === 0
_text = text
@ -380,7 +394,7 @@ const Inputbar: FC<Props> = ({ assistant: _assistant, setActiveTopic, topic }) =
}
}
if (event.key === 'Backspace' && text.trim() === '' && files.length > 0) {
if (event.key === 'Backspace' && text.length === 0 && files.length > 0) {
setFiles((prev) => prev.slice(0, -1))
return event.preventDefault()
}
@ -444,43 +458,91 @@ const Inputbar: FC<Props> = ({ assistant: _assistant, setActiveTopic, topic }) =
const newText = e.target.value
setText(newText)
const prevText = prevTextRef.current
const isDeletion = newText.length < prevText.length
const textArea = textareaRef.current?.resizableTextArea?.textArea
const cursorPosition = textArea?.selectionStart ?? 0
const cursorPosition = textArea?.selectionStart ?? newText.length
const lastSymbol = newText[cursorPosition - 1]
const previousChar = newText[cursorPosition - 2]
const isCursorAtTextStart = cursorPosition <= 1
const hasValidTriggerBoundary = previousChar === ' ' || isCursorAtTextStart
const openRootPanelAt = (position: number) => {
const quickPanelMenu =
inputbarToolsRef.current?.getQuickPanelMenu({
text: newText,
translate
}) || []
quickPanel.open({
title: t('settings.quickPanel.title'),
list: quickPanelMenu,
symbol: QuickPanelReservedSymbol.Root,
triggerInfo: {
type: 'input',
position,
originalText: newText
}
})
}
const openMentionPanelAt = (position: number) => {
inputbarToolsRef.current?.openMentionModelsPanel({
type: 'input',
position,
originalText: newText
})
}
if (enableQuickPanelTriggers && !quickPanel.isVisible) {
const textBeforeCursor = newText.slice(0, cursorPosition)
const lastRootIndex = textBeforeCursor.lastIndexOf(QuickPanelReservedSymbol.Root)
const lastMentionIndex = textBeforeCursor.lastIndexOf(QuickPanelReservedSymbol.MentionModels)
const lastTriggerIndex = Math.max(lastRootIndex, lastMentionIndex)
if (lastTriggerIndex !== -1 && cursorPosition > lastTriggerIndex) {
const triggerChar = newText[lastTriggerIndex]
const boundaryChar = newText[lastTriggerIndex - 1] ?? ''
const hasBoundary = lastTriggerIndex === 0 || /\s/.test(boundaryChar)
const searchSegment = newText.slice(lastTriggerIndex + 1, cursorPosition)
const hasSearchContent = searchSegment.trim().length > 0
if (hasBoundary && (!hasSearchContent || isDeletion)) {
if (triggerChar === QuickPanelReservedSymbol.Root) {
openRootPanelAt(lastTriggerIndex)
} else if (triggerChar === QuickPanelReservedSymbol.MentionModels) {
openMentionPanelAt(lastTriggerIndex)
}
}
}
}
// 触发符号为 '/':若当前未打开或符号不同,则切换/打开
if (enableQuickPanelTriggers && lastSymbol === QuickPanelReservedSymbol.Root) {
if (enableQuickPanelTriggers && lastSymbol === QuickPanelReservedSymbol.Root && hasValidTriggerBoundary) {
if (quickPanel.isVisible && quickPanel.symbol !== QuickPanelReservedSymbol.Root) {
quickPanel.close('switch-symbol')
}
if (!quickPanel.isVisible || quickPanel.symbol !== QuickPanelReservedSymbol.Root) {
const quickPanelMenu =
inputbarToolsRef.current?.getQuickPanelMenu({
text: newText,
translate
}) || []
quickPanel.open({
title: t('settings.quickPanel.title'),
list: quickPanelMenu,
symbol: QuickPanelReservedSymbol.Root
})
openRootPanelAt(cursorPosition - 1)
}
}
// 触发符号为 '@':若当前未打开或符号不同,则切换/打开
if (enableQuickPanelTriggers && lastSymbol === QuickPanelReservedSymbol.MentionModels) {
if (
enableQuickPanelTriggers &&
lastSymbol === QuickPanelReservedSymbol.MentionModels &&
hasValidTriggerBoundary
) {
if (quickPanel.isVisible && quickPanel.symbol !== QuickPanelReservedSymbol.MentionModels) {
quickPanel.close('switch-symbol')
}
if (!quickPanel.isVisible || quickPanel.symbol !== QuickPanelReservedSymbol.MentionModels) {
inputbarToolsRef.current?.openMentionModelsPanel({
type: 'input',
position: cursorPosition - 1,
originalText: newText
})
openMentionPanelAt(cursorPosition - 1)
}
}
prevTextRef.current = newText
},
[enableQuickPanelTriggers, quickPanel, t, translate]
)
@ -786,11 +848,7 @@ const Inputbar: FC<Props> = ({ assistant: _assistant, setActiveTopic, topic }) =
value={text}
onChange={onChange}
onKeyDown={handleKeyDown}
placeholder={
isTranslating
? t('chat.input.translating')
: t('chat.input.placeholder', { key: getSendMessageShortcutLabel(sendMessageShortcut) })
}
placeholder={isTranslating ? t('chat.input.translating') : placeholderText}
autoFocus
variant="borderless"
spellCheck={enableSpellCheck}

View File

@ -90,7 +90,7 @@ const MentionModelsButton: FC<Props> = ({
// 兜底:使用打开时的 position若存在按空白边界删除
if (typeof fallbackPosition === 'number' && currentText[fallbackPosition] === '@') {
let endPos = fallbackPosition + 1
while (endPos < currentText.length && currentText[endPos] !== ' ' && currentText[endPos] !== '\n') {
while (endPos < currentText.length && !/\s/.test(currentText[endPos])) {
endPos++
}
return currentText.slice(0, fallbackPosition) + currentText.slice(endPos)
@ -99,7 +99,7 @@ const MentionModelsButton: FC<Props> = ({
}
let endPos = start + 1
while (endPos < currentText.length && currentText[endPos] !== ' ' && currentText[endPos] !== '\n') {
while (endPos < currentText.length && !/\s/.test(currentText[endPos])) {
endPos++
}
return currentText.slice(0, start) + currentText.slice(endPos)

View File

@ -3,8 +3,8 @@ import SelectModelPopup from '@renderer/components/Popups/SelectModelPopup'
import { isLocalAi } from '@renderer/config/env'
import { isEmbeddingModel, isRerankModel, isWebSearchModel } from '@renderer/config/models'
import { useAssistant } from '@renderer/hooks/useAssistant'
import { useProvider } from '@renderer/hooks/useProvider'
import { getProviderName } from '@renderer/services/ProviderService'
import { useAppSelector } from '@renderer/store'
import type { Assistant, Model } from '@renderer/types'
import { Button, Tag } from 'antd'
import { ChevronsUpDown } from 'lucide-react'
@ -21,7 +21,7 @@ const SelectModelButton: FC<Props> = ({ assistant }) => {
const { model, updateAssistant } = useAssistant(assistant.id)
const { t } = useTranslation()
const timerRef = useRef<NodeJS.Timeout>(undefined)
const provider = useAppSelector((state) => state.llm.providers.find((p) => p.id === model?.provider))
const provider = useProvider(model?.provider)
const modelFilter = (model: Model) => !isEmbeddingModel(model) && !isRerankModel(model)

View File

@ -46,6 +46,12 @@ export const TEXT_TO_IMAGES_MODELS = [
provider: 'silicon',
name: 'Kolors',
group: 'Kwai-Kolors'
},
{
id: 'Qwen/Qwen-Image',
provider: 'silicon',
name: 'Qwen-Image',
group: 'qwen'
}
]

View File

@ -185,6 +185,7 @@ const CardContainer = styled.div<{ $isActive: boolean }>`
margin-bottom: 5px;
height: 125px;
opacity: ${(props) => (props.$isActive ? 1 : 0.6)};
width: calc(100vw - var(--settings-width) - 40px);
&:hover {
opacity: 1;

View File

@ -250,7 +250,9 @@ const McpServersList: FC = () => {
items={filteredMcpServers}
itemKey="id"
onSortEnd={onSortEnd}
layout="grid"
layout="list"
horizontal={false}
listStyle={{ display: 'flex', flexDirection: 'column' }}
gap="12px"
restrictions={{ scrollableAncestor: true }}
useDragOverlay

View File

@ -51,7 +51,6 @@ const ModelList: React.FC<ModelListProps> = ({ providerId }) => {
const providerConfig = PROVIDER_URLS[provider.id]
const docsWebsite = providerConfig?.websites?.docs
const modelsWebsite = providerConfig?.websites?.models
const editable = provider.id !== 'cherryin'
const [searchText, _setSearchText] = useState('')
const [displayedModelGroups, setDisplayedModelGroups] = useState<ModelGroups | null>(() => {
@ -114,17 +113,15 @@ const ModelList: React.FC<ModelListProps> = ({ providerId }) => {
tooltip={t('models.search.tooltip')}
/>
</RowFlex>
{editable && (
<RowFlex>
<Tooltip title={t('settings.models.check.button_caption')} mouseLeaveDelay={0}>
<Button
type="text"
onClick={runHealthCheck}
icon={<StreamlineGoodHealthAndWellBeing size={16} isActive={isHealthChecking} />}
/>
</Tooltip>
</RowFlex>
)}
<RowFlex>
<Tooltip title={t('settings.models.check.button_caption')} mouseLeaveDelay={0}>
<Button
type="text"
onClick={runHealthCheck}
icon={<StreamlineGoodHealthAndWellBeing size={16} isActive={isHealthChecking} />}
/>
</Tooltip>
</RowFlex>
</RowFlex>
</SettingSubtitle>
<Spin spinning={isLoading} indicator={<LoadingIcon color="var(--color-text-2)" />}>
@ -140,7 +137,6 @@ const ModelList: React.FC<ModelListProps> = ({ providerId }) => {
onEditModel={(model) => EditModelPopup.show({ provider, model })}
onRemoveModel={removeModel}
onRemoveGroup={() => displayedModelGroups[group].forEach((model) => removeModel(model))}
disabled={!editable}
/>
))}
</ColFlex>
@ -168,16 +164,14 @@ const ModelList: React.FC<ModelListProps> = ({ providerId }) => {
<div className="h-[5px]" />
)}
</Flex>
{editable && (
<Flex className="mt-3 gap-2.5">
<Button type="primary" onClick={onManageModel} icon={<ListCheck size={16} />} disabled={isHealthChecking}>
{t('button.manage')}
</Button>
<Button type="default" onClick={onAddModel} icon={<Plus size={16} />} disabled={isHealthChecking}>
{t('button.add')}
</Button>
</Flex>
)}
<Flex className="mt-3 gap-2.5">
<Button type="primary" onClick={onManageModel} icon={<ListCheck size={16} />} disabled={isHealthChecking}>
{t('button.manage')}
</Button>
<Button type="default" onClick={onAddModel} icon={<Plus size={16} />} disabled={isHealthChecking}>
{t('button.add')}
</Button>
</Flex>
</>
)
}

View File

@ -1,6 +1,7 @@
import { RowFlex } from '@cherrystudio/ui'
import AI302ProviderLogo from '@renderer/assets/images/providers/302ai.webp'
import AiHubMixProviderLogo from '@renderer/assets/images/providers/aihubmix.webp'
import AiOnlyProviderLogo from '@renderer/assets/images/providers/aiOnly.webp'
import PPIOProviderLogo from '@renderer/assets/images/providers/ppio.png'
import SiliconFlowProviderLogo from '@renderer/assets/images/providers/silicon.png'
import TokenFluxProviderLogo from '@renderer/assets/images/providers/tokenflux.png'
@ -25,7 +26,8 @@ const PROVIDER_LOGO_MAP = {
silicon: SiliconFlowProviderLogo,
aihubmix: AiHubMixProviderLogo,
ppio: PPIOProviderLogo,
tokenflux: TokenFluxProviderLogo
tokenflux: TokenFluxProviderLogo,
aionly: AiOnlyProviderLogo
}
const ProviderOAuth: FC<Props> = ({ providerId }) => {

View File

@ -72,7 +72,7 @@ const ProviderSetting: FC<Props> = ({ providerId }) => {
const isAzureOpenAI = provider.id === 'azure-openai' || provider.type === 'azure-openai'
const isDmxapi = provider.id === 'dmxapi'
const hideApiInput = ['vertexai', 'aws-bedrock', 'cherryin'].includes(provider.id)
const hideApiInput = ['vertexai', 'aws-bedrock'].includes(provider.id)
const providerConfig = PROVIDER_URLS[provider.id]
const officialWebsite = providerConfig?.websites?.official

View File

@ -338,7 +338,7 @@ export async function fetchGenerate({
export function hasApiKey(provider: Provider) {
if (!provider) return false
if (['ollama', 'lmstudio', 'vertexai', 'cherryin'].includes(provider.id)) return true
if (['ollama', 'lmstudio', 'vertexai', 'cherryai'].includes(provider.id)) return true
return !isEmpty(provider.apiKey)
}

View File

@ -7,7 +7,10 @@ import {
MAX_CONTEXT_COUNT,
UNLIMITED_CONTEXT_COUNT
} from '@renderer/config/constant'
import { isQwenMTModel } from '@renderer/config/models'
import { CHERRYAI_PROVIDER } from '@renderer/config/providers'
import { UNKNOWN } from '@renderer/config/translate'
import { getStoreProviders } from '@renderer/hooks/useStore'
import i18n from '@renderer/i18n'
import store from '@renderer/store'
import { addAssistant } from '@renderer/store/assistants'
@ -73,11 +76,20 @@ export async function getDefaultTranslateAssistant(
temperature: 0.7
}
const translateModelPrompt = await preferenceService.get('feature.translate.model_prompt')
const content = translateModelPrompt
.replaceAll('{{target_language}}', targetLanguage.value)
.replaceAll('{{text}}', text)
// Build the text that is actually sent to the translate model.
// QwenMT models receive the raw source text as-is (presumably the target
// language is passed via model/request parameters — TODO confirm against the
// QwenMT request builder); all other models receive the user-configurable
// prompt template with its {{target_language}} / {{text}} placeholders filled.
const getTranslateContent = async (
model: Model,
text: string,
targetLanguage: TranslateLanguage
): Promise<string> => {
if (isQwenMTModel(model)) {
return text // QwenMT models handle raw text directly
}
// Template comes from user preferences, so it may contain either placeholder
// any number of times — replaceAll handles repeated occurrences.
const translateModelPrompt = await preferenceService.get('feature.translate.model_prompt')
return translateModelPrompt.replaceAll('{{target_language}}', targetLanguage.value).replaceAll('{{text}}', text)
}
const content = await getTranslateContent(model, text, targetLanguage)
const translateAssistant = {
...assistant,
model,
@ -122,26 +134,25 @@ export function getTranslateModel() {
}
export function getAssistantProvider(assistant: Assistant): Provider {
const providers = store.getState().llm.providers
const providers = getStoreProviders()
const provider = providers.find((p) => p.id === assistant.model?.provider)
return provider || getDefaultProvider()
}
export function getProviderByModel(model?: Model): Provider {
const providers = store.getState().llm.providers
const providers = getStoreProviders()
const provider = providers.find((p) => p.id === model?.provider)
if (!provider) {
const defaultProvider = providers.find((p) => p.id === getDefaultModel()?.provider)
const cherryinProvider = providers.find((p) => p.id === 'cherryin')
return defaultProvider || cherryinProvider || providers[0]
return defaultProvider || CHERRYAI_PROVIDER || providers[0]
}
return provider
}
export function getProviderByModelId(modelId?: string) {
const providers = store.getState().llm.providers
const providers = getStoreProviders()
const _modelId = modelId || getDefaultModel().id
return providers.find((p) => p.models.find((m) => m.id === _modelId)) as Provider
}

View File

@ -1,4 +1,4 @@
import store from '@renderer/store'
import { getStoreProviders } from '@renderer/hooks/useStore'
import type { Model } from '@renderer/types'
import { pick } from 'lodash'
@ -9,9 +9,8 @@ export const getModelUniqId = (m?: Model) => {
}
export const hasModel = (m?: Model) => {
const allModels = store
.getState()
.llm.providers.filter((p) => p.enabled)
const allModels = getStoreProviders()
.filter((p) => p.enabled)
.map((p) => p.models)
.flat()
@ -19,7 +18,7 @@ export const hasModel = (m?: Model) => {
}
export function getModelName(model?: Model) {
const provider = store.getState().llm.providers.find((p) => p.id === model?.provider)
const provider = getStoreProviders().find((p) => p.id === model?.provider)
const modelName = model?.name || model?.id || ''
if (provider) {

View File

@ -1,4 +1,4 @@
import store from '@renderer/store'
import { getStoreProviders } from '@renderer/hooks/useStore'
import type { Model, Provider } from '@renderer/types'
import { getFancyProviderName } from '@renderer/utils'
@ -14,9 +14,9 @@ export function getProviderName(model?: Model) {
export function getProviderByModel(model?: Model) {
const id = model?.provider
const provider = store.getState().llm.providers.find((p) => p.id === id)
const provider = getStoreProviders().find((p) => p.id === id)
if (provider?.id === 'cherryin') {
if (provider?.id === 'cherryai') {
const map = {
'glm-4.5-flash': 'zhipu',
'Qwen/Qwen3-8B': 'silicon'
@ -33,7 +33,7 @@ export function getProviderByModel(model?: Model) {
}
export function isProviderSupportAuth(provider: Provider) {
const supportProviders = ['302ai', 'silicon', 'aihubmix', 'ppio', 'tokenflux']
const supportProviders = ['302ai', 'silicon', 'aihubmix', 'ppio', 'tokenflux', 'aionly']
return supportProviders.includes(provider.id)
}
@ -43,5 +43,5 @@ export function isProviderSupportCharge(provider: Provider) {
}
export function getProviderById(id: string) {
return store.getState().llm.providers.find((p) => p.id === id)
return getStoreProviders().find((p) => p.id === id)
}

View File

@ -47,8 +47,9 @@ const assistantsSlice = createSlice({
removeAssistant: (state, action: PayloadAction<{ id: string }>) => {
state.assistants = state.assistants.filter((c) => c.id !== action.payload.id)
},
updateAssistant: (state, action: PayloadAction<Partial<Assistant>>) => {
state.assistants = state.assistants.map((c) => (c.id === action.payload.id ? { ...c, ...action.payload } : c))
updateAssistant: (state, action: PayloadAction<Partial<Assistant> & { id: string }>) => {
const { id, ...update } = action.payload
state.assistants = state.assistants.map((c) => (c.id === id ? { ...c, ...update } : c))
},
updateAssistantSettings: (
state,

View File

@ -71,7 +71,7 @@ const persistedReducer = persistReducer(
{
key: 'cherry-studio',
storage,
version: 156,
version: 157,
blacklist: ['runtime', 'messages', 'messageBlocks', 'tabs'],
migrate
},

View File

@ -2495,6 +2495,53 @@ const migrateConfig = {
logger.error('migrate 156 error', error as Error)
return state
}
},
// Migration 157: introduce the 'aionly' provider and migrate saved model
// references from the retired 'cherryin' provider id to 'cherryai'.
'157': (state: RootState) => {
try {
// Register the new 'aionly' system provider in the persisted provider list.
addProvider(state, 'aionly')
// NOTE(review): the legacy provider entry keeps its 'cherryin' id here (only
// its apiHost is updated and its model list cleared), while every model
// reference below is rewritten to 'cherryai' — confirm this asymmetry is
// intentional (e.g. a separate CHERRYAI_PROVIDER supersedes the old entry).
const cherryinProvider = state.llm.providers.find((provider) => provider.id === 'cherryin')
if (cherryinProvider) {
updateProvider(state, 'cherryin', { apiHost: 'https://open.cherryin.ai', models: [] })
}
// Re-point the three global model slots (default / quick / translate).
if (state.llm.defaultModel?.provider === 'cherryin') {
state.llm.defaultModel.provider = 'cherryai'
}
if (state.llm.quickModel?.provider === 'cherryin') {
state.llm.quickModel.provider = 'cherryai'
}
if (state.llm.translateModel?.provider === 'cherryin') {
state.llm.translateModel.provider = 'cherryai'
}
// Re-point per-assistant model selections.
state.assistants.assistants.forEach((assistant) => {
if (assistant.model?.provider === 'cherryin') {
assistant.model.provider = 'cherryai'
}
if (assistant.defaultModel?.provider === 'cherryin') {
assistant.defaultModel.provider = 'cherryai'
}
})
// Re-point per-agent model selections. 'model' is not declared on the Agent
// type but may exist in older persisted state, hence the ts-ignores.
state.agents.agents.forEach((agent) => {
// @ts-ignore model is not defined in Agent
if (agent.model?.provider === 'cherryin') {
// @ts-ignore model is not defined in Agent
agent.model.provider = 'cherryai'
}
if (agent.defaultModel?.provider === 'cherryin') {
agent.defaultModel.provider = 'cherryai'
}
})
return state
} catch (error) {
// On any failure, log and return the state unmodified so startup still works.
logger.error('migrate 157 error', error as Error)
return state
}
}
}

View File

@ -321,7 +321,8 @@ export const SystemProviderIds = {
gpustack: 'gpustack',
voyageai: 'voyageai',
'aws-bedrock': 'aws-bedrock',
poe: 'poe'
poe: 'poe',
aionly: 'aionly'
} as const
export type SystemProviderId = keyof typeof SystemProviderIds

View File

@ -65,7 +65,7 @@ export const getModelTags = (models: Model[]): Record<ModelTag, boolean> => {
}
export function isFreeModel(model: Model) {
if (model.provider === 'cherryin') {
if (model.provider === 'cherryai') {
return true
}

View File

@ -172,6 +172,27 @@ export const oauthWith302AI = async (setKey) => {
window.addEventListener('message', messageHandler)
}
// Open the AiOnly login popup and capture the API secret key that the login
// page posts back to the opener via window.postMessage.
// Expected payload: an array whose first element carries a `secretKey` string.
export const oauthWithAiOnly = async (setKey: (key: string) => void) => {
  const authUrl = `https://www.aiionly.com/login?inviteCode=1755481173663DrZBBOC0&cherryCode=01`
  const popup = window.open(
    authUrl,
    'login',
    'width=720,height=720,toolbar=no,location=no,status=no,menubar=no,scrollbars=yes,resizable=yes,alwaysOnTop=yes,alwaysRaised=yes'
  )

  const expectedOrigin = new URL(authUrl).origin

  const messageHandler = (event: MessageEvent) => {
    // Security: any window can postMessage to us. Only accept the secret key
    // from the AiOnly origin we opened the popup on, otherwise a malicious
    // page could inject an attacker-controlled API key.
    // NOTE(review): if the login flow posts from a different aiionly.com
    // subdomain this check may need loosening — verify against the real flow.
    if (event.origin !== expectedOrigin) return
    const data = event.data
    if (Array.isArray(data) && data.length > 0 && data[0]?.secretKey !== undefined) {
      setKey(data[0].secretKey)
      popup?.close()
      window.removeEventListener('message', messageHandler)
    }
  }

  // NOTE(review): this pre-registration removal is a no-op — messageHandler is
  // a fresh closure on every call, so a handler leaked by an earlier aborted
  // attempt is never actually removed. Kept for parity with the sibling oauth
  // helpers; fixing it would require a module-level handler reference.
  window.removeEventListener('message', messageHandler)
  window.addEventListener('message', messageHandler)
}
export const providerCharge = async (provider: string) => {
const chargeUrlMap = {
silicon: {
@ -198,6 +219,11 @@ export const providerCharge = async (provider: string) => {
url: 'https://dash.302.ai/charge',
width: 900,
height: 700
},
aionly: {
url: `https://www.aiionly.com/recharge`,
width: 900,
height: 700
}
}
@ -236,6 +262,11 @@ export const providerBills = async (provider: string) => {
url: 'https://dash.302.ai/charge',
width: 900,
height: 700
},
aionly: {
url: `https://www.aiionly.com/billManagement`,
width: 900,
height: 700
}
}

View File

@ -144,8 +144,9 @@ const ActionTranslate: FC<Props> = ({ action, scrollToBottom }) => {
}
}
assistantRef.current = await getDefaultTranslateAssistant(translateLang, action.selectedText)
processMessages(assistantRef.current, topicRef.current, action.selectedText, setAskId, onStream, onFinish, onError)
const assistant = await getDefaultTranslateAssistant(translateLang, action.selectedText)
assistantRef.current = assistant
processMessages(assistant, topicRef.current, assistant.content, setAskId, onStream, onFinish, onError)
}, [action, targetLanguage, alterLanguage, scrollToBottom])
useEffect(() => {

View File

@ -8,7 +8,7 @@
"tests/__mocks__/**/*",
"packages/mcp-trace/**/*",
"packages/aiCore/src/**/*",
"src/main/integration/cherryin/index.js",
"src/main/integration/cherryai/index.js",
"packages/extension-table-plus/**/*",
"packages/ui/**/*"
],