Merge branch 'main' into feat/openai-deepsearch

Commit 5e33a91154 by suyao, 2025-07-18 01:21:23 +08:00
513 changed files with 53331 additions and 22306 deletions

@@ -1,9 +1,9 @@
root = true
[*]
charset = utf-8
indent_style = space
indent_size = 2
end_of_line = lf
insert_final_newline = true
trim_trailing_whitespace = true
root = true
[*]
charset = utf-8
indent_style = space
indent_size = 2
end_of_line = lf
insert_final_newline = true
trim_trailing_whitespace = true

.git-blame-ignore-revs (new file, 2 changes)
@@ -0,0 +1,2 @@
# ignore #7923 eol change and code formatting
4ac8a388347ff35f34de42c3ef4a2f81f03fb3b1

.gitattributes (vendored, 1 change)
@@ -1,2 +1,3 @@
* text=auto eol=lf
/.yarn/** linguist-vendored
/.yarn/releases/* binary

@@ -73,4 +73,4 @@ body:
id: additional
attributes:
label: 附加信息
description: 任何能让我们对您的问题有更多了解的信息,包括截图或相关链接
description: 任何能让我们对您的问题有更多了解的信息,包括截图或相关链接

@@ -73,4 +73,4 @@ body:
id: additional
attributes:
label: Additional Information
description: Any other information that could help us better understand your question, including screenshots or relevant links
description: Any other information that could help us better understand your question, including screenshots or relevant links

@@ -1,86 +1,17 @@
version: 2
updates:
- package-ecosystem: "npm"
directory: "/"
- package-ecosystem: 'github-actions'
directory: '/'
schedule:
interval: "monthly"
open-pull-requests-limit: 7
target-branch: "main"
commit-message:
prefix: "chore"
include: "scope"
groups:
# 核心框架
core-framework:
patterns:
- "react"
- "react-dom"
- "electron"
- "typescript"
- "@types/react*"
- "@types/node"
update-types:
- "minor"
- "patch"
# Electron 生态和构建工具
electron-build:
patterns:
- "electron-*"
- "@electron*"
- "vite"
- "@vitejs/*"
- "dotenv-cli"
- "rollup-plugin-*"
- "@swc/*"
update-types:
- "minor"
- "patch"
# 测试工具
testing-tools:
patterns:
- "vitest"
- "@vitest/*"
- "playwright"
- "@playwright/*"
- "eslint*"
- "@eslint*"
- "prettier"
- "husky"
- "lint-staged"
update-types:
- "minor"
- "patch"
# CherryStudio 自定义包
cherrystudio-packages:
patterns:
- "@cherrystudio/*"
update-types:
- "minor"
- "patch"
# 兜底其他 dependencies
other-dependencies:
dependency-type: "production"
# 兜底其他 devDependencies
other-dev-dependencies:
dependency-type: "development"
- package-ecosystem: "github-actions"
directory: "/"
schedule:
interval: "weekly"
interval: 'monthly'
open-pull-requests-limit: 3
commit-message:
prefix: "ci"
include: "scope"
prefix: 'ci'
include: 'scope'
groups:
github-actions:
patterns:
- "*"
- '*'
update-types:
- "minor"
- "patch"
- 'minor'
- 'patch'

@@ -9,115 +9,115 @@ labels:
# skips and removes
- name: skip all
content:
regexes: "[Ss]kip (?:[Aa]ll |)[Ll]abels?"
regexes: '[Ss]kip (?:[Aa]ll |)[Ll]abels?'
- name: remove all
content:
regexes: "[Rr]emove (?:[Aa]ll |)[Ll]abels?"
regexes: '[Rr]emove (?:[Aa]ll |)[Ll]abels?'
- name: skip kind/bug
content:
regexes: "[Ss]kip (?:[Ll]abels? |)(?:`|)kind/bug(?:`|)"
regexes: '[Ss]kip (?:[Ll]abels? |)(?:`|)kind/bug(?:`|)'
- name: remove kind/bug
content:
regexes: "[Rr]emove (?:[Ll]abels? |)(?:`|)kind/bug(?:`|)"
regexes: '[Rr]emove (?:[Ll]abels? |)(?:`|)kind/bug(?:`|)'
- name: skip kind/enhancement
content:
regexes: "[Ss]kip (?:[Ll]abels? |)(?:`|)kind/enhancement(?:`|)"
regexes: '[Ss]kip (?:[Ll]abels? |)(?:`|)kind/enhancement(?:`|)'
- name: remove kind/enhancement
content:
regexes: "[Rr]emove (?:[Ll]abels? |)(?:`|)kind/enhancement(?:`|)"
regexes: '[Rr]emove (?:[Ll]abels? |)(?:`|)kind/enhancement(?:`|)'
- name: skip kind/question
content:
regexes: "[Ss]kip (?:[Ll]abels? |)(?:`|)kind/question(?:`|)"
regexes: '[Ss]kip (?:[Ll]abels? |)(?:`|)kind/question(?:`|)'
- name: remove kind/question
content:
regexes: "[Rr]emove (?:[Ll]abels? |)(?:`|)kind/question(?:`|)"
regexes: '[Rr]emove (?:[Ll]abels? |)(?:`|)kind/question(?:`|)'
- name: skip area/Connectivity
content:
regexes: "[Ss]kip (?:[Ll]abels? |)(?:`|)area/Connectivity(?:`|)"
regexes: '[Ss]kip (?:[Ll]abels? |)(?:`|)area/Connectivity(?:`|)'
- name: remove area/Connectivity
content:
regexes: "[Rr]emove (?:[Ll]abels? |)(?:`|)area/Connectivity(?:`|)"
regexes: '[Rr]emove (?:[Ll]abels? |)(?:`|)area/Connectivity(?:`|)'
- name: skip area/UI/UX
content:
regexes: "[Ss]kip (?:[Ll]abels? |)(?:`|)area/UI/UX(?:`|)"
regexes: '[Ss]kip (?:[Ll]abels? |)(?:`|)area/UI/UX(?:`|)'
- name: remove area/UI/UX
content:
regexes: "[Rr]emove (?:[Ll]abels? |)(?:`|)area/UI/UX(?:`|)"
regexes: '[Rr]emove (?:[Ll]abels? |)(?:`|)area/UI/UX(?:`|)'
- name: skip kind/documentation
content:
regexes: "[Ss]kip (?:[Ll]abels? |)(?:`|)kind/documentation(?:`|)"
regexes: '[Ss]kip (?:[Ll]abels? |)(?:`|)kind/documentation(?:`|)'
- name: remove kind/documentation
content:
regexes: "[Rr]emove (?:[Ll]abels? |)(?:`|)kind/documentation(?:`|)"
regexes: '[Rr]emove (?:[Ll]abels? |)(?:`|)kind/documentation(?:`|)'
- name: skip client:linux
content:
regexes: "[Ss]kip (?:[Ll]abels? |)(?:`|)client:linux(?:`|)"
regexes: '[Ss]kip (?:[Ll]abels? |)(?:`|)client:linux(?:`|)'
- name: remove client:linux
content:
regexes: "[Rr]emove (?:[Ll]abels? |)(?:`|)client:linux(?:`|)"
regexes: '[Rr]emove (?:[Ll]abels? |)(?:`|)client:linux(?:`|)'
- name: skip client:mac
content:
regexes: "[Ss]kip (?:[Ll]abels? |)(?:`|)client:mac(?:`|)"
regexes: '[Ss]kip (?:[Ll]abels? |)(?:`|)client:mac(?:`|)'
- name: remove client:mac
content:
regexes: "[Rr]emove (?:[Ll]abels? |)(?:`|)client:mac(?:`|)"
regexes: '[Rr]emove (?:[Ll]abels? |)(?:`|)client:mac(?:`|)'
- name: skip client:win
content:
regexes: "[Ss]kip (?:[Ll]abels? |)(?:`|)client:win(?:`|)"
regexes: '[Ss]kip (?:[Ll]abels? |)(?:`|)client:win(?:`|)'
- name: remove client:win
content:
regexes: "[Rr]emove (?:[Ll]abels? |)(?:`|)client:win(?:`|)"
regexes: '[Rr]emove (?:[Ll]abels? |)(?:`|)client:win(?:`|)'
- name: skip sig/Assistant
content:
regexes: "[Ss]kip (?:[Ll]abels? |)(?:`|)sig/Assistant(?:`|)"
regexes: '[Ss]kip (?:[Ll]abels? |)(?:`|)sig/Assistant(?:`|)'
- name: remove sig/Assistant
content:
regexes: "[Rr]emove (?:[Ll]abels? |)(?:`|)sig/Assistant(?:`|)"
regexes: '[Rr]emove (?:[Ll]abels? |)(?:`|)sig/Assistant(?:`|)'
- name: skip sig/Data
content:
regexes: "[Ss]kip (?:[Ll]abels? |)(?:`|)sig/Data(?:`|)"
regexes: '[Ss]kip (?:[Ll]abels? |)(?:`|)sig/Data(?:`|)'
- name: remove sig/Data
content:
regexes: "[Rr]emove (?:[Ll]abels? |)(?:`|)sig/Data(?:`|)"
regexes: '[Rr]emove (?:[Ll]abels? |)(?:`|)sig/Data(?:`|)'
- name: skip sig/MCP
content:
regexes: "[Ss]kip (?:[Ll]abels? |)(?:`|)sig/MCP(?:`|)"
regexes: '[Ss]kip (?:[Ll]abels? |)(?:`|)sig/MCP(?:`|)'
- name: remove sig/MCP
content:
regexes: "[Rr]emove (?:[Ll]abels? |)(?:`|)sig/MCP(?:`|)"
regexes: '[Rr]emove (?:[Ll]abels? |)(?:`|)sig/MCP(?:`|)'
- name: skip sig/RAG
content:
regexes: "[Ss]kip (?:[Ll]abels? |)(?:`|)sig/RAG(?:`|)"
regexes: '[Ss]kip (?:[Ll]abels? |)(?:`|)sig/RAG(?:`|)'
- name: remove sig/RAG
content:
regexes: "[Rr]emove (?:[Ll]abels? |)(?:`|)sig/RAG(?:`|)"
regexes: '[Rr]emove (?:[Ll]abels? |)(?:`|)sig/RAG(?:`|)'
- name: skip lgtm
content:
regexes: "[Ss]kip (?:[Ll]abels? |)(?:`|)lgtm(?:`|)"
regexes: '[Ss]kip (?:[Ll]abels? |)(?:`|)lgtm(?:`|)'
- name: remove lgtm
content:
regexes: "[Rr]emove (?:[Ll]abels? |)(?:`|)lgtm(?:`|)"
regexes: '[Rr]emove (?:[Ll]abels? |)(?:`|)lgtm(?:`|)'
- name: skip License
content:
regexes: "[Ss]kip (?:[Ll]abels? |)(?:`|)License(?:`|)"
regexes: '[Ss]kip (?:[Ll]abels? |)(?:`|)License(?:`|)'
- name: remove License
content:
regexes: "[Rr]emove (?:[Ll]abels? |)(?:`|)License(?:`|)"
regexes: '[Rr]emove (?:[Ll]abels? |)(?:`|)License(?:`|)'
# `Dev Team`
- name: Dev Team
@@ -129,7 +129,7 @@ labels:
# Area labels
- name: area/Connectivity
content: area/Connectivity
regexes: "代理|[Pp]roxy"
regexes: '代理|[Pp]roxy'
skip-if:
- skip all
- skip area/Connectivity
@@ -139,7 +139,7 @@ labels:
- name: area/UI/UX
content: area/UI/UX
regexes: "界面|[Uu][Ii]|重叠|按钮|图标|组件|渲染|菜单|栏目|头像|主题|样式|[Cc][Ss][Ss]"
regexes: '界面|[Uu][Ii]|重叠|按钮|图标|组件|渲染|菜单|栏目|头像|主题|样式|[Cc][Ss][Ss]'
skip-if:
- skip all
- skip area/UI/UX
@@ -150,7 +150,7 @@ labels:
# Kind labels
- name: kind/documentation
content: kind/documentation
regexes: "文档|教程|[Dd]oc(s|umentation)|[Rr]eadme"
regexes: '文档|教程|[Dd]oc(s|umentation)|[Rr]eadme'
skip-if:
- skip all
- skip kind/documentation
@@ -161,7 +161,7 @@ labels:
# Client labels
- name: client:linux
content: client:linux
regexes: "(?:[Ll]inux|[Uu]buntu|[Dd]ebian)"
regexes: '(?:[Ll]inux|[Uu]buntu|[Dd]ebian)'
skip-if:
- skip all
- skip client:linux
@@ -171,7 +171,7 @@ labels:
- name: client:mac
content: client:mac
regexes: "(?:[Mm]ac|[Mm]acOS|[Oo]SX)"
regexes: '(?:[Mm]ac|[Mm]acOS|[Oo]SX)'
skip-if:
- skip all
- skip client:mac
@@ -181,7 +181,7 @@ labels:
- name: client:win
content: client:win
regexes: "(?:[Ww]in|[Ww]indows)"
regexes: '(?:[Ww]in|[Ww]indows)'
skip-if:
- skip all
- skip client:win
@@ -192,7 +192,7 @@ labels:
# SIG labels
- name: sig/Assistant
content: sig/Assistant
regexes: "快捷助手|[Aa]ssistant"
regexes: '快捷助手|[Aa]ssistant'
skip-if:
- skip all
- skip sig/Assistant
@@ -202,7 +202,7 @@ labels:
- name: sig/Data
content: sig/Data
regexes: "[Ww]ebdav|坚果云|备份|同步|数据|Obsidian|Notion|Joplin|思源"
regexes: '[Ww]ebdav|坚果云|备份|同步|数据|Obsidian|Notion|Joplin|思源'
skip-if:
- skip all
- skip sig/Data
@@ -212,7 +212,7 @@ labels:
- name: sig/MCP
content: sig/MCP
regexes: "[Mm][Cc][Pp]"
regexes: '[Mm][Cc][Pp]'
skip-if:
- skip all
- skip sig/MCP
@@ -222,7 +222,7 @@ labels:
- name: sig/RAG
content: sig/RAG
regexes: "知识库|[Rr][Aa][Gg]"
regexes: '知识库|[Rr][Aa][Gg]'
skip-if:
- skip all
- skip sig/RAG
@@ -233,7 +233,7 @@ labels:
# Other labels
- name: lgtm
content: lgtm
regexes: "(?:[Ll][Gg][Tt][Mm]|[Ll]ooks [Gg]ood [Tt]o [Mm]e)"
regexes: '(?:[Ll][Gg][Tt][Mm]|[Ll]ooks [Gg]ood [Tt]o [Mm]e)'
skip-if:
- skip all
- skip lgtm
@@ -243,7 +243,7 @@ labels:
- name: License
content: License
regexes: "(?:[Ll]icense|[Cc]opyright|[Mm][Ii][Tt]|[Aa]pache)"
regexes: '(?:[Ll]icense|[Cc]opyright|[Mm][Ii][Tt]|[Aa]pache)'
skip-if:
- skip all
- skip License
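For context (illustrative, not part of this diff): each rule above pairs a `content` regex, which applies a label when it matches the issue text, with skip/remove regexes that let commenters suppress or strip that label. Below is a minimal TypeScript sketch of how such rules could be evaluated; the `LabelRule` shape and `labelsFor` helper are hypothetical, not the actual issue-checker implementation.

```ts
// Illustrative only — a simplified model of the skip/remove rules above,
// not the actual MaaAssistantArknights/issue-checker implementation.
interface LabelRule {
  name: string
  content: string // regex that applies the label when it matches the issue text
  skipIf: RegExp[] // e.g. the "skip client:mac" command pattern
}

function labelsFor(issueText: string, rules: LabelRule[]): string[] {
  return rules
    .filter((rule) => new RegExp(rule.content).test(issueText))
    .filter((rule) => !rule.skipIf.some((re) => re.test(issueText)))
    .map((rule) => rule.name)
}

// A macOS bug report whose author asks the bot to skip the client label:
const body = 'Crash on macOS 14 after update. skip label `client:mac`'
const rules: LabelRule[] = [
  {
    name: 'client:mac',
    content: '(?:[Mm]ac|[Mm]acOS|[Oo]SX)',
    skipIf: [/[Ss]kip (?:[Ll]abels? |)(?:`|)client:mac(?:`|)/]
  }
]
console.log(labelsFor(body, rules)) // -> [] — the skip command suppresses the label
```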

@@ -0,0 +1,27 @@
name: Dispatch Docs Update on Release
on:
release:
types: [released]
permissions:
contents: write
jobs:
dispatch-docs-update:
runs-on: ubuntu-latest
steps:
- name: Get Release Tag from Event
id: get-event-tag
shell: bash
run: |
# 从当前 Release 事件中获取 tag_name
echo "tag=${{ github.event.release.tag_name }}" >> $GITHUB_OUTPUT
- name: Dispatch update-download-version workflow to cherry-studio-docs
uses: peter-evans/repository-dispatch@v3
with:
token: ${{ secrets.REPO_DISPATCH_TOKEN }}
repository: CherryHQ/cherry-studio-docs
event-type: update-download-version
client-payload: '{"version": "${{ steps.get-event-tag.outputs.tag }}"}'
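For context (illustrative, not part of this diff): the step above fires a `repository_dispatch` event carrying the release tag in `client_payload`, which the docs repository can read as `github.event.client_payload.version`. A hedged TypeScript sketch of the equivalent Octokit call, assuming `REPO_DISPATCH_TOKEN` is a personal access token with write access to the docs repo:

```ts
import { Octokit } from '@octokit/rest'

// Sketch only: roughly what peter-evans/repository-dispatch@v3 does on our behalf.
// REPO_DISPATCH_TOKEN is assumed to be a PAT with write access to the docs repo.
async function dispatchDocsUpdate(version: string): Promise<void> {
  const octokit = new Octokit({ auth: process.env.REPO_DISPATCH_TOKEN })
  await octokit.rest.repos.createDispatchEvent({
    owner: 'CherryHQ',
    repo: 'cherry-studio-docs',
    event_type: 'update-download-version',
    client_payload: { version } // the tag taken from the release event
  })
}

dispatchDocsUpdate('v1.0.0').catch(console.error) // example tag, for illustration only
```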

@@ -1,4 +1,4 @@
name: "Issue Checker"
name: 'Issue Checker'
on:
issues:
@@ -19,7 +19,7 @@ jobs:
steps:
- uses: MaaAssistantArknights/issue-checker@v1.14
with:
repo-token: "${{ secrets.GITHUB_TOKEN }}"
repo-token: '${{ secrets.GITHUB_TOKEN }}'
configuration-path: .github/issue-checker.yml
not-before: 2022-08-05T00:00:00Z
include-title: 1
include-title: 1

@@ -1,8 +1,8 @@
name: "Stale Issue Management"
name: 'Stale Issue Management'
on:
schedule:
- cron: "0 0 * * *"
- cron: '0 0 * * *'
workflow_dispatch:
env:
@@ -24,18 +24,18 @@ jobs:
uses: actions/stale@v9
with:
repo-token: ${{ secrets.GITHUB_TOKEN }}
only-labels: "needs-more-info"
only-labels: 'needs-more-info'
days-before-stale: ${{ env.daysBeforeStale }}
days-before-close: 0 # Close immediately after stale
stale-issue-label: "inactive"
close-issue-label: "closed:no-response"
days-before-close: 0 # Close immediately after stale
stale-issue-label: 'inactive'
close-issue-label: 'closed:no-response'
stale-issue-message: |
This issue has been labeled as needing more information and has been inactive for ${{ env.daysBeforeStale }} days.
It will be closed now due to lack of additional information.
该问题被标记为"需要更多信息"且已经 ${{ env.daysBeforeStale }} 天没有任何活动,将立即关闭。
operations-per-run: 50
exempt-issue-labels: "pending, Dev Team"
exempt-issue-labels: 'pending, Dev Team'
days-before-pr-stale: -1
days-before-pr-close: -1
@@ -45,11 +45,11 @@ jobs:
repo-token: ${{ secrets.GITHUB_TOKEN }}
days-before-stale: ${{ env.daysBeforeStale }}
days-before-close: ${{ env.daysBeforeClose }}
stale-issue-label: "inactive"
stale-issue-label: 'inactive'
stale-issue-message: |
This issue has been inactive for a prolonged period and will be closed automatically in ${{ env.daysBeforeClose }} days.
该问题已长时间处于闲置状态,${{ env.daysBeforeClose }} 天后将自动关闭。
exempt-issue-labels: "pending, Dev Team, kind/enhancement"
exempt-issue-labels: 'pending, Dev Team, kind/enhancement'
days-before-pr-stale: -1 # Completely disable stalling for PRs
days-before-pr-close: -1 # Completely disable closing for PRs

@@ -77,8 +77,10 @@ jobs:
env:
GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
RENDERER_VITE_AIHUBMIX_SECRET: ${{ vars.RENDERER_VITE_AIHUBMIX_SECRET }}
NODE_OPTIONS: --max-old-space-size=8192
MAIN_VITE_MINERU_API_KEY: ${{ vars.MAIN_VITE_MINERU_API_KEY }}
RENDERER_VITE_AIHUBMIX_SECRET: ${{ vars.RENDERER_VITE_AIHUBMIX_SECRET }}
RENDERER_VITE_PPIO_APP_SECRET: ${{ vars.RENDERER_VITE_PPIO_APP_SECRET }}
- name: Build Mac
if: matrix.os == 'macos-latest'
@@ -92,9 +94,11 @@
APPLE_ID: ${{ vars.APPLE_ID }}
APPLE_APP_SPECIFIC_PASSWORD: ${{ vars.APPLE_APP_SPECIFIC_PASSWORD }}
APPLE_TEAM_ID: ${{ vars.APPLE_TEAM_ID }}
RENDERER_VITE_AIHUBMIX_SECRET: ${{ vars.RENDERER_VITE_AIHUBMIX_SECRET }}
GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
NODE_OPTIONS: --max-old-space-size=8192
MAIN_VITE_MINERU_API_KEY: ${{ vars.MAIN_VITE_MINERU_API_KEY }}
RENDERER_VITE_AIHUBMIX_SECRET: ${{ vars.RENDERER_VITE_AIHUBMIX_SECRET }}
RENDERER_VITE_PPIO_APP_SECRET: ${{ vars.RENDERER_VITE_PPIO_APP_SECRET }}
- name: Build Windows
if: matrix.os == 'windows-latest'
@@ -103,8 +107,10 @@
yarn build:win
env:
GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
RENDERER_VITE_AIHUBMIX_SECRET: ${{ vars.RENDERER_VITE_AIHUBMIX_SECRET }}
NODE_OPTIONS: --max-old-space-size=8192
MAIN_VITE_MINERU_API_KEY: ${{ vars.MAIN_VITE_MINERU_API_KEY }}
RENDERER_VITE_AIHUBMIX_SECRET: ${{ vars.RENDERER_VITE_AIHUBMIX_SECRET }}
RENDERER_VITE_PPIO_APP_SECRET: ${{ vars.RENDERER_VITE_PPIO_APP_SECRET }}
- name: Release
uses: ncipollo/release-action@v1
@@ -115,38 +121,3 @@ jobs:
tag: ${{ steps.get-tag.outputs.tag }}
artifacts: 'dist/*.exe,dist/*.zip,dist/*.dmg,dist/*.AppImage,dist/*.snap,dist/*.deb,dist/*.rpm,dist/*.tar.gz,dist/latest*.yml,dist/rc*.yml,dist/*.blockmap'
token: ${{ secrets.GITHUB_TOKEN }}
dispatch-docs-update:
needs: release
if: success() && github.repository == 'CherryHQ/cherry-studio' # 确保所有构建成功且在主仓库中运行
runs-on: ubuntu-latest
steps:
- name: Get release tag
id: get-tag
shell: bash
run: |
if [ "${{ github.event_name }}" = "workflow_dispatch" ]; then
echo "tag=${{ github.event.inputs.tag }}" >> $GITHUB_OUTPUT
else
echo "tag=${GITHUB_REF#refs/tags/}" >> $GITHUB_OUTPUT
fi
- name: Check if tag is pre-release
id: check-tag
shell: bash
run: |
TAG="${{ steps.get-tag.outputs.tag }}"
if [[ "$TAG" == *"rc"* || "$TAG" == *"pre-release"* ]]; then
echo "is_pre_release=true" >> $GITHUB_OUTPUT
else
echo "is_pre_release=false" >> $GITHUB_OUTPUT
fi
- name: Dispatch update-download-version workflow to cherry-studio-docs
if: steps.check-tag.outputs.is_pre_release == 'false'
uses: peter-evans/repository-dispatch@v3
with:
token: ${{ secrets.REPO_DISPATCH_TOKEN }}
repository: CherryHQ/cherry-studio-docs
event-type: update-download-version
client-payload: '{"version": "${{ steps.get-tag.outputs.tag }}"}'

.gitignore (vendored, 4 changes)
@@ -46,6 +46,10 @@ local
.aider*
.cursorrules
.cursor/*
.claude/*
.gemini/*
.trae/*
.claude-code-router/*
# vitest
coverage

@@ -1,3 +1,3 @@
{
"recommendations": ["dbaeumer.vscode-eslint"]
"recommendations": ["dbaeumer.vscode-eslint", "esbenp.prettier-vscode", "editorconfig.editorconfig"]
}

.vscode/launch.json (vendored, 4 changes)
@@ -10,7 +10,7 @@
"windows": {
"runtimeExecutable": "${workspaceRoot}/node_modules/.bin/electron-vite.cmd"
},
"runtimeArgs": ["--sourcemap"],
"runtimeArgs": ["--inspect", "--sourcemap"],
"env": {
"REMOTE_DEBUGGING_PORT": "9222"
}
@@ -21,7 +21,7 @@
"request": "attach",
"type": "chrome",
"webRoot": "${workspaceFolder}/src/renderer",
"timeout": 60000,
"timeout": 3000000,
"presentation": {
"hidden": true
}

@@ -1,8 +1,10 @@
{
"editor.formatOnSave": true,
"editor.codeActionsOnSave": {
"source.fixAll.eslint": "explicit"
"source.fixAll.eslint": "explicit",
"source.organizeImports": "never"
},
"files.eol": "\n",
"search.exclude": {
"**/dist/**": true,
".yarn/releases/**": true

File diff suppressed because it is too large.

@@ -0,0 +1,69 @@
diff --git a/es/dropdown/dropdown.js b/es/dropdown/dropdown.js
index 986877a762b9ad0aca596a8552732cd12d2eaabb..1f18aa2ea745e68950e4cee16d4d655f5c835fd5 100644
--- a/es/dropdown/dropdown.js
+++ b/es/dropdown/dropdown.js
@@ -2,7 +2,7 @@
import * as React from 'react';
import LeftOutlined from "@ant-design/icons/es/icons/LeftOutlined";
-import RightOutlined from "@ant-design/icons/es/icons/RightOutlined";
+import { ChevronRight } from 'lucide-react';
import classNames from 'classnames';
import RcDropdown from 'rc-dropdown';
import useEvent from "rc-util/es/hooks/useEvent";
@@ -158,8 +158,10 @@ const Dropdown = props => {
className: `${prefixCls}-menu-submenu-arrow`
}, direction === 'rtl' ? (/*#__PURE__*/React.createElement(LeftOutlined, {
className: `${prefixCls}-menu-submenu-arrow-icon`
- })) : (/*#__PURE__*/React.createElement(RightOutlined, {
- className: `${prefixCls}-menu-submenu-arrow-icon`
+ })) : (/*#__PURE__*/React.createElement(ChevronRight, {
+ size: 16,
+ strokeWidth: 1.8,
+ className: `${prefixCls}-menu-submenu-arrow-icon lucide-custom`
}))),
mode: "vertical",
selectable: false,
diff --git a/es/dropdown/style/index.js b/es/dropdown/style/index.js
index 768c01783002c6901c85a73061ff6b3e776a60ce..39b1b95a56cdc9fb586a193c3adad5141f5cf213 100644
--- a/es/dropdown/style/index.js
+++ b/es/dropdown/style/index.js
@@ -240,7 +240,8 @@ const genBaseStyle = token => {
marginInlineEnd: '0 !important',
color: token.colorTextDescription,
fontSize: fontSizeIcon,
- fontStyle: 'normal'
+ fontStyle: 'normal',
+ marginTop: 3,
}
}
}),
diff --git a/es/select/useIcons.js b/es/select/useIcons.js
index 959115be936ef8901548af2658c5dcfdc5852723..c812edd52123eb0faf4638b1154fcfa1b05b513b 100644
--- a/es/select/useIcons.js
+++ b/es/select/useIcons.js
@@ -4,10 +4,10 @@ import * as React from 'react';
import CheckOutlined from "@ant-design/icons/es/icons/CheckOutlined";
import CloseCircleFilled from "@ant-design/icons/es/icons/CloseCircleFilled";
import CloseOutlined from "@ant-design/icons/es/icons/CloseOutlined";
-import DownOutlined from "@ant-design/icons/es/icons/DownOutlined";
import LoadingOutlined from "@ant-design/icons/es/icons/LoadingOutlined";
import SearchOutlined from "@ant-design/icons/es/icons/SearchOutlined";
import { devUseWarning } from '../_util/warning';
+import { ChevronDown } from 'lucide-react';
export default function useIcons(_ref) {
let {
suffixIcon,
@@ -56,8 +56,10 @@ export default function useIcons(_ref) {
className: iconCls
}));
}
- return getSuffixIconNode(/*#__PURE__*/React.createElement(DownOutlined, {
- className: iconCls
+ return getSuffixIconNode(/*#__PURE__*/React.createElement(ChevronDown, {
+ size: 16,
+ strokeWidth: 1.8,
+ className: `${iconCls} lucide-custom`
}));
};
}
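For context (illustrative, not part of this patch): the changes above replace Ant Design's `RightOutlined`/`DownOutlined` arrows with lucide-react icons using the same `size`/`strokeWidth` values. A small sketch of rendering the replacement icon directly; the component name and the styling behind `lucide-custom` are assumptions:

```tsx
import { ChevronDown } from 'lucide-react'

// Sketch: the icon and props the patch injects as antd's select suffix icon.
// `SelectSuffixIcon` and the styling behind `lucide-custom` are assumptions.
export const SelectSuffixIcon = () => (
  <ChevronDown size={16} strokeWidth={1.8} className="lucide-custom" />
)
```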

@@ -1,4 +1,4 @@
[中文](./docs/CONTRIBUTING.zh.md) | [English](./CONTRIBUTING.md)
[中文](docs/CONTRIBUTING.zh.md) | [English](CONTRIBUTING.md)
# Cherry Studio Contributor Guide
@@ -58,6 +58,10 @@ git commit --signoff -m "Your commit message"
Maintainers are here to help you implement your use case within a reasonable timeframe. They will do their best to review your code and provide constructive feedback promptly. However, if you get stuck during the review process or feel your Pull Request is not receiving the attention it deserves, please contact us via comments in the Issue or through the [Community](README.md#-community).
### Participating in the Test Plan
The Test Plan aims to provide users with a more stable application experience and faster iteration speed. For details, please refer to the [Test Plan](docs/testplan-en.md).
### Other Suggestions
- **Contact Developers**: Before submitting a PR, you can contact the developers first to discuss or get help.

README.md (137 changes)
@@ -33,32 +33,22 @@
<img src="https://github.com/CherryHQ/cherry-studio/blob/main/build/icon.png?raw=true" width="150" height="150" alt="banner" /><br>
</a>
</h1>
<p align="center">English | <a href="./docs/README.zh.md">中文</a> | <a href="./docs/README.ja.md">日本語</a> | <a href="https://cherry-ai.com">Official Site</a> | <a href="https://docs.cherry-ai.com/cherry-studio-wen-dang/en-us">Documents</a> | <a href="./docs/dev.md">Development</a> | <a href="https://github.com/CherryHQ/cherry-studio/issues">Feedback</a><br></p>
<!-- 题头徽章组合 -->
<p align="center">English | <a href="./docs/README.zh.md">中文</a> | <a href="https://cherry-ai.com">Official Site</a> | <a href="https://docs.cherry-ai.com/cherry-studio-wen-dang/en-us">Documents</a> | <a href="./docs/dev.md">Development</a> | <a href="https://github.com/CherryHQ/cherry-studio/issues">Feedback</a><br></p>
<div align="center">
[![][deepwiki-shield]][deepwiki-link]
[![][twitter-shield]][twitter-link]
[![][discord-shield]][discord-link]
[![][telegram-shield]][telegram-link]
</div>
<!-- 项目统计徽章 -->
<div align="center">
[![][github-stars-shield]][github-stars-link]
[![][github-forks-shield]][github-forks-link]
[![][github-release-shield]][github-release-link]
[![][github-nightly-shield]][github-nightly-link]
[![][github-contributors-shield]][github-contributors-link]
</div>
<div align="center">
[![][license-shield]][license-link]
[![][commercial-shield]][commercial-link]
[![][sponsor-shield]][sponsor-link]
@@ -66,9 +56,9 @@
</div>
<div align="center">
<a href="https://hellogithub.com/repository/1605492e1e2a4df3be07abfa4578dd37" target="_blank"><img src="https://api.hellogithub.com/v1/widgets/recommend.svg?rid=1605492e1e2a4df3be07abfa4578dd37" alt="FeaturedHelloGitHub" style="width: 200px; height: 43px;" width="200" height="43" /></a>
<a href="https://trendshift.io/repositories/11772" target="_blank"><img src="https://trendshift.io/api/badge/repositories/11772" alt="kangfenmao%2Fcherry-studio | Trendshift" style="width: 250px; height: 55px;" width="250" height="55"/></a>
<a href="https://www.producthunt.com/posts/cherry-studio?embed=true&utm_source=badge-featured&utm_medium=badge&utm_souce=badge-cherry&#0045;studio" target="_blank"><img src="https://api.producthunt.com/widgets/embed-image/v1/featured.svg?post_id=496640&theme=light" alt="Cherry&#0032;Studio - AI&#0032;Chatbots&#0044;&#0032;AI&#0032;Desktop&#0032;Client | Product Hunt" style="width: 200px; height: 43px;" width="200" height="43" /></a>
<a href="https://hellogithub.com/repository/1605492e1e2a4df3be07abfa4578dd37" target="_blank" style="text-decoration: none"><img src="https://api.hellogithub.com/v1/widgets/recommend.svg?rid=1605492e1e2a4df3be07abfa4578dd37" alt="FeaturedHelloGitHub" width="220" height="55" /></a>
<a href="https://trendshift.io/repositories/11772" target="_blank" style="text-decoration: none"><img src="https://trendshift.io/api/badge/repositories/11772" alt="kangfenmao%2Fcherry-studio | Trendshift" width="220" height="55" /></a>
<a href="https://www.producthunt.com/posts/cherry-studio?embed=true&utm_source=badge-featured&utm_medium=badge&utm_souce=badge-cherry&#0045;studio" target="_blank"><img src="https://api.producthunt.com/widgets/embed-image/v1/featured.svg?post_id=496640&theme=light" alt="Cherry&#0032;Studio - AI&#0032;Chatbots&#0044;&#0032;AI&#0032;Desktop&#0032;Client | Product Hunt" width="220" height="55" /></a>
</div>
# 🍒 Cherry Studio
@@ -193,10 +183,82 @@ Refer to the [Branching Strategy](docs/branching-strategy-en.md) for contribution
3. **Submit Changes**: Commit and push your changes.
4. **Open a Pull Request**: Describe your changes and reasons.
For more detailed guidelines, please refer to our [Contributing Guide](./CONTRIBUTING.md).
For more detailed guidelines, please refer to our [Contributing Guide](CONTRIBUTING.md).
Thank you for your support and contributions!
# 🔧 Developer Co-creation Program
We are launching the Cherry Studio Developer Co-creation Program to foster a healthy and positive-feedback loop within the open-source ecosystem. We believe that great software is built collaboratively, and every merged pull request breathes new life into the project.
We sincerely invite you to join our ranks of contributors and shape the future of Cherry Studio with us.
## Contributor Rewards Program
To give back to our core contributors and create a virtuous cycle, we have established the following long-term incentive plan.
**The inaugural tracking period for this program will be Q3 2025 (July, August, September). Rewards for this cycle will be distributed on October 1st.**
Within any tracking period (e.g., July 1st to September 30th for the first cycle), any developer who contributes more than **30 meaningful commits** to any of Cherry Studio's open-source projects on GitHub is eligible for the following benefits:
- **Cursor Subscription Sponsorship**: Receive a **$70 USD** credit or reimbursement for your [Cursor](https://cursor.sh/) subscription, making AI your most efficient coding partner.
- **Unlimited Model Access**: Get **unlimited** API calls for the **DeepSeek** and **Qwen** models.
- **Cutting-Edge Tech Access**: Enjoy occasional perks, including API access to models like **Claude**, **Gemini**, and **OpenAI**, keeping you at the forefront of technology.
## Growing Together & Future Plans
A vibrant community is the driving force behind any sustainable open-source project. As Cherry Studio grows, so will our rewards program. We are committed to continuously aligning our benefits with the best-in-class tools and resources in the industry. This ensures our core contributors receive meaningful support, creating a positive cycle where developers, the community, and the project grow together.
**Moving forward, the project will also embrace an increasingly open stance to give back to the entire open-source community.**
## How to Get Started?
We look forward to your first Pull Request!
You can start by exploring our repositories, picking up a `good first issue`, or proposing your own enhancements. Every commit is a testament to the spirit of open source.
Thank you for your interest and contributions.
Let's build together.
# 🏢 Enterprise Edition
Building on the Community Edition, we are proud to introduce **Cherry Studio Enterprise Edition**—a privately deployable AI productivity and management platform designed for modern teams and enterprises.
The Enterprise Edition addresses core challenges in team collaboration by centralizing the management of AI resources, knowledge, and data. It empowers organizations to enhance efficiency, foster innovation, and ensure compliance, all while maintaining 100% control over their data in a secure environment.
## Core Advantages
- **Unified Model Management**: Centrally integrate and manage various cloud-based LLMs (e.g., OpenAI, Anthropic, Google Gemini) and locally deployed private models. Employees can use them out-of-the-box without individual configuration.
- **Enterprise-Grade Knowledge Base**: Build, manage, and share team-wide knowledge bases. Ensure knowledge is retained and consistent, enabling team members to interact with AI based on unified and accurate information.
- **Fine-Grained Access Control**: Easily manage employee accounts and assign role-based permissions for different models, knowledge bases, and features through a unified admin backend.
- **Fully Private Deployment**: Deploy the entire backend service on your on-premises servers or private cloud, ensuring your data remains 100% private and under your control to meet the strictest security and compliance standards.
- **Reliable Backend Services**: Provides stable API services, enterprise-grade data backup and recovery mechanisms to ensure business continuity.
## ✨ Online Demo
> 🚧 **Public Beta Notice**
>
> The Enterprise Edition is currently in its early public beta stage, and we are actively iterating and optimizing its features. We are aware that it may not be perfectly stable yet. If you encounter any issues or have valuable suggestions during your trial, we would be very grateful if you could contact us via email to provide feedback.
**🔗 [Cherry Studio Enterprise](https://www.cherry-ai.com/enterprise)**
## Version Comparison
| Feature | Community Edition | Enterprise Edition |
| :---------------- | :----------------------------------------- | :-------------------------------------------------------------------------------------------------------------------------------------- |
| **Open Source** | ✅ Yes | ⭕️ part. released to cust. |
| **Cost** | Free for Personal Use / Commercial License | Buyout / Subscription Fee |
| **Admin Backend** | — | ● Centralized **Model** Access<br>**Employee** Management<br>● Shared **Knowledge Base**<br>**Access** Control<br>**Data** Backup |
| **Server** | — | ✅ Dedicated Private Deployment |
## Get the Enterprise Edition
We believe the Enterprise Edition will become your team's AI productivity engine. If you are interested in Cherry Studio Enterprise Edition and would like to learn more, request a quote, or schedule a demo, please contact us.
- **For Business Inquiries & Purchasing**:
**📧 [bd@cherry-ai.com](mailto:bd@cherry-ai.com)**
# 🔗 Related Projects
- [one-api](https://github.com/songquanpeng/one-api):LLM API management and distribution system, supporting mainstream models like OpenAI, Azure, and Anthropic. Features unified API interface, suitable for key management and secondary distribution.
@@ -210,34 +272,45 @@ Thank you for your support and contributions!
</a>
<br /><br />
# 📊 GitHub Stats
![Stats](https://repobeats.axiom.co/api/embed/a693f2e5f773eed620f70031e974552156c7f397.svg 'Repobeats analytics image')
# ⭐️ Star History
[![Star History Chart](https://api.star-history.com/svg?repos=CherryHQ/cherry-studio&type=Timeline)](https://star-history.com/#CherryHQ/cherry-studio&Timeline)
<a href="https://www.star-history.com/#CherryHQ/cherry-studio&Date">
<picture>
<source media="(prefers-color-scheme: dark)" srcset="https://api.star-history.com/svg?repos=CherryHQ/cherry-studio&type=Date&theme=dark" />
<source media="(prefers-color-scheme: light)" srcset="https://api.star-history.com/svg?repos=CherryHQ/cherry-studio&type=Date" />
<img alt="Star History Chart" src="https://api.star-history.com/svg?repos=CherryHQ/cherry-studio&type=Date" />
</picture>
</a>
<!-- Links & Images -->
[deepwiki-shield]: https://img.shields.io/badge/Deepwiki-CherryHQ-0088CC?style=plastic
[deepwiki-shield]: https://img.shields.io/badge/Deepwiki-CherryHQ-0088CC?logo=data:image/svg+xml;base64,PHN2ZyB4bWxucz0iaHR0cDovL3d3dy53My5vcmcvMjAwMC9zdmciIHZpZXdCb3g9IjAgMCAyNy45MyAzMiI+PHBhdGggZD0iTTE5LjMzIDE0LjEyYy42Ny0uMzkgMS41LS4zOSAyLjE4IDBsMS43NCAxYy4wNi4wMy4xMS4wNi4xOC4wN2guMDRjLjA2LjAzLjEyLjAzLjE4LjAzaC4wMmMuMDYgMCAuMTEgMCAuMTctLjAyaC4wM2MuMDYtLjAyLjEyLS4wNS4xNy0uMDhoLjAybDMuNDgtMi4wMWMuMjUtLjE0LjQtLjQxLjQtLjdWOC40YS44MS44MSAwIDAgMC0uNC0uN2wtMy40OC0yLjAxYS44My44MyAwIDAgMC0uODEgMEwxOS43NyA3LjdoLS4wMWwtLjE1LjEyLS4wMi4wMnMtLjA3LjA5LS4xLjE0VjhhLjQuNCAwIDAgMC0uMDguMTd2LjA0Yy0uMDMuMDYtLjAzLjEyLS4wMy4xOXYyLjAxYzAgLjc4LS40MSAxLjQ5LTEuMDkgMS44OC0uNjcuMzktMS41LjM5LTIuMTggMGwtMS43NC0xYS42LjYgMCAwIDAtLjIxLS4wOGMtLjA2LS4wMS0uMTItLjAyLS4xOC0uMDJoLS4wM2MtLjA2IDAtLjExLjAxLS4xNy4wMmgtLjAzYy0uMDYuMDItLjEyLjA0LS4xNy4wN2gtLjAybC0zLjQ3IDIuMDFjLS4yNS4xNC0uNC40MS0uNC43VjE4YzAgLjI5LjE1LjU1LjQuN2wzLjQ4IDIuMDFoLjAyYy4wNi4wNC4xMS4wNi4xNy4wOGguMDNjLjA1LjAyLjExLjAzLjE3LjAzaC4wMmMuMDYgMCAuMTIgMCAuMTgtLjAyaC4wNGMuMDYtLjAzLjEyLS4wNS4xOC0uMDhsMS43NC0xYy42Ny0uMzkgMS41LS4zOSAyLjE3IDBzMS4wOSAxLjExIDEuMDkgMS44OHYyLjAxYzAgLjA3IDAgLjEzLjAyLjE5di4wNGMuMDMuMDYuMDUuMTIuMDguMTd2LjAycy4wOC4wOS4xMi4xM2wuMDIuMDJzLjA5LjA4LjE1LjExYzAgMCAuMDEgMCAuMDEuMDFsMy40OCAyLjAxYy4yNS4xNC41Ni4xNC44MSAwbDMuNDgtMi4wMWMuMjUtLjE0LjQtLjQxLjQtLjd2LTQuMDFhLjgxLjgxIDAgMCAwLS40LS43bC0zLjQ4LTIuMDFoLS4wMmMtLjA1LS4wNC0uMTEtLjA2LS4xNy0uMDhoLS4wM2EuNS41IDAgMCAwLS4xNy0uMDNoLS4wM2MtLjA2IDAtLjEyIDAtLjE4LjAyLS4wNy4wMi0uMTUuMDUtLjIxLjA4bC0xLjc0IDFjLS42Ny4zOS0xLjUuMzktMi4xNyAwYTIuMTkgMi4xOSAwIDAgMS0xLjA5LTEuODhjMC0uNzguNDItMS40OSAxLjA5LTEuODhaIiBzdHlsZT0iZmlsbDojNWRiZjlkIi8+PHBhdGggZD0ibS40IDEzLjExIDMuNDcgMi4wMWMuMjUuMTQuNTYuMTQuOCAwbDMuNDctMi4wMWguMDFsLjE1LS4xMi4wMi0uMDJzLjA3LS4wOS4xLS4xNGwuMDItLjAyYy4wMy0uMDUuMDUtLjExLjA3LS4xN3YtLjA0Yy4wMy0uMDYuMDMtLjEyLjAzLS4xOVYxMC40YzAtLjc4LjQyLTEuNDkgMS4wOS0xLjg4czEuNS0uMzkgMi4xOCAwbDEuNzQgMWMuMDcuMDQuMTQuMDcuMjEuMDguMDYuMDEuMTIuMDIuMTguMDJoLjAzYy4wNiAwIC4xMS0uMDEuMTctLjAyaC4wM2MuMDYtLjAyLjEyLS4wNC4xNy0uMDdoLjAybDMuNDctMi4wMmMuMjUtLjE0LjQtLjQxLjQtLjd2LTRhLjgxLjgxIDAgMCAwLS40LS43bC0zLjQ2LTJhLjgzLjgzIDAgMCAwLS44MSAwbC0zLjQ4IDIuMDFoLS4wMWwtLjE1LjEyLS4wMi4wMi0uMS4xMy0uMDIuMDJjLS4wMy4wNS0uMDUuMTEtLjA3LjE3di4wNGMtLjAzLjA2LS4wMy4xMi0uMDMuMTl2Mi4wMWMwIC43OC0uNDIgMS40OS0xLjA5IDEuODhzLTEuNS4zOS0yLjE4IDBsLTEuNzQtMWEuNi42IDAgMCAwLS4yMS0uMDhjLS4wNi0uMDEtLjEyLS4wMi0uMTgtLjAyaC0uMDNjLS4wNiAwLS4xMS4wMS0uMTcuMDJoLS4wM2MtLjA2LjAyLS4xMi4wNS0uMTcuMDhoLS4wMkwuNCA3LjcxYy0uMjUuMTQtLjQuNDEtLjQuNjl2NC4wMWMwIC4yOS4xNS41Ni40LjciIHN0eWxlPSJmaWxsOiM0NDY4YzQiLz48cGF0aCBkPSJtMTcuODQgMjQuNDgtMy40OC0yLjAxaC0uMDJjLS4wNS0uMDQtLjExLS4wNi0uMTctLjA4aC0uMDNhLjUuNSAwIDAgMC0uMTctLjAzaC0uMDNjLS4wNiAwLS4xMiAwLS4xOC4wMmgtLjA0Yy0uMDYuMDMtLjEyLjA1LS4xOC4wOGwtMS43NCAxYy0uNjcuMzktMS41LjM5LTIuMTggMGEyLjE5IDIuMTkgMCAwIDEtMS4wOS0xLjg4di0yLjAxYzAtLjA2IDAtLjEzLS4wMi0uMTl2LS4wNGMtLjAzLS4wNi0uMDUtLjExLS4wOC0uMTdsLS4wMi0uMDJzLS4wNi0uMDktLjEtLjEzTDguMjkgMTlzLS4wOS0uMDgtLjE1LS4xMWgtLjAxbC0zLjQ3LTIuMDJhLjgzLjgzIDAgMCAwLS44MSAwTC4zNyAxOC44OGEuODcuODcgMCAwIDAtLjM3LjcxdjQuMDFjMCAuMjkuMTUuNTUuNC43bDMuNDcgMi4wMWguMDJjLjA1LjA0LjExLjA2LjE3LjA4aC4wM2MuMDUuMDIuMTEuMDMuMTYuMDNoLjAzYy4wNiAwIC4xMiAwIC4xOC0uMDJoLjA0Yy4wNi0uMDMuMTItLjA1LjE4LS4wOGwxLjc0LTFjLjY3LS4zOSAxLjUtLjM5IDIuMTcgMHMxLjA5IDEuMTEgMS4wOSAxLjg4djIuMDFjMCAuMDcgMCAuMTMuMDIuMTl2LjA0Yy4wMy4wNi4wNS4xMS4wOC4xN2wuMDIuMDJzLjA2LjA5LjEuMTRsLjAyLjAycy4wOS4wOC4xNS4xMWguMDFsMy40OCAyLjAyYy4yNS4xNC41Ni4xNC44MSAwbDMuNDgtMi4wMWMuMjUtLjE0LjQtLjQxLjQtLjdWMjUuMmEuODEuODEgMCAwIDAtLjQtLjdaIiBzdHlsZT0iZmlsbDojNDI5M2Q5Ii8+PC9zdmc+
[deepwiki-link]: https://deepwiki.com/CherryHQ/cherry-studio
[twitter-shield]: https://img.shields.io/badge/Twitter-CherryStudioApp-0088CC?style=plastic&logo=x
[twitter-shield]: https://img.shields.io/badge/Twitter-CherryStudioApp-0088CC?logo=x
[twitter-link]: https://twitter.com/CherryStudioHQ
[discord-shield]: https://img.shields.io/badge/Discord-@CherryStudio-0088CC?style=plastic&logo=discord
[discord-shield]: https://img.shields.io/badge/Discord-@CherryStudio-0088CC?logo=discord
[discord-link]: https://discord.gg/wez8HtpxqQ
[telegram-shield]: https://img.shields.io/badge/Telegram-@CherryStudioAI-0088CC?style=plastic&logo=telegram
[telegram-shield]: https://img.shields.io/badge/Telegram-@CherryStudioAI-0088CC?logo=telegram
[telegram-link]: https://t.me/CherryStudioAI
<!-- Links & Images -->
[github-stars-shield]: https://img.shields.io/github/stars/CherryHQ/cherry-studio?style=social
[github-stars-link]: https://github.com/CherryHQ/cherry-studio/stargazers
[github-forks-shield]: https://img.shields.io/github/forks/CherryHQ/cherry-studio?style=social
[github-forks-link]: https://github.com/CherryHQ/cherry-studio/network
[github-release-shield]: https://img.shields.io/github/v/release/CherryHQ/cherry-studio
[github-release-shield]: https://img.shields.io/github/v/release/CherryHQ/cherry-studio?logo=github
[github-release-link]: https://github.com/CherryHQ/cherry-studio/releases
[github-contributors-shield]: https://img.shields.io/github/contributors/CherryHQ/cherry-studio
[github-nightly-shield]: https://img.shields.io/github/actions/workflow/status/CherryHQ/cherry-studio/nightly-build.yml?label=nightly%20build&logo=github
[github-nightly-link]: https://github.com/CherryHQ/cherry-studio/actions/workflows/nightly-build.yml
[github-contributors-shield]: https://img.shields.io/github/contributors/CherryHQ/cherry-studio?logo=github
[github-contributors-link]: https://github.com/CherryHQ/cherry-studio/graphs/contributors
<!-- Links & Images -->
[license-shield]: https://img.shields.io/badge/License-AGPLv3-important.svg?style=plastic&logo=gnu
[license-shield]: https://img.shields.io/badge/License-AGPLv3-important.svg?logo=gnu
[license-link]: https://www.gnu.org/licenses/agpl-3.0
[commercial-shield]: https://img.shields.io/badge/License-Contact-white.svg?style=plastic&logoColor=white&logo=telegram&color=blue
[commercial-shield]: https://img.shields.io/badge/License-Contact-white.svg?logoColor=white&logo=telegram&color=blue
[commercial-link]: mailto:license@cherry-ai.com?subject=Commercial%20License%20Inquiry
[sponsor-shield]: https://img.shields.io/badge/Sponsor-FF6699.svg?style=plastic&logo=githubsponsors&logoColor=white
[sponsor-shield]: https://img.shields.io/badge/Sponsor-FF6699.svg?logo=githubsponsors&logoColor=white
[sponsor-link]: https://github.com/CherryHQ/cherry-studio/blob/main/docs/sponsor.md

SECURITY.md (new file, 64 changes)
@@ -0,0 +1,64 @@
# Security Policy
## 📢 Reporting a Vulnerability
At Cherry Studio, we take security seriously and appreciate your efforts to responsibly disclose vulnerabilities. If you discover a security issue, please report it as soon as possible.
**Please do not create public issues for security-related reports.**
- To report a security issue, please use the GitHub Security Advisories tab to "[Open a draft security advisory](https://github.com/CherryHQ/cherry-studio/security/advisories/new)".
- Include a detailed description of the issue, steps to reproduce, potential impact, and any possible mitigations.
- If applicable, please also attach proof-of-concept code or screenshots.
We will acknowledge your report within **72 hours** and provide a status update as we investigate.
---
## 🔒 Supported Versions
We aim to support the latest released version and one previous minor release.
| Version | Supported |
|-----------------|--------------------|
| Latest (`main`) | ✅ Supported |
| Previous minor | ✅ Supported |
| Older versions | ❌ Not supported |
If you are using an unsupported version, we strongly recommend updating to the latest release to receive security fixes.
---
## 💡 Security Measures
Cherry Studio integrates several security best practices, including:
- Strict dependency updates and regular vulnerability scanning.
- TypeScript strict mode and linting to reduce potential injection or runtime issues.
- Enforced code formatting and pre-commit hooks.
- Internal security reviews before releases.
- Dedicated MCP (Model Context Protocol) safeguards for model interactions and data privacy.
---
## 🛡️ Disclosure Policy
- We follow a **coordinated disclosure** approach.
- We will not publicly disclose vulnerabilities until a fix has been developed and released.
- Credit will be given to researchers who responsibly disclose vulnerabilities, if requested.
---
## 🤝 Acknowledgements
We greatly appreciate contributions from the security community and strive to recognize all researchers who help keep Cherry Studio safe.
---
## 🌟 Questions?
For any security-related questions not involving vulnerabilities, please reach out to:
**security@cherry-ai.com**
---
Thank you for helping keep Cherry Studio and its users secure!

@@ -1,6 +1,6 @@
# Cherry Studio 贡献者指南
[**English**](../CONTRIBUTING.md) | [**中文**](./CONTRIBUTING.zh.md)
[**English**](../CONTRIBUTING.md) | [**中文**](CONTRIBUTING.zh.md)
欢迎来到 Cherry Studio 的贡献者社区!我们致力于将 Cherry Studio 打造成一个长期提供价值的项目,并希望邀请更多的开发者加入我们的行列。无论您是经验丰富的开发者还是刚刚起步的初学者,您的贡献都将帮助我们更好地服务用户,提升软件质量。
@@ -24,7 +24,7 @@
## 开始之前
请确保阅读了[行为准则](CODE_OF_CONDUCT.md)和[LICENSE](LICENSE)。
请确保阅读了[行为准则](../CODE_OF_CONDUCT.md)和[LICENSE](../LICENSE)。
## 开始贡献
@@ -32,7 +32,7 @@
### 测试
未经测试的功能等同于不存在。为确保代码真正有效,应通过单元测试和功能测试覆盖相关流程。因此,在考虑贡献时,也请考虑可测试性。所有测试均可本地运行,无需依赖 CI。请参阅[开发者指南](docs/dev.md#test)中的“Test”部分。
未经测试的功能等同于不存在。为确保代码真正有效,应通过单元测试和功能测试覆盖相关流程。因此,在考虑贡献时,也请考虑可测试性。所有测试均可本地运行,无需依赖 CI。请参阅[开发者指南](dev.md#test)中的“Test”部分。
### 拉取请求的自动化测试
@@ -60,7 +60,11 @@ git commit --signoff -m "Your commit message"
### 获取代码审查/合并
维护者在此帮助您在合理时间内实现您的用例。他们会尽力在合理时间内审查您的代码并提供建设性反馈。但如果您在审查过程中受阻,或认为您的 Pull Request 未得到应有的关注,请通过 Issue 中的评论或者[社群](README.md#-community)联系我们
维护者在此帮助您在合理时间内实现您的用例。他们会尽力在合理时间内审查您的代码并提供建设性反馈。但如果您在审查过程中受阻,或认为您的 Pull Request 未得到应有的关注,请通过 Issue 中的评论或者[社群](README.zh.md#-community)联系我们
### 参与测试计划
测试计划旨在为用户提供更稳定的应用体验和更快的迭代速度,详细情况请参阅[测试计划](testplan-zh.md)。
### 其他建议

@@ -1,215 +0,0 @@
<h1 align="center">
<a href="https://github.com/CherryHQ/cherry-studio/releases">
<img src="https://github.com/CherryHQ/cherry-studio/blob/main/build/icon.png?raw=true" width="150" height="150" alt="banner" /><br>
</a>
</h1>
<p align="center">
<a href="https://github.com/CherryHQ/cherry-studio">English</a> | <a href="./README.zh.md">中文</a> | 日本語 | <a href="https://cherry-ai.com">公式サイト</a> | <a href="https://docs.cherry-ai.com/cherry-studio-wen-dang/ja">ドキュメント</a> | <a href="./dev.md">開発</a> | <a href="https://github.com/CherryHQ/cherry-studio/issues">フィードバック</a><br>
</p>
<!-- バッジコレクション -->
<div align="center">
[![][deepwiki-shield]][deepwiki-link]
[![][twitter-shield]][twitter-link]
[![][discord-shield]][discord-link]
[![][telegram-shield]][telegram-link]
</div>
<!-- プロジェクト統計 -->
<div align="center">
[![][github-stars-shield]][github-stars-link]
[![][github-forks-shield]][github-forks-link]
[![][github-release-shield]][github-release-link]
[![][github-contributors-shield]][github-contributors-link]
</div>
<div align="center">
[![][license-shield]][license-link]
[![][commercial-shield]][commercial-link]
[![][sponsor-shield]][sponsor-link]
</div>
<div align="center">
<a href="https://hellogithub.com/repository/1605492e1e2a4df3be07abfa4578dd37" target="_blank"><img src="https://api.hellogithub.com/v1/widgets/recommend.svg?rid=1605492e1e2a4df3be07abfa4578dd37" alt="FeaturedHelloGitHub" style="width: 200px; height: 43px;" width="200" height="43" /></a>
<a href="https://trendshift.io/repositories/11772" target="_blank"><img src="https://trendshift.io/api/badge/repositories/11772" alt="kangfenmao%2Fcherry-studio | Trendshift" style="width: 250px; height: 55px;" width="250" height="55"/></a>
<a href="https://www.producthunt.com/posts/cherry-studio?embed=true&utm_source=badge-featured&utm_medium=badge&utm_souce=badge-cherry&#0045;studio" target="_blank"><img src="https://api.producthunt.com/widgets/embed-image/v1/featured.svg?post_id=496640&theme=light" alt="Cherry&#0032;Studio - AI&#0032;Chatbots&#0044;&#0032;AI&#0032;Desktop&#0032;Client | Product Hunt" style="width: 200px; height: 43px;" width="200" height="43" /></a>
</div>
# 🍒 Cherry Studio
Cherry Studio は、複数の LLM プロバイダーをサポートするデスクトップクライアントで、Windows、Mac、Linux で利用可能です。
👏 [Telegram](https://t.me/CherryStudioAI)[Discord](https://discord.gg/wez8HtpxqQ) | [QQグループ(575014769)](https://qm.qq.com/q/lo0D4qVZKi)
❤️ Cherry Studio をお気に入りにしましたか?小さな星をつけてください 🌟 または [スポンサー](sponsor.md) をして開発をサポートしてください!
# 🌠 スクリーンショット
![](https://github.com/user-attachments/assets/36dddb2c-e0fb-4a5f-9411-91447bab6e18)
![](https://github.com/user-attachments/assets/f549e8a0-2385-40b4-b52b-2039e39f2930)
![](https://github.com/user-attachments/assets/58e0237c-4d36-40de-b428-53051d982026)
# 🌟 主な機能
1. **多様な LLM サービス対応**
- ☁️ 主要な LLM クラウドサービス対応OpenAI、Gemini、Anthropic など
- 🔗 AI Web サービス統合Claude、Peplexity、Poe など
- 💻 Ollama、LM Studio によるローカルモデル実行対応
2. **AI アシスタントと対話**
- 📚 300+ の事前設定済み AI アシスタント
- 🤖 カスタム AI アシスタントの作成
- 💬 複数モデルでの同時対話機能
3. **文書とデータ処理**
- 📄 テキスト、画像、Office、PDF など多様な形式対応
- ☁️ WebDAV によるファイル管理とバックアップ
- 📊 Mermaid による図表作成
- 💻 コードハイライト機能
4. **実用的なツール統合**
- 🔍 グローバル検索機能
- 📝 トピック管理システム
- 🔤 AI による翻訳機能
- 🎯 ドラッグ&ドロップによる整理
- 🔌 ミニプログラム対応
- ⚙️ MCPモデルコンテキストプロトコルサービス
5. **優れたユーザー体験**
- 🖥️ Windows、Mac、Linux のクロスプラットフォーム対応
- 📦 環境構築不要ですぐに使用可能
- 🎨 ライト/ダークテーマと透明ウィンドウ対応
- 📝 完全な Markdown レンダリング
- 🤲 簡単な共有機能
# 📝 開発計画
以下の機能と改善に積極的に取り組んでいます:
1. 🎯 **コア機能**
- 選択アシスタント - スマートな内容選択の強化
- ディープリサーチ - 高度な研究能力
- メモリーシステム - グローバルコンテキスト認識
- ドキュメント前処理 - 文書処理の改善
- MCP マーケットプレイス - モデルコンテキストプロトコルエコシステム
2. 🗂 **ナレッジ管理**
- ノートとコレクション
- ダイナミックキャンバス可視化
- OCR 機能
- TTSテキスト読み上げサポート
3. 📱 **プラットフォーム対応**
- HarmonyOS エディション
- Android アプリフェーズ1
- iOS アプリフェーズ1
- マルチウィンドウ対応
- ウィンドウピン留め機能
4. 🔌 **高度な機能**
- プラグインシステム
- ASR音声認識
- アシスタントとトピックの対話機能リファクタリング
[プロジェクトボード](https://github.com/orgs/CherryHQ/projects/7)で進捗を確認し、貢献することができます。
開発計画に影響を与えたいですか?[GitHub ディスカッション](https://github.com/CherryHQ/cherry-studio/discussions)に参加して、アイデアやフィードバックを共有してください!
# 🌈 テーマ
- テーマギャラリーhttps://cherrycss.com
- Aero テーマhttps://github.com/hakadao/CherryStudio-Aero
- PaperMaterial テーマhttps://github.com/rainoffallingstar/CherryStudio-PaperMaterial
- Claude テーマhttps://github.com/bjl101501/CherryStudio-Claudestyle-dynamic
- メープルネオンテーマhttps://github.com/BoningtonChen/CherryStudio_themes
より多くのテーマの PR を歓迎します
# 🤝 貢献
Cherry Studio への貢献を歓迎します!以下の方法で貢献できます:
1. **コードの貢献**:新機能を開発するか、既存のコードを最適化します
2. **バグの修正**:見つけたバグを修正します
3. **問題の管理**GitHub の問題を管理するのを手伝います
4. **製品デザイン**:デザインの議論に参加します
5. **ドキュメントの作成**:ユーザーマニュアルやガイドを改善します
6. **コミュニティの参加**:ディスカッションに参加し、ユーザーを支援します
7. **使用の促進**Cherry Studio を広めます
[ブランチ戦略](branching-strategy-en.md)を参照して貢献ガイドラインを確認してください
## 始め方
1. **リポジトリをフォーク**:フォークしてローカルマシンにクローンします
2. **ブランチを作成**:変更のためのブランチを作成します
3. **変更を提出**:変更をコミットしてプッシュします
4. **プルリクエストを開く**:変更内容と理由を説明します
詳細なガイドラインについては、[貢献ガイド](../CONTRIBUTING.md)をご覧ください。
ご支援と貢献に感謝します!
# 🔗 関連プロジェクト
- [one-api](https://github.com/songquanpeng/one-api)LLM API の管理・配信システム。OpenAI、Azure、Anthropic などの主要モデルに対応し、統一 API インターフェースを提供。API キー管理と再配布に利用可能。
- [ublacklist](https://github.com/iorate/ublacklist)Google 検索結果から特定のサイトを非表示にします
# 🚀 コントリビューター
<a href="https://github.com/CherryHQ/cherry-studio/graphs/contributors">
<img src="https://contrib.rocks/image?repo=CherryHQ/cherry-studio" />
</a>
<br /><br />
# ⭐️ スター履歴
[![Star History Chart](https://api.star-history.com/svg?repos=CherryHQ/cherry-studio&type=Timeline)](https://star-history.com/#CherryHQ/cherry-studio&Timeline)
<!-- リンクと画像 -->
[deepwiki-shield]: https://img.shields.io/badge/Deepwiki-CherryHQ-0088CC?style=plastic
[deepwiki-link]: https://deepwiki.com/CherryHQ/cherry-studio
[twitter-shield]: https://img.shields.io/badge/Twitter-CherryStudioApp-0088CC?style=plastic&logo=x
[twitter-link]: https://twitter.com/CherryStudioHQ
[discord-shield]: https://img.shields.io/badge/Discord-@CherryStudio-0088CC?style=plastic&logo=discord
[discord-link]: https://discord.gg/wez8HtpxqQ
[telegram-shield]: https://img.shields.io/badge/Telegram-@CherryStudioAI-0088CC?style=plastic&logo=telegram
[telegram-link]: https://t.me/CherryStudioAI
<!-- プロジェクト統計 -->
[github-stars-shield]: https://img.shields.io/github/stars/CherryHQ/cherry-studio?style=social
[github-stars-link]: https://github.com/CherryHQ/cherry-studio/stargazers
[github-forks-shield]: https://img.shields.io/github/forks/CherryHQ/cherry-studio?style=social
[github-forks-link]: https://github.com/CherryHQ/cherry-studio/network
[github-release-shield]: https://img.shields.io/github/v/release/CherryHQ/cherry-studio
[github-release-link]: https://github.com/CherryHQ/cherry-studio/releases
[github-contributors-shield]: https://img.shields.io/github/contributors/CherryHQ/cherry-studio
[github-contributors-link]: https://github.com/CherryHQ/cherry-studio/graphs/contributors
<!-- ライセンスとスポンサー -->
[license-shield]: https://img.shields.io/badge/License-AGPLv3-important.svg?style=plastic&logo=gnu
[license-link]: https://www.gnu.org/licenses/agpl-3.0
[commercial-shield]: https://img.shields.io/badge/商用ライセンス-お問い合わせ-white.svg?style=plastic&logoColor=white&logo=telegram&color=blue
[commercial-link]: mailto:license@cherry-ai.com?subject=商業ライセンスについて
[sponsor-shield]: https://img.shields.io/badge/スポンサー-FF6699.svg?style=plastic&logo=githubsponsors&logoColor=white
[sponsor-link]: https://github.com/CherryHQ/cherry-studio/blob/main/docs/sponsor.md

@@ -1,10 +1,40 @@
<div align="right" >
<details>
<summary >🌐 Language</summary>
<div>
<div align="right">
<p><a href="https://openaitx.github.io/view.html?user=CherryHQ&project=cherry-studio&lang=en">English</a></p>
<p><a href="https://openaitx.github.io/view.html?user=CherryHQ&project=cherry-studio&lang=zh-CN">简体中文</a></p>
<p><a href="https://openaitx.github.io/view.html?user=CherryHQ&project=cherry-studio&lang=zh-TW">繁體中文</a></p>
<p><a href="https://openaitx.github.io/view.html?user=CherryHQ&project=cherry-studio&lang=ja">日本語</a></p>
<p><a href="https://openaitx.github.io/view.html?user=CherryHQ&project=cherry-studio&lang=ko">한국어</a></p>
<p><a href="https://openaitx.github.io/view.html?user=CherryHQ&project=cherry-studio&lang=hi">हिन्दी</a></p>
<p><a href="https://openaitx.github.io/view.html?user=CherryHQ&project=cherry-studio&lang=th">ไทย</a></p>
<p><a href="https://openaitx.github.io/view.html?user=CherryHQ&project=cherry-studio&lang=fr">Français</a></p>
<p><a href="https://openaitx.github.io/view.html?user=CherryHQ&project=cherry-studio&lang=de">Deutsch</a></p>
<p><a href="https://openaitx.github.io/view.html?user=CherryHQ&project=cherry-studio&lang=es">Español</a></p>
<p><a href="https://openaitx.github.io/view.html?user=CherryHQ&project=cherry-studio&lang=it">Itapano</a></p>
<p><a href="https://openaitx.github.io/view.html?user=CherryHQ&project=cherry-studio&lang=ru">Русский</a></p>
<p><a href="https://openaitx.github.io/view.html?user=CherryHQ&project=cherry-studio&lang=pt">Português</a></p>
<p><a href="https://openaitx.github.io/view.html?user=CherryHQ&project=cherry-studio&lang=nl">Nederlands</a></p>
<p><a href="https://openaitx.github.io/view.html?user=CherryHQ&project=cherry-studio&lang=pl">Polski</a></p>
<p><a href="https://openaitx.github.io/view.html?user=CherryHQ&project=cherry-studio&lang=ar">العربية</a></p>
<p><a href="https://openaitx.github.io/view.html?user=CherryHQ&project=cherry-studio&lang=fa">فارسی</a></p>
<p><a href="https://openaitx.github.io/view.html?user=CherryHQ&project=cherry-studio&lang=tr">Türkçe</a></p>
<p><a href="https://openaitx.github.io/view.html?user=CherryHQ&project=cherry-studio&lang=vi">Tiếng Việt</a></p>
<p><a href="https://openaitx.github.io/view.html?user=CherryHQ&project=cherry-studio&lang=id">Bahasa Indonesia</a></p>
</div>
</div>
</details>
</div>
<h1 align="center">
<a href="https://github.com/CherryHQ/cherry-studio/releases">
<img src="https://github.com/CherryHQ/cherry-studio/blob/main/build/icon.png?raw=true" width="150" height="150" alt="banner" /><br>
</a>
</h1>
<p align="center">
<a href="https://github.com/CherryHQ/cherry-studio">English</a> | 中文 | <a href="./README.ja.md">日本語</a> | <a href="https://cherry-ai.com">官方网站</a> | <a href="https://docs.cherry-ai.com/cherry-studio-wen-dang/zh-cn">文档</a> | <a href="./dev.md">开发</a> | <a href="https://github.com/CherryHQ/cherry-studio/issues">反馈</a><br>
<a href="https://github.com/CherryHQ/cherry-studio">English</a> | 中文 | <a href="https://cherry-ai.com">官方网站</a> | <a href="https://docs.cherry-ai.com/cherry-studio-wen-dang/zh-cn">文档</a> | <a href="./dev.md">开发</a> | <a href="https://github.com/CherryHQ/cherry-studio/issues">反馈</a><br>
</p>
<!-- 题头徽章组合 -->
@@ -18,19 +48,10 @@
</div>
<!-- 项目统计徽章 -->
<div align="center">
[![][github-stars-shield]][github-stars-link]
[![][github-forks-shield]][github-forks-link]
[![][github-release-shield]][github-release-link]
[![][github-contributors-shield]][github-contributors-link]
</div>
<div align="center">
[![][license-shield]][license-link]
[![][commercial-shield]][commercial-link]
[![][sponsor-shield]][sponsor-link]
@@ -38,9 +59,9 @@
</div>
<div align="center">
<a href="https://hellogithub.com/repository/1605492e1e2a4df3be07abfa4578dd37" target="_blank"><img src="https://api.hellogithub.com/v1/widgets/recommend.svg?rid=1605492e1e2a4df3be07abfa4578dd37" alt="FeaturedHelloGitHub" style="width: 200px; height: 43px;" width="200" height="43" /></a>
<a href="https://trendshift.io/repositories/11772" target="_blank"><img src="https://trendshift.io/api/badge/repositories/11772" alt="kangfenmao%2Fcherry-studio | Trendshift" style="width: 250px; height: 55px;" width="250" height="55"/></a>
<a href="https://www.producthunt.com/posts/cherry-studio?embed=true&utm_source=badge-featured&utm_medium=badge&utm_souce=badge-cherry&#0045;studio" target="_blank"><img src="https://api.producthunt.com/widgets/embed-image/v1/featured.svg?post_id=496640&theme=light" alt="Cherry&#0032;Studio - AI&#0032;Chatbots&#0044;&#0032;AI&#0032;Desktop&#0032;Client | Product Hunt" style="width: 200px; height: 43px;" width="200" height="43" /></a>
<a href="https://hellogithub.com/repository/1605492e1e2a4df3be07abfa4578dd37" target="_blank" style="text-decoration: none"><img src="https://api.hellogithub.com/v1/widgets/recommend.svg?rid=1605492e1e2a4df3be07abfa4578dd37" alt="FeaturedHelloGitHub" width="220" height="55" /></a>
<a href="https://trendshift.io/repositories/11772" target="_blank" style="text-decoration: none"><img src="https://trendshift.io/api/badge/repositories/11772" alt="kangfenmao%2Fcherry-studio | Trendshift" width="220" height="55" /></a>
<a href="https://www.producthunt.com/posts/cherry-studio?embed=true&utm_source=badge-featured&utm_medium=badge&utm_souce=badge-cherry&#0045;studio" target="_blank"><img src="https://api.producthunt.com/widgets/embed-image/v1/featured.svg?post_id=496640&theme=light" alt="Cherry&#0032;Studio - AI&#0032;Chatbots&#0044;&#0032;AI&#0032;Desktop&#0032;Client | Product Hunt" width="220" height="55" /></a>
</div>
# 🍒 Cherry Studio
@@ -51,14 +72,6 @@ Cherry Studio 是一款支持多个大语言模型LLM服务商的桌面客
❤️ 喜欢 Cherry Studio? 点亮小星星 🌟 或 [赞助开发者](sponsor.md)! ❤️
# GitCode✖Cherry Studio【新源力】贡献挑战赛
<p align="center">
<a href="https://gitcode.com/CherryHQ/cherry-studio/discussion/2">
<img src="https://raw.gitcode.com/user-images/assets/5007375/8d8d7559-1141-4691-b90f-d154558c6896/cherry-studio-gitcode.jpg" width="100%" alt="banner" />
</a>
</p>
# 📖 使用教程
https://docs.cherry-ai.com
@@ -177,10 +190,82 @@ https://docs.cherry-ai.com
3. **提交更改**:提交并推送您的更改
4. **打开 Pull Request**:描述您的更改和原因
有关更详细的指南,请参阅我们的 [贡献指南](./CONTRIBUTING.zh.md)
有关更详细的指南,请参阅我们的 [贡献指南](CONTRIBUTING.zh.md)
感谢您的支持和贡献!
# 🔧 开发者共创计划
我们正在启动 Cherry Studio 开发者共创计划,旨在为开源生态系统构建一个健康、正向反馈的循环。我们相信,优秀的软件是通过协作构建的,每一个合并的拉取请求都为项目注入新的生命力。
我们诚挚地邀请您加入我们的贡献者队伍,与我们一起塑造 Cherry Studio 的未来。
## 贡献者奖励计划
为了回馈我们的核心贡献者并创造良性循环,我们建立了以下长期激励计划。
**该计划的首个跟踪周期将是 2025 年第三季度7月、8月、9月。此周期的奖励将在 10月1日 发放。**
在任何跟踪周期内(例如,首个周期的 7月1日 至 9月30日任何为 Cherry Studio 在 GitHub 上的开源项目贡献超过 **30 个有意义提交** 的开发者都有资格获得以下福利:
- **Cursor 订阅赞助**:获得 **70 美元** 的 [Cursor](https://cursor.sh/) 订阅积分或报销,让 AI 成为您最高效的编码伙伴。
- **无限模型访问**:获得 **DeepSeek****Qwen** 模型的 **无限次** API 调用。
- **前沿技术访问**:享受偶尔的特殊福利,包括 **Claude**、**Gemini** 和 **OpenAI** 等模型的 API 访问权限,让您始终站在技术前沿。
## 共同成长与未来规划
活跃的社区是任何可持续开源项目背后的推动力。随着 Cherry Studio 的发展,我们的奖励计划也将随之发展。我们致力于持续将我们的福利与行业内最优秀的工具和资源保持一致。这确保我们的核心贡献者获得有意义的支持,创造一个开发者、社区和项目共同成长的正向循环。
**展望未来,该项目还将采取越来越开放的态度来回馈整个开源社区。**
## 如何开始?
我们期待您的第一个拉取请求!
您可以从探索我们的仓库开始,选择一个 `good first issue`,或者提出您自己的改进建议。每一个提交都是开源精神的体现。
感谢您的关注和贡献。
让我们一起建设。
# 🏢 企业版
在社区版的基础上,我们自豪地推出 **Cherry Studio 企业版**——一个为现代团队和企业设计的私有部署 AI 生产力与管理平台。
企业版通过集中管理 AI 资源、知识和数据,解决了团队协作中的核心挑战。它赋能组织提升效率、促进创新并确保合规,同时在安全环境中保持对数据的 100% 控制。
## 核心优势
- **统一模型管理**:集中整合和管理各种基于云的大语言模型(如 OpenAI、Anthropic、Google Gemini和本地部署的私有模型。员工可以开箱即用无需单独配置。
- **企业级知识库**:构建、管理和分享全团队的知识库。确保知识得到保留且一致,使团队成员能够基于统一准确的信息与 AI 交互。
- **细粒度访问控制**:通过统一的管理后台轻松管理员工账户,并为不同模型、知识库和功能分配基于角色的权限。
- **完全私有部署**:在您的本地服务器或私有云上部署整个后端服务,确保您的数据 100% 私有且在您的控制之下,满足最严格的安全和合规标准。
- **可靠的后端服务**:提供稳定的 API 服务、企业级数据备份和恢复机制,确保业务连续性。
## ✨ 在线演示
> 🚧 **公开测试版通知**
>
> 企业版目前处于早期公开测试阶段,我们正在积极迭代和优化其功能。我们知道它可能还不够完全稳定。如果您在试用过程中遇到任何问题或有宝贵建议,我们非常感谢您能通过邮件联系我们提供反馈。
**🔗 [Cherry Studio 企业版](https://www.cherry-ai.com/enterprise)**
## 版本对比
| 功能 | 社区版 | 企业版 |
| :----------- | :---------------------- | :--------------------------------------------------------------------------------------------- |
| **开源** | ✅ 是 | ⭕️ 部分开源,对客户开放 |
| **成本** | 个人使用免费 / 商业授权 | 买断 / 订阅费用 |
| **管理后台** | — | ● 集中化**模型**访问<br>**员工**管理<br>● 共享**知识库**<br>● **访问**控制<br>● **数据**备份 |
| **服务器** | — | ✅ 专用私有部署 |
## 获取企业版
我们相信企业版将成为您团队的 AI 生产力引擎。如果您对 Cherry Studio 企业版感兴趣,希望了解更多信息、请求报价或安排演示,请联系我们。
- **商业咨询与购买**
**📧 [bd@cherry-ai.com](mailto:bd@cherry-ai.com)**
# 🔗 相关项目
- [one-api](https://github.com/songquanpeng/one-api)LLM API 管理及分发系统,支持 OpenAI、Azure、Anthropic 等主流模型,统一 API 接口,可用于密钥管理与二次分发。
@@ -194,34 +279,43 @@ https://docs.cherry-ai.com
</a>
<br /><br />
# 📊 GitHub 统计
![Stats](https://repobeats.axiom.co/api/embed/a693f2e5f773eed620f70031e974552156c7f397.svg 'Repobeats analytics image')
# ⭐️ Star 记录
[![Star History Chart](https://api.star-history.com/svg?repos=CherryHQ/cherry-studio&type=Timeline)](https://star-history.com/#CherryHQ/cherry-studio&Timeline)
<a href="https://www.star-history.com/#CherryHQ/cherry-studio&Date">
<picture>
<source media="(prefers-color-scheme: dark)" srcset="https://api.star-history.com/svg?repos=CherryHQ/cherry-studio&type=Date&theme=dark" />
<source media="(prefers-color-scheme: light)" srcset="https://api.star-history.com/svg?repos=CherryHQ/cherry-studio&type=Date" />
<img alt="Star History Chart" src="https://api.star-history.com/svg?repos=CherryHQ/cherry-studio&type=Date" />
</picture>
</a>
<!-- Links & Images -->
[deepwiki-shield]: https://img.shields.io/badge/Deepwiki-CherryHQ-0088CC?style=plastic
[deepwiki-shield]: https://img.shields.io/badge/Deepwiki-CherryHQ-0088CC
[deepwiki-link]: https://deepwiki.com/CherryHQ/cherry-studio
[twitter-shield]: https://img.shields.io/badge/Twitter-CherryStudioApp-0088CC?style=plastic&logo=x
[twitter-shield]: https://img.shields.io/badge/Twitter-CherryStudioApp-0088CC?logo=x
[twitter-link]: https://twitter.com/CherryStudioHQ
[discord-shield]: https://img.shields.io/badge/Discord-@CherryStudio-0088CC?style=plastic&logo=discord
[discord-shield]: https://img.shields.io/badge/Discord-@CherryStudio-0088CC?logo=discord
[discord-link]: https://discord.gg/wez8HtpxqQ
[telegram-shield]: https://img.shields.io/badge/Telegram-@CherryStudioAI-0088CC?style=plastic&logo=telegram
[telegram-shield]: https://img.shields.io/badge/Telegram-@CherryStudioAI-0088CC?logo=telegram
[telegram-link]: https://t.me/CherryStudioAI
<!-- 项目统计徽章 -->
[github-stars-shield]: https://img.shields.io/github/stars/CherryHQ/cherry-studio?style=social
[github-stars-link]: https://github.com/CherryHQ/cherry-studio/stargazers
[github-forks-shield]: https://img.shields.io/github/forks/CherryHQ/cherry-studio?style=social
[github-forks-link]: https://github.com/CherryHQ/cherry-studio/network
[github-release-shield]: https://img.shields.io/github/v/release/CherryHQ/cherry-studio
[github-release-link]: https://github.com/CherryHQ/cherry-studio/releases
[github-contributors-shield]: https://img.shields.io/github/contributors/CherryHQ/cherry-studio
[github-contributors-link]: https://github.com/CherryHQ/cherry-studio/graphs/contributors
<!-- 许可和赞助徽章 -->
[license-shield]: https://img.shields.io/badge/License-AGPLv3-important.svg?style=plastic&logo=gnu
[license-shield]: https://img.shields.io/badge/License-AGPLv3-important.svg?logo=gnu
[license-link]: https://www.gnu.org/licenses/agpl-3.0
[commercial-shield]: https://img.shields.io/badge/商用授权-联系-white.svg?style=plastic&logoColor=white&logo=telegram&color=blue
[commercial-shield]: https://img.shields.io/badge/商用授权-联系-white.svg?logoColor=white&logo=telegram&color=blue
[commercial-link]: mailto:license@cherry-ai.com?subject=商业授权咨询
[sponsor-shield]: https://img.shields.io/badge/赞助支持-FF6699.svg?style=plastic&logo=githubsponsors&logoColor=white
[sponsor-shield]: https://img.shields.io/badge/赞助支持-FF6699.svg?logo=githubsponsors&logoColor=white
[sponsor-link]: https://github.com/CherryHQ/cherry-studio/blob/main/docs/sponsor.md

View File

@ -16,6 +16,8 @@ Cherry Studio implements a structured branching strategy to maintain code qualit
- Only accepts documentation updates and bug fixes
- Thoroughly tested before production deployment
For details about the `testplan` branch used in the Test Plan, please refer to the [Test Plan](testplan-en.md).
## Contributing Branches
When contributing to Cherry Studio, please follow these guidelines:

View File

@ -16,6 +16,8 @@ Cherry Studio 采用结构化的分支策略来维护代码质量并简化开发
- 只接受文档更新和 bug 修复
- 经过完整测试后可以发布到生产环境
关于测试计划所使用的`testplan`分支,请查阅[测试计划](testplan-zh.md)。
## 贡献分支
在为 Cherry Studio 贡献代码时,请遵循以下准则:

View File

@ -0,0 +1,222 @@
# Cherry Studio 记忆功能指南
## 功能介绍
Cherry Studio 的记忆功能是一个强大的工具,能够帮助 AI 助手记住对话中的重要信息、用户偏好和上下文。通过记忆功能,您的 AI 助手可以:
- 📝 **记住重要信息**:自动从对话中提取并存储关键事实和信息
- 🧠 **个性化响应**:基于存储的记忆提供更加个性化和相关的回答
- 🔍 **智能检索**:在需要时自动搜索相关记忆,增强对话的连贯性
- 👥 **多用户支持**:为不同用户维护独立的记忆上下文
记忆功能特别适用于需要长期保持上下文的场景,例如个人助手、客户服务、教育辅导等。
## 如何启用记忆功能
### 1. 全局配置(首次设置)
在使用记忆功能之前,您需要先进行全局配置:
1. 点击侧边栏的 **记忆** 图标(记忆棒图标)进入记忆管理页面
2. 点击右上角的 **更多** 按钮(三个点),选择 **设置**
3. 在设置弹窗中配置以下必要项:
- **LLM 模型**:选择用于处理记忆的语言模型(推荐使用 GPT-4 或 Claude 等高级模型)
- **嵌入模型**:选择用于生成向量嵌入的模型(如 text-embedding-3-small
- **嵌入维度**:输入嵌入模型的维度(通常为 1536
4. 点击 **确定** 保存配置
> ⚠️ **注意**:嵌入模型和维度一旦设置后无法更改,请谨慎选择。
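下面给出一个仅作示意的配置结构草图(字段名与类型均为假设,并非 Cherry Studio 的实际代码),帮助理解上述三项必填配置之间的关系:

```ts
// 示意:记忆功能全局配置的大致形状(字段名为假设)
interface MemoryConfigSketch {
  llmModel: string // 用于事实提取与记忆更新的语言模型
  embeddingModel: string // 用于向量化的嵌入模型,保存后不可更改
  embeddingDimensions: number // 嵌入维度,需与嵌入模型匹配
  customFactExtractionPrompt?: string // 可选:自定义事实提取提示词
  customUpdateMemoryPrompt?: string // 可选:自定义记忆更新提示词
}

const exampleConfig: MemoryConfigSketch = {
  llmModel: 'gpt-4o',
  embeddingModel: 'text-embedding-3-small',
  embeddingDimensions: 1536
}
```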
### 2. 为助手启用记忆
完成全局配置后,您可以为特定助手启用记忆功能:
1. 进入 **助手** 页面
2. 选择要启用记忆的助手,点击 **编辑**
3. 在助手设置中找到 **记忆** 部分
4. 打开记忆功能开关
5. 保存助手设置
启用后,该助手将在对话过程中自动提取和使用记忆。
## 使用方法
### 查看记忆
1. 点击侧边栏的 **记忆** 图标进入记忆管理页面
2. 您可以看到所有存储的记忆卡片,包括:
- 记忆内容
- 创建时间
- 所属用户
### 添加记忆
手动添加记忆有两种方式:
**方式一:在记忆管理页面添加**
1. 点击右上角的 **添加记忆** 按钮
2. 在弹窗中输入记忆内容
3. 点击 **添加** 保存
**方式二:在对话中自动提取**
- 当助手启用记忆功能后,系统会自动从对话中提取重要信息并存储为记忆
### 编辑记忆
1. 在记忆卡片上点击 **更多** 按钮(三个点)
2. 选择 **编辑**
3. 修改记忆内容
4. 点击 **保存**
### 删除记忆
1. 在记忆卡片上点击 **更多** 按钮
2. 选择 **删除**
3. 确认删除操作
## 记忆搜索
记忆管理页面提供了强大的搜索功能:
1. 在页面顶部的搜索框中输入关键词
2. 系统会实时过滤显示匹配的记忆
3. 搜索支持模糊匹配,可以搜索记忆内容的任何部分
## 用户管理
记忆功能支持多用户,您可以为不同的用户维护独立的记忆库:
### 切换用户
1. 在记忆管理页面,点击右上角的用户选择器
2. 选择要切换到的用户
3. 页面会自动加载该用户的记忆
### 添加新用户
1. 点击用户选择器
2. 选择 **添加新用户**
3. 输入用户 ID支持字母、数字、下划线和连字符
4. 点击 **添加**
### 删除用户
1. 切换到要删除的用户
2. 点击右上角的 **更多** 按钮
3. 选择 **删除用户**
4. 确认删除(注意:这将删除该用户的所有记忆)
> 💡 **提示**默认用户default-user无法删除。
## 设置说明
### LLM 模型
- 用于处理记忆提取和更新的语言模型
- 建议选择能力较强的模型以获得更好的记忆提取效果
- 可随时更改
### 嵌入模型
- 用于将文本转换为向量,支持语义搜索
- 一旦设置后无法更改(为了保证现有记忆的兼容性)
- 推荐使用 OpenAI 的 text-embedding 系列模型
### 嵌入维度
- 嵌入向量的维度,需要与选择的嵌入模型匹配
- 常见维度:
- text-embedding-3-small: 1536
- text-embedding-3-large: 3072
- text-embedding-ada-002: 1536
### 自定义提示词(可选)
- **事实提取提示词**:自定义如何从对话中提取信息
- **记忆更新提示词**:自定义如何更新现有记忆
## 最佳实践
### 1. 合理组织记忆
- 保持记忆简洁明了,每条记忆专注于一个具体信息
- 使用清晰的语言描述事实,避免模糊表达
- 定期审查和清理过时或不准确的记忆
### 2. 多用户场景
- 为不同的使用场景创建独立用户(如工作、个人、学习等)
- 使用有意义的用户 ID便于识别和管理
- 定期备份重要用户的记忆数据
### 3. 模型选择建议
- **LLM 模型**GPT-4、Claude 3 等高级模型能更准确地提取和理解信息
- **嵌入模型**:选择与您的主要使用语言匹配的模型
### 4. 性能优化
- 避免存储过多冗余记忆,这可能影响搜索性能
- 定期整理和合并相似的记忆
- 对于大量记忆的场景,考虑按主题或时间进行分类管理
## 常见问题
### Q: 为什么我无法启用记忆功能?
A: 请确保您已经完成全局配置,包括选择 LLM 模型和嵌入模型。
### Q: 记忆会自动同步到所有助手吗?
A: 不会。每个助手的记忆功能需要单独启用,且记忆是按用户隔离的。
### Q: 如何导出我的记忆数据?
A: 目前系统暂不支持直接导出功能,但所有记忆都存储在本地数据库中。
### Q: 删除的记忆可以恢复吗?
A: 删除操作是永久的,无法恢复。建议在删除前仔细确认。
### Q: 记忆功能会影响对话速度吗?
A: 记忆功能在后台异步处理,不会明显影响对话响应速度。但过多的记忆可能会略微增加搜索时间。
### Q: 如何清空所有记忆?
A: 您可以删除当前用户并重新创建,或者手动删除所有记忆条目。
## 注意事项
### 隐私保护
- 所有记忆数据都存储在您的本地设备上,不会上传到云端
- 请勿在记忆中存储敏感信息(如密码、私钥等)
- 定期审查记忆内容,确保没有意外存储的隐私信息
### 数据安全
- 记忆数据存储在本地数据库中
- 建议定期备份重要数据
- 更换设备时请注意迁移记忆数据
### 使用限制
- 单条记忆的长度建议不超过 500 字
- 每个用户的记忆数量建议控制在 1000 条以内
- 过多的记忆可能影响系统性能
## 技术细节
记忆功能使用了先进的 RAG检索增强生成技术
1. **信息提取**:使用 LLM 从对话中智能提取关键信息
2. **向量化存储**:通过嵌入模型将文本转换为向量,支持语义搜索
3. **智能检索**:在对话时自动搜索相关记忆,提供给 AI 作为上下文
4. **持续学习**:随着对话进行,不断更新和完善记忆库
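下面用一段示意性的伪代码勾勒这四个步骤的串联方式(接口名与参数均为假设,仅用于帮助理解,并非实际实现):

```ts
// 示意:记忆的写入与检索流程(接口名为假设)
async function rememberAndRecall(memoryService: any, userId: string) {
  // 1 & 2. 信息提取 + 向量化存储:从对话消息中提取事实并写入记忆库
  await memoryService.add([{ role: 'user', content: '我平时主要用 TypeScript 开发' }], { userId })

  // 3. 智能检索:按语义搜索与当前问题相关的记忆
  const related = await memoryService.search('用户常用的编程语言是什么?', { userId, limit: 5 })

  // 4. 将检索结果作为上下文提供给 LLM随对话持续更新记忆库
  return related
}
```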
---
💡 **提示**:记忆功能是 Cherry Studio 的高级特性,合理使用可以大大提升 AI 助手的智能程度和用户体验。如有更多问题,欢迎查阅文档或联系支持团队。

View File

@ -0,0 +1,11 @@
# 数据库设置字段
此文档包含部分字段的数据类型说明。
## 字段
| 字段名 | 类型 | 说明 |
| ------------------------------ | ------------------------------ | ------------ |
| `translate:target:language` | `LanguageCode` | 翻译目标语言 |
| `translate:source:language` | `LanguageCode` | 翻译源语言 |
| `translate:bidirectional:pair` | `[LanguageCode, LanguageCode]` | 双向翻译对 |
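下面是一个示意性的类型草图(`settings` 的读写接口为假设,仅用于说明上述字段的类型约束):

```ts
// 示意:用类型约束访问上述设置字段(读写接口为假设)
type LanguageCode = string // 例如 'zh-cn'、'en-us'

interface TranslateSettings {
  'translate:target:language': LanguageCode
  'translate:source:language': LanguageCode
  'translate:bidirectional:pair': [LanguageCode, LanguageCode]
}

// 假设存在一个简单的异步键值存储
declare const settings: {
  get<K extends keyof TranslateSettings>(key: K): Promise<TranslateSettings[K] | undefined>
  set<K extends keyof TranslateSettings>(key: K, value: TranslateSettings[K]): Promise<void>
}

async function example() {
  await settings.set('translate:bidirectional:pair', ['zh-cn', 'en-us'])
  return await settings.get('translate:target:language')
}
```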

99
docs/testplan-en.md Normal file
View File

@ -0,0 +1,99 @@
# Test Plan
To provide users with a more stable application experience and faster iteration speed, Cherry Studio has launched the "Test Plan".
## User Guide
The Test Plan is divided into the RC channel and the Beta channel, with the following differences:
- **RC (Release Candidate)**: The features are stable, with fewer bugs, and it is close to the official release.
- **Beta**: Features may change at any time, and there may be more bugs, but users can experience future features earlier.
Users can enable the "Test Plan" and select the version channel in the software's `Settings` > `About`. Please note that the versions in the "Test Plan" cannot guarantee data consistency, so be sure to back up your data before using them.
Users are welcome to submit issues or provide feedback through other channels for any bugs encountered during testing. Your feedback is very important to us.
## Developer Guide
### Participating in the Test Plan
Developers should submit `PRs` according to the [Contributor Guide](../CONTRIBUTING.md) (and ensure the target branch is `main`). The repository maintainers will evaluate whether the `PR` should be included in the Test Plan based on factors such as the impact of the feature on the application, its importance, and whether broader testing is needed.
If the `PR` is added to the Test Plan, the repository maintainers will:
- Notify the `PR` submitter.
- Set the PR to `draft` status (to avoid accidental merging into `main` before testing is complete).
- Set the `milestone` to the specific Test Plan version.
- Modify the `PR` title.
During participation in the Test Plan, `PR` submitters should:
- Keep the `PR` branch synchronized with the latest `main` (i.e., the `PR` branch should always be based on the latest `main` code).
- Ensure the `PR` branch is conflict-free.
- Actively respond to comments & reviews and fix bugs.
- Enable maintainers to modify the `PR` branch to allow for bug fixes at any time.
Inclusion in the Test Plan does not guarantee the final merging of the `PR`. It may be shelved due to immature features or poor testing feedback.
### Test Plan Lead
A maintainer will be assigned as the lead for a specific version (e.g., `1.5.0-rc`). The responsibilities of the Test Plan lead include:
- Determining whether a `PR` meets the Test Plan requirements and deciding whether it should be included in the current Test Plan.
- Modifying the status of `PRs` added to the Test Plan and communicating relevant matters with the `PR` submitter.
- Before the Test Plan release, merging the branches of `PRs` added to the Test Plan (using squash merge) into the corresponding version branch of `testplan` and resolving conflicts.
- Ensuring the `testplan` branch is synchronized with the latest `main`.
- Overseeing the Test Plan release.
## In-Depth Understanding
### About `PRs`
A `PR` is a collection of a specific branch (and commits), comments, reviews, and other information, and it is the **smallest management unit** of the Test Plan.
Compared to submitting all features to a single branch, the Test Plan manages features through `PRs`, which offers greater flexibility and efficiency:
- Features can be added or removed between different versions of the Test Plan without cumbersome `revert` operations.
- Clear feature boundaries and responsibilities are established. Bug fixes are completed within their respective `PRs`, isolating cross-impact and better tracking progress.
- The `PR` submitter is responsible for resolving conflicts with the latest `main`. The Test Plan lead is responsible for resolving conflicts between `PR` branches. However, since features added to the Test Plan are relatively independent (in other words, if a feature has broad implications, it should be independently included in the Test Plan), conflicts are generally few or simple.
### The `testplan` Branch
The `testplan` branch is a **temporary** branch used for Test Plan releases.
Note:
- **Do not develop based on this branch**. It may change or even be deleted at any time, and there is no guarantee of commit completeness or order.
- **Do not submit `commits` or `PRs` to this branch**, as they will not be retained.
- The `testplan` branch is always based on the latest `main` branch (not on a released version), with features added on top.
#### RC Branch
Branch name: `testplan/rc/x.y.z`
Used for RC releases, where `x.y.z` is the target version number. Note that whether it is rc.1 or rc.5, as long as the major version number is `x.y.z`, it is completed in this branch.
Generally, the version number for releases from this branch is named `x.y.z-rc.n`.
#### Beta Branch
Branch name: `testplan/beta/x.y.z`
Used for Beta releases, where `x.y.z` is the target version number. Note that whether it is beta.1 or beta.5, as long as the major version number is `x.y.z`, it is completed in this branch.
Generally, the version number for releases from this branch is named `x.y.z-beta.n`.
### Version Rules
The application version number for the Test Plan is: `x.y.z-CHA.n`, where:
- `x.y.z` is the conventional version number, referred to here as the **target version number**.
- `CHA` is the channel code (Channel), currently divided into `rc` and `beta`.
- `n` is the release number, starting from `1`.
Examples of complete version numbers: `1.5.0-rc.3`, `1.5.1-beta.1`, `1.6.0-beta.6`.
The **target version number** of the Test Plan points to the official version number where these features are expected to be added. For example:
- `1.5.0-rc.3` means this is a preview of the `1.5.0` official release (the current latest official release is `1.4.9`, and `1.5.0` has not yet been officially released).
- `1.5.1-beta.1` means this is a beta version of the `1.5.1` official release (the current latest official release is `1.5.0`, and `1.5.1` has not yet been officially released).
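As a rough illustration of the rule above, a Test Plan version string can be split into its target version, channel, and release number. The helper below is a sketch only (the function name and regex are assumptions, not part of the project):

```ts
// Sketch: parsing an x.y.z-CHA.n Test Plan version string
type TestPlanVersion = { target: string; channel: 'rc' | 'beta'; release: number }

function parseTestPlanVersion(version: string): TestPlanVersion | null {
  const match = /^(\d+\.\d+\.\d+)-(rc|beta)\.(\d+)$/.exec(version)
  if (!match) return null
  return { target: match[1], channel: match[2] as 'rc' | 'beta', release: Number(match[3]) }
}

// parseTestPlanVersion('1.5.0-rc.3')   -> { target: '1.5.0', channel: 'rc', release: 3 }
// parseTestPlanVersion('1.6.0-beta.6') -> { target: '1.6.0', channel: 'beta', release: 6 }
```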

99
docs/testplan-zh.md Normal file
View File

@ -0,0 +1,99 @@
# 测试计划
为了给用户提供更稳定的应用体验并提供更快的迭代速度Cherry Studio推出“测试计划”。
## 用户指南
测试计划分为RC版通道和Beta版通道,区别在于:
- **RC版预览版**RC即Release Candidate功能已经稳定BUG较少接近正式版
- **Beta版测试版**功能可能随时变化BUG较多可以较早体验未来功能
用户可以在软件的`设置`-`关于`中,开启“测试计划”并选择版本通道。请注意“测试计划”的版本无法保证数据的一致性,请使用前一定要备份数据。
用户在测试过程中发现的BUG欢迎提交issue或通过其他渠道反馈。用户的反馈对我们非常重要。
## 开发者指南
### 参与测试计划
开发者按照[贡献者指南](CONTRIBUTING.zh.md)要求正常提交`PR`并注意提交target为`main`)。仓库维护者会综合考虑(例如该功能对应用的影响程度,功能的重要性,是否需要更广泛的测试等),决定该`PR`是否应加入测试计划。
若该`PR`加入测试计划,仓库维护者会做如下操作:
- 通知`PR`提交人
- 设置PR为`draft`状态(避免在测试完成前意外并入`main`
- `milestone`设置为具体测试计划版本
- 修改`PR`标题
`PR`提交人在参与测试计划过程中,应做到:
- 保持`PR`分支与最新`main`同步(即`PR`分支总是应基于最新`main`代码)
- 保持`PR`分支为无冲突状态
- 积极响应 comments & reviews修复bug
- 开启维护者可以修改`PR`分支的权限以便维护者能随时修改BUG
加入测试计划并不保证`PR`的最终合并,也有可能由于功能不成熟或测试反馈不佳而搁置
### 测试计划负责人
某个维护者会被指定为某个版本期间(例如`1.5.0-rc`)的测试计划负责人。测试计划负责人的工作为:
- 判断某个`PR`是否符合测试计划要求,并决定是否应合入当期测试计划
- 修改加入测试计划的`PR`状态,并与`PR`提交人沟通相关事宜
- 在测试计划发版前,将加入测试计划的`PR`分支逐一合并采用squash merge至`testplan`对应版本分支,并解决冲突
- 保证`testplan`分支与最新`main`同步
- 负责测试计划发版
## 深入理解
### 关于`PR`
`PR`是特定分支及commits、comments、reviews等各种信息的集合也是测试计划的**最小管理单元**。
相比将所有功能都提交到某个分支,测试计划通过`PR`来管理功能,这可以带来极大的灵活度和效率:
- 测试计划的各个版本间,可以随意增减功能,而无需繁琐的`revert`操作
- 明确了功能边界和负责人bug修复在各自`PR`中完成,隔离了交叉影响,也能更好观察进度
- `PR`提交人负责与最新`main`之间的冲突;测试计划负责人负责各`PR`分支之间的冲突,但因加入测试计划的各功能相对比较独立(换句话说,如果功能牵涉较广,则应独立上测试计划),冲突一般比较少或简单。
### `testplan`分支
`testplan`分支是用于测试计划发版所用的**临时**分支。
注意:
- **请勿基于该分支开发**。该分支随时会变化甚至删除且并不保证commit的完整和顺序。
- **请勿向该分支提交`commit`及`PR`**,将不会得到保留
- `testplan`分支总是基于最新`main`分支(而不是基于已发布版本),在其之上添加功能
#### RC版分支
分支名称:`testplan/rc/x.y.z`
用于RC版的发版x.y.z为目标版本号注意无论是rc.1还是rc.5只要主版本号为x.y.z都在该分支完成。
一般而言,该分支发版的版本号命名为`x.y.z-rc.n`
#### Beta版分支
分支名称:`testplan/beta/x.y.z`
用于Beta版的发版x.y.z为目标版本号注意无论是beta.1还是beta.5只要主版本号为x.y.z都在该分支完成。
一般而言,该分支发版的版本号命名为`x.y.z-beta.n`
### 版本规则
测试计划的应用版本号为:`x.y.z-CHA.n`,其中:
- `x.y.z`为一般意义上的版本号,在这里称为**目标版本号**
- `CHA`为通道号Channel现在分为`rc`和`beta`
- `n`为发版编号,从`1`计数
完整的版本号举例:`1.5.0-rc.3`、`1.5.1-beta.1`、`1.6.0-beta.6`
测试计划的**目标版本号**指向希望添加这些功能的正式版版本号。例如:
- `1.5.0-rc.3`是指,这是`1.5.0`正式版的预览版(当前最新正式版是`1.4.9`,而`1.5.0`正式版还未发布)
- `1.5.1-beta.1`是指,这是`1.5.1`正式版的测试版(当前最新正式版是`1.5.0`,而`1.5.1`正式版还未发布)

View File

@ -11,6 +11,11 @@ electronLanguages:
- en # for macOS
directories:
buildResources: build
protocols:
- name: Cherry Studio
schemes:
- cherrystudio
files:
- '**/*'
- '!**/{.vscode,.yarn,.yarn-lock,.github,.cursorrules,.prettierrc}'
@ -48,7 +53,11 @@ files:
- '!node_modules/pdf-parse/lib/pdf.js/{v1.9.426,v1.10.88,v2.0.550}'
- '!node_modules/mammoth/{mammoth.browser.js,mammoth.browser.min.js}'
- '!node_modules/selection-hook/prebuilds/**/*' # we rebuild .node, don't use prebuilds
- '!**/*.{h,iobj,ipdb,tlog,recipe,vcxproj,vcxproj.filters}' # filter .node build files
- '!node_modules/pdfjs-dist/web/**/*'
- '!node_modules/pdfjs-dist/legacy/web/*'
- '!node_modules/selection-hook/node_modules' # we don't need what in the node_modules dir
- '!node_modules/selection-hook/src' # we don't need source files
- '!**/*.{h,iobj,ipdb,tlog,recipe,vcxproj,vcxproj.filters,Makefile,*.Makefile}' # filter .node build files
asarUnpack:
- resources/**
- '**/*.{metal,exp,lib}'
@ -108,10 +117,8 @@ afterSign: scripts/notarize.js
artifactBuildCompleted: scripts/artifact-build-completed.js
releaseInfo:
releaseNotes: |
界面优化:优化多处界面样式,气泡样式改版,自动调整代码预览边栏宽度
知识库:修复知识库引用不显示问题,修复部分嵌入模型适配问题
备份与恢复:修复超过 2GB 大文件无法恢复问题
文件处理:添加 .doc 文件支持
划词助手:支持自定义 CSS 样式
MCP基于 Pyodide 实现 Python MCP 服务
其他错误修复和优化
新增全局记忆功能
MCP 支持 DXT 格式导入
全局快捷键支持 Linux 系统
模型思考过程增加动画效果
错误修复和性能优化

View File

@ -1,4 +1,5 @@
import react from '@vitejs/plugin-react-swc'
import { CodeInspectorPlugin } from 'code-inspector-plugin'
import { defineConfig, externalizeDepsPlugin } from 'electron-vite'
import { resolve } from 'path'
import { visualizer } from 'rollup-plugin-visualizer'
@ -7,6 +8,9 @@ const visualizerPlugin = (type: 'renderer' | 'main') => {
return process.env[`VISUALIZER_${type.toUpperCase()}`] ? [visualizer({ open: true })] : []
}
const isDev = process.env.NODE_ENV === 'development'
const isProd = process.env.NODE_ENV === 'production'
export default defineConfig({
main: {
plugins: [externalizeDepsPlugin(), ...visualizerPlugin('main')],
@ -19,18 +23,17 @@ export default defineConfig({
},
build: {
rollupOptions: {
external: ['@libsql/client', 'bufferutil', 'utf-8-validate'],
external: ['@libsql/client', 'bufferutil', 'utf-8-validate', '@cherrystudio/mac-system-ocr'],
output: {
// 彻底禁用代码分割 - 返回 null 强制单文件打包
manualChunks: undefined,
// 内联所有动态导入,这是关键配置
inlineDynamicImports: true
manualChunks: undefined, // 彻底禁用代码分割 - 返回 null 强制单文件打包
inlineDynamicImports: true // 内联所有动态导入,这是关键配置
}
},
sourcemap: process.env.NODE_ENV === 'development'
sourcemap: isDev
},
esbuild: isProd ? { legalComments: 'none' } : {},
optimizeDeps: {
noDiscovery: process.env.NODE_ENV === 'development'
noDiscovery: isDev
}
},
preload: {
@ -41,7 +44,7 @@ export default defineConfig({
}
},
build: {
sourcemap: process.env.NODE_ENV === 'development'
sourcemap: isDev
}
},
renderer: {
@ -59,6 +62,7 @@ export default defineConfig({
]
]
}),
...(isDev ? [CodeInspectorPlugin({ bundler: 'vite' })] : []), // 只在开发环境下启用 CodeInspectorPlugin
...visualizerPlugin('renderer')
],
resolve: {
@ -86,6 +90,7 @@ export default defineConfig({
selectionAction: resolve(__dirname, 'src/renderer/selectionAction.html')
}
}
}
},
esbuild: isProd ? { legalComments: 'none' } : {}
}
})

View File

@ -26,7 +26,7 @@ export default defineConfig([
'simple-import-sort/exports': 'error',
'unused-imports/no-unused-imports': 'error',
'@eslint-react/no-prop-types': 'error',
'prettier/prettier': ['error', { endOfLine: 'auto' }]
'prettier/prettier': ['error']
}
},
// Configuration for ensuring compatibility with the original ESLint(8.x) rules

View File

@ -1,6 +1,6 @@
{
"name": "CherryStudio",
"version": "1.4.7",
"version": "1.5.1",
"private": true,
"description": "A powerful AI assistant for producer.",
"main": "./out/main/index.js",
@ -27,12 +27,12 @@
"build:win": "dotenv npm run build && electron-builder --win --x64 --arm64",
"build:win:x64": "dotenv npm run build && electron-builder --win --x64",
"build:win:arm64": "dotenv npm run build && electron-builder --win --arm64",
"build:mac": "dotenv electron-vite build && electron-builder --mac --arm64 --x64",
"build:mac:arm64": "dotenv electron-vite build && electron-builder --mac --arm64",
"build:mac:x64": "dotenv electron-vite build && electron-builder --mac --x64",
"build:linux": "dotenv electron-vite build && electron-builder --linux --x64 --arm64",
"build:linux:arm64": "dotenv electron-vite build && electron-builder --linux --arm64",
"build:linux:x64": "dotenv electron-vite build && electron-builder --linux --x64",
"build:mac": "dotenv npm run build && electron-builder --mac --arm64 --x64",
"build:mac:arm64": "dotenv npm run build && electron-builder --mac --arm64",
"build:mac:x64": "dotenv npm run build && electron-builder --mac --x64",
"build:linux": "dotenv npm run build && electron-builder --linux --x64 --arm64",
"build:linux:arm64": "dotenv npm run build && electron-builder --linux --arm64",
"build:linux:x64": "dotenv npm run build && electron-builder --linux --x64",
"build:npm": "node scripts/build-npm.js",
"release": "node scripts/version.js",
"publish": "yarn build:check && yarn release patch push",
@ -55,17 +55,24 @@
"test:lint": "eslint . --ext .js,.jsx,.cjs,.mjs,.ts,.tsx,.cts,.mts",
"format": "prettier --write .",
"lint": "eslint . --ext .js,.jsx,.cjs,.mjs,.ts,.tsx,.cts,.mts --fix",
"prepare": "husky"
"prepare": "git config blame.ignoreRevsFile .git-blame-ignore-revs && husky"
},
"dependencies": {
"@aws-sdk/client-s3": "^3.840.0",
"@cherrystudio/pdf-to-img-napi": "^0.0.1",
"@libsql/client": "0.14.0",
"@libsql/win32-x64-msvc": "^0.4.7",
"@strongtz/win32-arm64-msvc": "^0.4.7",
"iconv-lite": "^0.6.3",
"jaison": "^2.0.2",
"jschardet": "^3.1.4",
"jsdom": "26.1.0",
"macos-release": "^3.4.0",
"node-stream-zip": "^1.15.0",
"notion-helper": "^1.3.22",
"os-proxy-config": "^1.1.2",
"selection-hook": "^0.9.23",
"pdfjs-dist": "4.10.38",
"selection-hook": "^1.0.6",
"turndown": "7.2.0"
},
"devDependencies": {
@ -86,6 +93,7 @@
"@cherrystudio/embedjs-loader-xml": "^0.1.31",
"@cherrystudio/embedjs-ollama": "^0.1.31",
"@cherrystudio/embedjs-openai": "^0.1.31",
"@codemirror/view": "^6.0.0",
"@electron-toolkit/eslint-config-prettier": "^3.0.0",
"@electron-toolkit/eslint-config-ts": "^3.0.0",
"@electron-toolkit/preload": "^3.0.0",
@ -100,14 +108,16 @@
"@kangfenmao/keyv-storage": "^0.1.0",
"@langchain/community": "^0.3.36",
"@langchain/ollama": "^0.2.1",
"@modelcontextprotocol/sdk": "^1.11.4",
"@mistralai/mistralai": "^1.6.0",
"@modelcontextprotocol/sdk": "^1.12.3",
"@mozilla/readability": "^0.6.0",
"@notionhq/client": "^2.2.15",
"@playwright/test": "^1.52.0",
"@reduxjs/toolkit": "^2.2.5",
"@shikijs/markdown-it": "^3.4.2",
"@shikijs/markdown-it": "^3.7.0",
"@swc/plugin-styled-components": "^7.1.5",
"@tanstack/react-query": "^5.27.0",
"@tanstack/react-virtual": "^3.13.12",
"@testing-library/dom": "^10.4.0",
"@testing-library/jest-dom": "^6.6.3",
"@testing-library/react": "^16.3.0",
@ -125,28 +135,32 @@
"@types/react-window": "^1",
"@types/tinycolor2": "^1",
"@types/word-extractor": "^1",
"@uiw/codemirror-extensions-langs": "^4.23.12",
"@uiw/codemirror-themes-all": "^4.23.12",
"@uiw/react-codemirror": "^4.23.12",
"@uiw/codemirror-extensions-langs": "^4.23.14",
"@uiw/codemirror-themes-all": "^4.23.14",
"@uiw/react-codemirror": "^4.23.14",
"@vitejs/plugin-react-swc": "^3.9.0",
"@vitest/browser": "^3.1.4",
"@vitest/coverage-v8": "^3.1.4",
"@vitest/ui": "^3.1.4",
"@vitest/web-worker": "^3.1.4",
"@viz-js/lang-dot": "^1.0.5",
"@viz-js/viz": "^3.14.0",
"@xyflow/react": "^12.4.4",
"antd": "^5.22.5",
"antd": "patch:antd@npm%3A5.24.7#~/.yarn/patches/antd-npm-5.24.7-356a553ae5.patch",
"archiver": "^7.0.1",
"async-mutex": "^0.5.0",
"axios": "^1.7.3",
"browser-image-compression": "^2.0.2",
"code-inspector-plugin": "^0.20.14",
"color": "^5.0.0",
"country-flag-emoji-polyfill": "0.1.8",
"dayjs": "^1.11.11",
"dexie": "^4.0.8",
"dexie-react-hooks": "^1.1.7",
"diff": "^7.0.0",
"docx": "^9.0.2",
"dotenv-cli": "^7.4.2",
"electron": "35.4.0",
"electron": "35.6.0",
"electron-builder": "26.0.15",
"electron-devtools-installer": "^3.2.0",
"electron-log": "^5.1.5",
@ -163,6 +177,7 @@
"eslint-plugin-unused-imports": "^4.1.4",
"fast-diff": "^1.3.0",
"fast-xml-parser": "^5.2.0",
"fetch-socks": "1.3.2",
"franc-min": "^6.2.0",
"fs-extra": "^11.2.0",
"google-auth-library": "^9.15.1",
@ -175,13 +190,12 @@
"lru-cache": "^11.1.0",
"lucide-react": "^0.487.0",
"markdown-it": "^14.1.0",
"mermaid": "^11.6.0",
"mermaid": "^11.7.0",
"mime": "^4.0.4",
"motion": "^12.10.5",
"npx-scope-finder": "^1.2.0",
"officeparser": "^4.1.1",
"openai": "patch:openai@npm%3A5.1.0#~/.yarn/patches/openai-npm-5.1.0-0e7b3ccb07.patch",
"opendal": "0.47.11",
"p-queue": "^8.1.0",
"playwright": "^1.52.0",
"prettier": "^3.5.3",
@ -209,13 +223,15 @@
"remove-markdown": "^0.6.2",
"rollup-plugin-visualizer": "^5.12.0",
"sass": "^1.88.0",
"shiki": "^3.4.2",
"shiki": "^3.7.0",
"string-width": "^7.2.0",
"styled-components": "^6.1.11",
"tar": "^7.4.3",
"tiny-pinyin": "^1.3.2",
"tokenx": "^1.1.0",
"typescript": "^5.6.2",
"undici": "6.21.2",
"unified": "^11.0.5",
"uuid": "^10.0.0",
"vite": "6.2.6",
"vitest": "^3.1.4",
@ -223,6 +239,9 @@
"word-extractor": "^1.0.4",
"zipread": "^1.3.3"
},
"optionalDependencies": {
"@cherrystudio/mac-system-ocr": "^0.2.2"
},
"resolutions": {
"pdf-parse@npm:1.1.1": "patch:pdf-parse@npm%3A1.1.1#~/.yarn/patches/pdf-parse-npm-1.1.1-04a6109b2a.patch",
"@langchain/openai@npm:^0.3.16": "patch:@langchain/openai@npm%3A0.3.16#~/.yarn/patches/@langchain-openai-npm-0.3.16-e525b59526.patch",
@ -233,7 +252,8 @@
"app-builder-lib@npm:26.0.13": "patch:app-builder-lib@npm%3A26.0.13#~/.yarn/patches/app-builder-lib-npm-26.0.13-a064c9e1d0.patch",
"openai@npm:^4.87.3": "patch:openai@npm%3A5.1.0#~/.yarn/patches/openai-npm-5.1.0-0e7b3ccb07.patch",
"app-builder-lib@npm:26.0.15": "patch:app-builder-lib@npm%3A26.0.15#~/.yarn/patches/app-builder-lib-npm-26.0.15-360e5b0476.patch",
"@langchain/core@npm:^0.3.26": "patch:@langchain/core@npm%3A0.3.44#~/.yarn/patches/@langchain-core-npm-0.3.44-41d5c3cb0a.patch"
"@langchain/core@npm:^0.3.26": "patch:@langchain/core@npm%3A0.3.44#~/.yarn/patches/@langchain-core-npm-0.3.44-41d5c3cb0a.patch",
"undici": "6.21.2"
},
"packageManager": "yarn@4.9.1",
"lint-staged": {

View File

@ -32,7 +32,11 @@ export enum IpcChannel {
App_InstallUvBinary = 'app:install-uv-binary',
App_InstallBunBinary = 'app:install-bun-binary',
App_MacIsProcessTrusted = 'app:mac-is-process-trusted',
App_MacRequestProcessTrust = 'app:mac-request-process-trust',
App_QuoteToMain = 'app:quote-to-main',
App_SetDisableHardwareAcceleration = 'app:set-disable-hardware-acceleration',
Notification_Send = 'notification:send',
Notification_OnClick = 'notification:on-click',
@ -70,6 +74,10 @@ export enum IpcChannel {
Mcp_ServersChanged = 'mcp:servers-changed',
Mcp_ServersUpdated = 'mcp:servers-updated',
Mcp_CheckConnectivity = 'mcp:check-connectivity',
Mcp_UploadDxt = 'mcp:upload-dxt',
Mcp_SetProgress = 'mcp:set-progress',
Mcp_AbortTool = 'mcp:abort-tool',
Mcp_GetServerVersion = 'mcp:get-server-version',
// Python
Python_Execute = 'python:execute',
@ -115,6 +123,7 @@ export enum IpcChannel {
KnowledgeBase_Remove = 'knowledge-base:remove',
KnowledgeBase_Search = 'knowledge-base:search',
KnowledgeBase_Rerank = 'knowledge-base:rerank',
KnowledgeBase_Check_Quota = 'knowledge-base:check-quota',
//file
File_Open = 'file:open',
@ -125,9 +134,10 @@ export enum IpcChannel {
File_Clear = 'file:clear',
File_Read = 'file:read',
File_Delete = 'file:delete',
File_DeleteDir = 'file:deleteDir',
File_Get = 'file:get',
File_SelectFolder = 'file:selectFolder',
File_Create = 'file:create',
File_CreateTempFile = 'file:createTempFile',
File_Write = 'file:write',
File_WriteWithId = 'file:writeWithId',
File_SaveImage = 'file:saveImage',
@ -139,6 +149,13 @@ export enum IpcChannel {
File_Base64File = 'file:base64File',
File_GetPdfInfo = 'file:getPdfInfo',
Fs_Read = 'fs:read',
File_OpenWithRelativePath = 'file:openWithRelativePath',
// file service
FileService_Upload = 'file-service:upload',
FileService_List = 'file-service:list',
FileService_Delete = 'file-service:delete',
FileService_Retrieve = 'file-service:retrieve',
Export_Word = 'export:word',
@ -153,6 +170,11 @@ export enum IpcChannel {
Backup_CheckConnection = 'backup:checkConnection',
Backup_CreateDirectory = 'backup:createDirectory',
Backup_DeleteWebdavFile = 'backup:deleteWebdavFile',
Backup_BackupToLocalDir = 'backup:backupToLocalDir',
Backup_RestoreFromLocalBackup = 'backup:restoreFromLocalBackup',
Backup_ListLocalBackupFiles = 'backup:listLocalBackupFiles',
Backup_DeleteLocalBackupFile = 'backup:deleteLocalBackupFile',
Backup_SetLocalBackupDir = 'backup:setLocalBackupDir',
Backup_BackupToS3 = 'backup:backupToS3',
Backup_RestoreFromS3 = 'backup:restoreFromS3',
Backup_ListS3Files = 'backup:listS3Files',
@ -222,5 +244,17 @@ export enum IpcChannel {
Selection_ActionWindowMinimize = 'selection:action-window-minimize',
Selection_ActionWindowPin = 'selection:action-window-pin',
Selection_ProcessAction = 'selection:process-action',
Selection_UpdateActionData = 'selection:update-action-data'
Selection_UpdateActionData = 'selection:update-action-data',
// Memory
Memory_Add = 'memory:add',
Memory_Search = 'memory:search',
Memory_List = 'memory:list',
Memory_Delete = 'memory:delete',
Memory_Update = 'memory:update',
Memory_Get = 'memory:get',
Memory_SetConfig = 'memory:set-config',
Memory_DeleteUser = 'memory:delete-user',
Memory_DeleteAllMemoriesForUser = 'memory:delete-all-memories-for-user',
Memory_GetUsersList = 'memory:get-users-list'
}
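// Illustrative sketch, not part of this enum file: how a renderer might call the new Memory_*
// channels above, assuming a preload bridge that exposes ipcRenderer.invoke. The payload shapes
// are assumptions; the authoritative signatures live in the main-process memory handlers.
//
//   await window.electron.ipcRenderer.invoke(IpcChannel.Memory_Add, messages, { userId: 'default-user' })
//   const hits = await window.electron.ipcRenderer.invoke(IpcChannel.Memory_Search, 'query', { userId: 'default-user' })
//   const all = await window.electron.ipcRenderer.invoke(IpcChannel.Memory_List, { userId: 'default-user' })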

View File

@ -193,6 +193,7 @@ const textExtsByCategory = new Map([
'.htm',
'.xhtml', // HTML
'.xml', // XML
'.fxml', // JavaFX XML
'.org', // Org-mode
'.wiki', // Wiki
'.tex',
@ -416,6 +417,6 @@ export enum UpgradeChannel {
BETA = 'beta' // 预览版本
}
export const defaultTimeout = 5 * 1000 * 60
export const defaultTimeout = 10 * 1000 * 60
export const occupiedDirs = ['logs', 'Network', 'Partitions/webview/Network']

View File

@ -1,6 +1,11 @@
import { ProcessingStatus } from '@types'
export type LoaderReturn = {
entriesAdded: number
uniqueId: string
uniqueIds: string[]
loaderType: string
status?: ProcessingStatus
message?: string
messageSource?: 'preprocess' | 'embedding'
}

View File

@ -43,7 +43,7 @@ async function downloadBunBinary(platform, arch, version = DEFAULT_BUN_VERSION,
if (!packageName) {
console.error(`No binary available for ${platformKey}`)
return false
return 101
}
// Create output directory structure
@ -86,7 +86,7 @@ async function downloadBunBinary(platform, arch, version = DEFAULT_BUN_VERSION,
fs.chmodSync(outputPath, 0o755)
} catch (chmodError) {
console.error(`Warning: Failed to set executable permissions on ${filename}`)
return false
return 102
}
}
console.log(`Extracted ${entry.name} -> ${outputPath}`)
@ -97,8 +97,10 @@ async function downloadBunBinary(platform, arch, version = DEFAULT_BUN_VERSION,
// Clean up
fs.unlinkSync(tempFilename)
console.log(`Successfully installed bun ${version} for ${platformKey}`)
return true
return 0
} catch (error) {
let retCode = 103
console.error(`Error installing bun for ${platformKey}: ${error.message}`)
// Clean up temporary file if it exists
if (fs.existsSync(tempFilename)) {
@ -114,9 +116,10 @@ async function downloadBunBinary(platform, arch, version = DEFAULT_BUN_VERSION,
}
} catch (cleanupError) {
console.warn(`Warning: Failed to clean up directory: ${cleanupError.message}`)
retCode = 104
}
return false
return retCode
}
}
@ -159,16 +162,21 @@ async function installBun() {
`Installing bun ${version} for ${platform}-${arch}${isMusl ? ' (MUSL)' : ''}${isBaseline ? ' (baseline)' : ''}...`
)
await downloadBunBinary(platform, arch, version, isMusl, isBaseline)
return await downloadBunBinary(platform, arch, version, isMusl, isBaseline)
}
// Run the installation
installBun()
.then(() => {
console.log('Installation successful')
process.exit(0)
.then((retCode) => {
if (retCode === 0) {
console.log('Installation successful')
process.exit(0)
} else {
console.error('Installation failed')
process.exit(retCode)
}
})
.catch((error) => {
console.error('Installation failed:', error)
process.exit(1)
process.exit(100)
})

View File

@ -44,7 +44,7 @@ async function downloadUvBinary(platform, arch, version = DEFAULT_UV_VERSION, is
if (!packageName) {
console.error(`No binary available for ${platformKey}`)
return false
return 101
}
// Create output directory structure
@ -85,7 +85,7 @@ async function downloadUvBinary(platform, arch, version = DEFAULT_UV_VERSION, is
fs.chmodSync(outputPath, 0o755)
} catch (chmodError) {
console.error(`Warning: Failed to set executable permissions on ${filename}`)
return false
return 102
}
}
console.log(`Extracted ${entry.name} -> ${outputPath}`)
@ -95,8 +95,10 @@ async function downloadUvBinary(platform, arch, version = DEFAULT_UV_VERSION, is
await zip.close()
fs.unlinkSync(tempFilename)
console.log(`Successfully installed uv ${version} for ${platform}-${arch}`)
return true
return 0
} catch (error) {
let retCode = 103
console.error(`Error installing uv for ${platformKey}: ${error.message}`)
if (fs.existsSync(tempFilename)) {
@ -112,9 +114,10 @@ async function downloadUvBinary(platform, arch, version = DEFAULT_UV_VERSION, is
}
} catch (cleanupError) {
console.warn(`Warning: Failed to clean up directory: ${cleanupError.message}`)
retCode = 104
}
return false
return retCode
}
}
@ -154,16 +157,21 @@ async function installUv() {
console.log(`Installing uv ${version} for ${platform}-${arch}${isMusl ? ' (MUSL)' : ''}...`)
await downloadUvBinary(platform, arch, version, isMusl)
return await downloadUvBinary(platform, arch, version, isMusl)
}
// Run the installation
installUv()
.then(() => {
console.log('Installation successful')
process.exit(0)
.then((retCode) => {
if (retCode === 0) {
console.log('Installation successful')
process.exit(0)
} else {
console.error('Installation failed')
process.exit(retCode)
}
})
.catch((error) => {
console.error('Installation failed:', error)
process.exit(1)
process.exit(100)
})

View File

@ -23,6 +23,9 @@ exports.default = async function (context) {
const node_modules_path = path.join(context.appOutDir, 'resources', 'app.asar.unpacked', 'node_modules')
const _arch = arch === Arch.arm64 ? ['linux-arm64-gnu', 'linux-arm64-musl'] : ['linux-x64-gnu', 'linux-x64-musl']
keepPackageNodeFiles(node_modules_path, '@libsql', _arch)
// 删除 macOS 专用的 OCR 包
removeMacOnlyPackages(node_modules_path)
}
if (platform === 'windows') {
@ -35,6 +38,8 @@ exports.default = async function (context) {
keepPackageNodeFiles(node_modules_path, '@strongtz', ['win32-x64-msvc'])
keepPackageNodeFiles(node_modules_path, '@libsql', ['win32-x64-msvc'])
}
removeMacOnlyPackages(node_modules_path)
}
if (platform === 'windows') {
@ -43,6 +48,22 @@ exports.default = async function (context) {
}
}
/**
* 删除 macOS 专用的包
* @param {string} nodeModulesPath
*/
function removeMacOnlyPackages(nodeModulesPath) {
const macOnlyPackages = ['@cherrystudio/mac-system-ocr']
macOnlyPackages.forEach((packageName) => {
const packagePath = path.join(nodeModulesPath, packageName)
if (fs.existsSync(packagePath)) {
fs.rmSync(packagePath, { recursive: true, force: true })
console.log(`[After Pack] Removed macOS-only package: ${packageName}`)
}
})
}
/**
* 使用指定架构的 node_modules 文件
* @param {*} nodeModulesPath

View File

@ -1,9 +1,60 @@
'use strict'
var __createBinding =
(this && this.__createBinding) ||
(Object.create
? function (o, m, k, k2) {
if (k2 === undefined) k2 = k
var desc = Object.getOwnPropertyDescriptor(m, k)
if (!desc || ('get' in desc ? !m.__esModule : desc.writable || desc.configurable)) {
desc = {
enumerable: true,
get: function () {
return m[k]
}
}
}
Object.defineProperty(o, k2, desc)
}
: function (o, m, k, k2) {
if (k2 === undefined) k2 = k
o[k2] = m[k]
})
var __setModuleDefault =
(this && this.__setModuleDefault) ||
(Object.create
? function (o, v) {
Object.defineProperty(o, 'default', { enumerable: true, value: v })
}
: function (o, v) {
o['default'] = v
})
var __importStar =
(this && this.__importStar) ||
(function () {
var ownKeys = function (o) {
ownKeys =
Object.getOwnPropertyNames ||
function (o) {
var ar = []
for (var k in o) if (Object.prototype.hasOwnProperty.call(o, k)) ar[ar.length] = k
return ar
}
return ownKeys(o)
}
return function (mod) {
if (mod && mod.__esModule) return mod
var result = {}
if (mod != null)
for (var k = ownKeys(mod), i = 0; i < k.length; i++) if (k[i] !== 'default') __createBinding(result, mod, k[i])
__setModuleDefault(result, mod)
return result
}
})()
Object.defineProperty(exports, '__esModule', { value: true })
var fs = require('fs')
var path = require('path')
var fs = __importStar(require('fs'))
var path = __importStar(require('path'))
var translationsDir = path.join(__dirname, '../src/renderer/src/i18n/locales')
var baseLocale = 'en-us'
var baseLocale = 'zh-cn'
var baseFileName = ''.concat(baseLocale, '.json')
var baseFilePath = path.join(translationsDir, baseFileName)
/**
@ -48,12 +99,43 @@ function syncRecursively(target, template) {
}
return isUpdated
}
/**
* 检查 JSON 对象中是否存在重复键并收集所有重复键
* @param obj 要检查的对象
* @returns 返回重复键的数组若无重复则返回空数组
*/
function checkDuplicateKeys(obj) {
var keys = new Set()
var duplicateKeys = []
var checkObject = function (obj, path) {
if (path === void 0) {
path = ''
}
for (var key in obj) {
var fullPath = path ? ''.concat(path, '.').concat(key) : key
if (keys.has(fullPath)) {
// 发现重复键时,添加到数组中(避免重复添加)
if (!duplicateKeys.includes(fullPath)) {
duplicateKeys.push(fullPath)
}
} else {
keys.add(fullPath)
}
// 递归检查子对象
if (typeof obj[key] === 'object' && obj[key] !== null) {
checkObject(obj[key], fullPath)
}
}
}
checkObject(obj)
return duplicateKeys
}
function syncTranslations() {
if (!fs.existsSync(baseFilePath)) {
console.error(
'\u4E3B\u6A21\u677F\u6587\u4EF6 '.concat(
baseFileName,
' \u4E0D\u5B58\u5728\uFF0C\u8BF7\u68C0\u67E5\u8DEF\u5F84\u6216\u6587\u4EF6\u540D\u3002'
' \u4E0D\u5B58\u5728\uFF0C\u8BF7\u68C0\u67E5\u8DEF\u5F84\u6216\u6587\u4EF6\u540D'
)
)
return
@ -63,9 +145,18 @@ function syncTranslations() {
try {
baseJson = JSON.parse(baseContent)
} catch (error) {
console.error('\u89E3\u6790 '.concat(baseFileName, ' \u51FA\u9519:'), error)
console.error('\u89E3\u6790 '.concat(baseFileName, ' \u51FA\u9519\u3002').concat(error))
return
}
// 检查主模板是否存在重复键
var duplicateKeys = checkDuplicateKeys(baseJson)
if (duplicateKeys.length > 0) {
throw new Error(
'\u4E3B\u6A21\u677F\u6587\u4EF6 '
.concat(baseFileName, ' \u5B58\u5728\u4EE5\u4E0B\u91CD\u590D\u952E\uFF1A\n')
.concat(duplicateKeys.join('\n'))
)
}
var files = fs.readdirSync(translationsDir).filter(function (file) {
return file.endsWith('.json') && file !== baseFileName
})
@ -77,27 +168,19 @@ function syncTranslations() {
var fileContent = fs.readFileSync(filePath, 'utf-8')
targetJson = JSON.parse(fileContent)
} catch (error) {
console.error(
'\u89E3\u6790 '.concat(
file,
' \u51FA\u9519\uFF0C\u8DF3\u8FC7\u6B64\u6587\u4EF6\u3002\u9519\u8BEF\u4FE1\u606F:'
),
error
)
console.error('\u89E3\u6790 '.concat(file, ' \u51FA\u9519\uFF0C\u8DF3\u8FC7\u6B64\u6587\u4EF6\u3002'), error)
continue
}
var isUpdated = syncRecursively(targetJson, baseJson)
if (isUpdated) {
try {
fs.writeFileSync(filePath, JSON.stringify(targetJson, null, 2), 'utf-8')
console.log(
'\u6587\u4EF6 '.concat(file, ' \u5DF2\u66F4\u65B0\u540C\u6B65\u4E3B\u6A21\u677F\u7684\u5185\u5BB9\u3002')
)
fs.writeFileSync(filePath, JSON.stringify(targetJson, null, 2) + '\n', 'utf-8')
console.log('\u6587\u4EF6 '.concat(file, ' \u5DF2\u66F4\u65B0\u540C\u6B65\u4E3B\u6A21\u677F\u7684\u5185\u5BB9'))
} catch (error) {
console.error('\u5199\u5165 '.concat(file, ' \u51FA\u9519:'), error)
console.error('\u5199\u5165 '.concat(file, ' \u51FA\u9519\u3002').concat(error))
}
} else {
console.log('\u6587\u4EF6 '.concat(file, ' \u65E0\u9700\u66F4\u65B0\u3002'))
console.log('\u6587\u4EF6 '.concat(file, ' \u65E0\u9700\u66F4\u65B0'))
}
}
}

View File

@ -2,7 +2,7 @@ import * as fs from 'fs'
import * as path from 'path'
const translationsDir = path.join(__dirname, '../src/renderer/src/i18n/locales')
const baseLocale = 'zh-CN'
const baseLocale = 'zh-cn'
const baseFileName = `${baseLocale}.json`
const baseFilePath = path.join(translationsDir, baseFileName)
@ -52,6 +52,39 @@ function syncRecursively(target: any, template: any): boolean {
return isUpdated
}
/**
* 检查 JSON 对象中是否存在重复键并收集所有重复键
* @param obj 要检查的对象
* @returns 返回重复键的数组,若无重复则返回空数组
*/
function checkDuplicateKeys(obj: Record<string, any>): string[] {
const keys = new Set<string>()
const duplicateKeys: string[] = []
const checkObject = (obj: Record<string, any>, path: string = '') => {
for (const key in obj) {
const fullPath = path ? `${path}.${key}` : key
if (keys.has(fullPath)) {
// 发现重复键时,添加到数组中(避免重复添加)
if (!duplicateKeys.includes(fullPath)) {
duplicateKeys.push(fullPath)
}
} else {
keys.add(fullPath)
}
// 递归检查子对象
if (typeof obj[key] === 'object' && obj[key] !== null) {
checkObject(obj[key], fullPath)
}
}
}
checkObject(obj)
return duplicateKeys
}
function syncTranslations() {
if (!fs.existsSync(baseFilePath)) {
console.error(`主模板文件 ${baseFileName} 不存在,请检查路径或文件名`)
@ -63,10 +96,16 @@ function syncTranslations() {
try {
baseJson = JSON.parse(baseContent)
} catch (error) {
console.error(`解析 ${baseFileName} 出错:`, error)
console.error(`解析 ${baseFileName} 出错${error}`)
return
}
// 检查主模板是否存在重复键
const duplicateKeys = checkDuplicateKeys(baseJson)
if (duplicateKeys.length > 0) {
throw new Error(`主模板文件 ${baseFileName} 存在以下重复键:\n${duplicateKeys.join('\n')}`)
}
const files = fs.readdirSync(translationsDir).filter((file) => file.endsWith('.json') && file !== baseFileName)
for (const file of files) {
@ -76,7 +115,7 @@ function syncTranslations() {
const fileContent = fs.readFileSync(filePath, 'utf-8')
targetJson = JSON.parse(fileContent)
} catch (error) {
console.error(`解析 ${file} 出错,跳过此文件。错误信息:`, error)
console.error(`解析 ${file} 出错,跳过此文件。`, error)
continue
}
@ -87,7 +126,7 @@ function syncTranslations() {
fs.writeFileSync(filePath, JSON.stringify(targetJson, null, 2) + '\n', 'utf-8')
console.log(`文件 ${file} 已更新同步主模板的内容`)
} catch (error) {
console.error(`写入 ${file} 出错:`, error)
console.error(`写入 ${file} 出错${error}`)
}
} else {
console.log(`文件 ${file} 无需更新`)

View File

@ -1,16 +1,19 @@
/**
* Paratera_API_KEY=sk-abcxxxxxxxxxxxxxxxxxxxxxxx123 ts-node scripts/update-i18n.ts
* 使用兼容 OpenAI 的接口对 i18n 文件进行自动翻译
*
* API_KEY=sk-xxxx BASE_URL=xxxx MODEL=xxxx ts-node scripts/update-i18n.ts
*/
// OCOOL API KEY
const Paratera_API_KEY = process.env.Paratera_API_KEY
const API_KEY = process.env.API_KEY
const BASE_URL = process.env.BASE_URL || 'https://llmapi.paratera.com/v1'
const MODEL = process.env.MODEL || 'Qwen3-235B-A22B'
const INDEX = [
// 语言的名称 代码 用来翻译的模型
{ name: 'France', code: 'fr-fr', model: 'Qwen3-235B-A22B' },
{ name: 'Spanish', code: 'es-es', model: 'Qwen3-235B-A22B' },
{ name: 'Portuguese', code: 'pt-pt', model: 'Qwen3-235B-A22B' },
{ name: 'Greek', code: 'el-gr', model: 'Qwen3-235B-A22B' }
// 语言的名称代码用来翻译的模型
{ name: 'France', code: 'fr-fr', model: MODEL },
{ name: 'Spanish', code: 'es-es', model: MODEL },
{ name: 'Portuguese', code: 'pt-pt', model: MODEL },
{ name: 'Greek', code: 'el-gr', model: MODEL }
]
const fs = require('fs')
@ -19,8 +22,8 @@ import OpenAI from 'openai'
const zh = JSON.parse(fs.readFileSync('src/renderer/src/i18n/locales/zh-cn.json', 'utf8')) as object
const openai = new OpenAI({
apiKey: Paratera_API_KEY,
baseURL: 'https://llmapi.paratera.com/v1'
apiKey: API_KEY,
baseURL: BASE_URL
})
// 递归遍历翻译

View File

@ -1,6 +1,6 @@
interface IFilterList {
WINDOWS: string[]
MAC?: string[]
MAC: string[]
}
interface IFinetunedList {
@ -45,14 +45,17 @@ export const SELECTION_PREDEFINED_BLACKLIST: IFilterList = {
'sldworks.exe',
// Remote Desktop
'mstsc.exe'
]
],
MAC: []
}
export const SELECTION_FINETUNED_LIST: IFinetunedList = {
EXCLUDE_CLIPBOARD_CURSOR_DETECT: {
WINDOWS: ['acrobat.exe', 'wps.exe', 'cajviewer.exe']
WINDOWS: ['acrobat.exe', 'wps.exe', 'cajviewer.exe'],
MAC: []
},
INCLUDE_CLIPBOARD_DELAY_READ: {
WINDOWS: ['acrobat.exe', 'wps.exe', 'cajviewer.exe', 'foxitphantom.exe']
WINDOWS: ['acrobat.exe', 'wps.exe', 'cajviewer.exe', 'foxitphantom.exe'],
MAC: []
}
}

View File

@ -11,7 +11,7 @@ import { app } from 'electron'
import installExtension, { REACT_DEVELOPER_TOOLS, REDUX_DEVTOOLS } from 'electron-devtools-installer'
import Logger from 'electron-log'
import { isDev, isWin } from './constant'
import { isDev, isWin, isLinux } from './constant'
import { registerIpc } from './ipc'
import { configManager } from './services/ConfigManager'
import mcpService from './services/MCPService'
@ -28,6 +28,14 @@ import { windowService } from './services/WindowService'
Logger.initialize()
/**
* Disable hardware acceleration if setting is enabled
*/
const disableHardwareAcceleration = configManager.getDisableHardwareAcceleration()
if (disableHardwareAcceleration) {
app.disableHardwareAcceleration()
}
/**
* Disable chromium's window animations
* main purpose for this is to avoid the transparent window flashing when it is shown
@ -38,6 +46,14 @@ if (isWin) {
app.commandLine.appendSwitch('wm-window-animations-disabled')
}
/**
* Enable GlobalShortcutsPortal for Linux Wayland Protocol
* see: https://www.electronjs.org/docs/latest/api/global-shortcut
*/
if (isLinux && process.env.XDG_SESSION_TYPE === 'wayland') {
app.commandLine.appendSwitch('enable-features', 'GlobalShortcutsPortal')
}
// Enable features for unresponsive renderer js call stacks
app.commandLine.appendSwitch('enable-features', 'DocumentPolicyIncludeJSCallStacksInCrashReports')
app.on('web-contents-created', (_, webContents) => {
@ -124,19 +140,27 @@ if (!app.requestSingleInstanceLock()) {
registerProtocolClient(app)
// macOS specific: handle protocol when app is already running
app.on('open-url', (event, url) => {
event.preventDefault()
handleProtocolUrl(url)
})
const handleOpenUrl = (args: string[]) => {
const url = args.find((arg) => arg.startsWith(CHERRY_STUDIO_PROTOCOL + '://'))
if (url) handleProtocolUrl(url)
}
// for windows to start with url
handleOpenUrl(process.argv)
// Listen for second instance
app.on('second-instance', (_event, argv) => {
windowService.showMainWindow()
// Protocol handler for Windows/Linux
// The commandLine is an array of strings where the last item might be the URL
const url = argv.find((arg) => arg.startsWith(CHERRY_STUDIO_PROTOCOL + '://'))
if (url) handleProtocolUrl(url)
handleOpenUrl(argv)
})
app.on('browser-window-created', (_, window) => {

View File

@ -2,30 +2,34 @@ import fs from 'node:fs'
import { arch } from 'node:os'
import path from 'node:path'
import { isMac, isWin } from '@main/constant'
import { isLinux, isMac, isWin } from '@main/constant'
import { getBinaryPath, isBinaryExists, runInstallScript } from '@main/utils/process'
import { handleZoomFactor } from '@main/utils/zoom'
import { UpgradeChannel } from '@shared/config/constant'
import { IpcChannel } from '@shared/IpcChannel'
import { Shortcut, ThemeMode } from '@types'
import { BrowserWindow, dialog, ipcMain, session, shell, webContents } from 'electron'
import { FileMetadata, Provider, Shortcut, ThemeMode } from '@types'
import { BrowserWindow, dialog, ipcMain, ProxyConfig, session, shell, systemPreferences, webContents } from 'electron'
import log from 'electron-log'
import { Notification } from 'src/renderer/src/types/notification'
import appService from './services/AppService'
import AppUpdater from './services/AppUpdater'
import BackupManager from './services/BackupManager'
import { configManager } from './services/ConfigManager'
import CopilotService from './services/CopilotService'
import DxtService from './services/DxtService'
import { ExportService } from './services/ExportService'
import FileService from './services/FileService'
import FileStorage from './services/FileStorage'
import FileService from './services/FileSystemService'
import KnowledgeService from './services/KnowledgeService'
import mcpService from './services/MCPService'
import MemoryService from './services/memory/MemoryService'
import NotificationService from './services/NotificationService'
import * as NutstoreService from './services/NutstoreService'
import ObsidianVaultService from './services/ObsidianVaultService'
import { ProxyConfig, proxyManager } from './services/ProxyManager'
import { proxyManager } from './services/ProxyManager'
import { pythonService } from './services/PythonService'
import { FileServiceManager } from './services/remotefile/FileServiceManager'
import { searchService } from './services/SearchService'
import { SelectionService } from './services/SelectionService'
import { registerShortcuts, unregisterAllShortcuts } from './services/ShortcutService'
@ -44,6 +48,8 @@ const backupManager = new BackupManager()
const exportService = new ExportService(fileManager)
const obsidianVaultService = new ObsidianVaultService()
const vertexAIService = VertexAIService.getInstance()
const memoryService = MemoryService.getInstance()
const dxtService = new DxtService()
export function registerIpc(mainWindow: BrowserWindow, app: Electron.App) {
const appUpdater = new AppUpdater(mainWindow)
@ -72,9 +78,9 @@ export function registerIpc(mainWindow: BrowserWindow, app: Electron.App) {
if (proxy === 'system') {
proxyConfig = { mode: 'system' }
} else if (proxy) {
proxyConfig = { mode: 'custom', url: proxy }
proxyConfig = { mode: 'fixed_servers', proxyRules: proxy }
} else {
proxyConfig = { mode: 'none' }
proxyConfig = { mode: 'direct' }
}
await proxyManager.configureProxy(proxyConfig)
@ -113,12 +119,8 @@ export function registerIpc(mainWindow: BrowserWindow, app: Electron.App) {
})
// launch on boot
ipcMain.handle(IpcChannel.App_SetLaunchOnBoot, (_, openAtLogin: boolean) => {
// Set login item settings for windows and mac
// linux is not supported because it requires more file operations
if (isWin || isMac) {
app.setLoginItemSettings({ openAtLogin })
}
ipcMain.handle(IpcChannel.App_SetLaunchOnBoot, (_, isLaunchOnBoot: boolean) => {
appService.setAppLaunchOnBoot(isLaunchOnBoot)
})
// launch to tray
@ -158,6 +160,18 @@ export function registerIpc(mainWindow: BrowserWindow, app: Electron.App) {
}
})
//only for mac
if (isMac) {
ipcMain.handle(IpcChannel.App_MacIsProcessTrusted, (): boolean => {
return systemPreferences.isTrustedAccessibilityClient(false)
})
//return is only the current state, not the new state
ipcMain.handle(IpcChannel.App_MacRequestProcessTrust, (): boolean => {
return systemPreferences.isTrustedAccessibilityClient(true)
})
}
ipcMain.handle(IpcChannel.Config_Set, (_, key: string, value: any, isNotify: boolean = false) => {
configManager.set(key, value, isNotify)
})
@ -306,6 +320,17 @@ export function registerIpc(mainWindow: BrowserWindow, app: Electron.App) {
// Relaunch app
ipcMain.handle(IpcChannel.App_RelaunchApp, (_, options?: Electron.RelaunchOptions) => {
// Fix for .AppImage
if (isLinux && process.env.APPIMAGE) {
log.info('Relaunching app with options:', process.env.APPIMAGE, options)
// On Linux, we need to use the APPIMAGE environment variable to relaunch
// https://github.com/electron-userland/electron-builder/issues/1727#issuecomment-769896927
options = options || {}
options.execPath = process.env.APPIMAGE
options.args = options.args || []
options.args.unshift('--appimage-extract-and-run')
}
app.relaunch(options)
app.exit(0)
})
@ -344,6 +369,11 @@ export function registerIpc(mainWindow: BrowserWindow, app: Electron.App) {
ipcMain.handle(IpcChannel.Backup_CheckConnection, backupManager.checkConnection)
ipcMain.handle(IpcChannel.Backup_CreateDirectory, backupManager.createDirectory)
ipcMain.handle(IpcChannel.Backup_DeleteWebdavFile, backupManager.deleteWebdavFile)
ipcMain.handle(IpcChannel.Backup_BackupToLocalDir, backupManager.backupToLocalDir)
ipcMain.handle(IpcChannel.Backup_RestoreFromLocalBackup, backupManager.restoreFromLocalBackup)
ipcMain.handle(IpcChannel.Backup_ListLocalBackupFiles, backupManager.listLocalBackupFiles)
ipcMain.handle(IpcChannel.Backup_DeleteLocalBackupFile, backupManager.deleteLocalBackupFile)
ipcMain.handle(IpcChannel.Backup_SetLocalBackupDir, backupManager.setLocalBackupDir)
ipcMain.handle(IpcChannel.Backup_BackupToS3, backupManager.backupToS3)
ipcMain.handle(IpcChannel.Backup_RestoreFromS3, backupManager.restoreFromS3)
ipcMain.handle(IpcChannel.Backup_ListS3Files, backupManager.listS3Files)
@ -359,9 +389,10 @@ export function registerIpc(mainWindow: BrowserWindow, app: Electron.App) {
ipcMain.handle(IpcChannel.File_Clear, fileManager.clear)
ipcMain.handle(IpcChannel.File_Read, fileManager.readFile)
ipcMain.handle(IpcChannel.File_Delete, fileManager.deleteFile)
ipcMain.handle('file:deleteDir', fileManager.deleteDir)
ipcMain.handle(IpcChannel.File_Get, fileManager.getFile)
ipcMain.handle(IpcChannel.File_SelectFolder, fileManager.selectFolder)
ipcMain.handle(IpcChannel.File_Create, fileManager.createTempFile)
ipcMain.handle(IpcChannel.File_CreateTempFile, fileManager.createTempFile)
ipcMain.handle(IpcChannel.File_Write, fileManager.writeFile)
ipcMain.handle(IpcChannel.File_WriteWithId, fileManager.writeFileWithId)
ipcMain.handle(IpcChannel.File_SaveImage, fileManager.saveImage)
@ -372,6 +403,28 @@ export function registerIpc(mainWindow: BrowserWindow, app: Electron.App) {
ipcMain.handle(IpcChannel.File_Download, fileManager.downloadFile)
ipcMain.handle(IpcChannel.File_Copy, fileManager.copyFile)
ipcMain.handle(IpcChannel.File_BinaryImage, fileManager.binaryImage)
ipcMain.handle(IpcChannel.File_OpenWithRelativePath, fileManager.openFileWithRelativePath)
// file service
ipcMain.handle(IpcChannel.FileService_Upload, async (_, provider: Provider, file: FileMetadata) => {
const service = FileServiceManager.getInstance().getService(provider)
return await service.uploadFile(file)
})
ipcMain.handle(IpcChannel.FileService_List, async (_, provider: Provider) => {
const service = FileServiceManager.getInstance().getService(provider)
return await service.listFiles()
})
ipcMain.handle(IpcChannel.FileService_Delete, async (_, provider: Provider, fileId: string) => {
const service = FileServiceManager.getInstance().getService(provider)
return await service.deleteFile(fileId)
})
ipcMain.handle(IpcChannel.FileService_Retrieve, async (_, provider: Provider, fileId: string) => {
const service = FileServiceManager.getInstance().getService(provider)
return await service.retrieveFile(fileId)
})
// fs
ipcMain.handle(IpcChannel.Fs_Read, FileService.readFile)
@ -402,6 +455,39 @@ export function registerIpc(mainWindow: BrowserWindow, app: Electron.App) {
ipcMain.handle(IpcChannel.KnowledgeBase_Remove, KnowledgeService.remove)
ipcMain.handle(IpcChannel.KnowledgeBase_Search, KnowledgeService.search)
ipcMain.handle(IpcChannel.KnowledgeBase_Rerank, KnowledgeService.rerank)
ipcMain.handle(IpcChannel.KnowledgeBase_Check_Quota, KnowledgeService.checkQuota)
// memory
ipcMain.handle(IpcChannel.Memory_Add, async (_, messages, config) => {
return await memoryService.add(messages, config)
})
ipcMain.handle(IpcChannel.Memory_Search, async (_, query, config) => {
return await memoryService.search(query, config)
})
ipcMain.handle(IpcChannel.Memory_List, async (_, config) => {
return await memoryService.list(config)
})
ipcMain.handle(IpcChannel.Memory_Delete, async (_, id) => {
return await memoryService.delete(id)
})
ipcMain.handle(IpcChannel.Memory_Update, async (_, id, memory, metadata) => {
return await memoryService.update(id, memory, metadata)
})
ipcMain.handle(IpcChannel.Memory_Get, async (_, memoryId) => {
return await memoryService.get(memoryId)
})
ipcMain.handle(IpcChannel.Memory_SetConfig, async (_, config) => {
memoryService.setConfig(config)
})
ipcMain.handle(IpcChannel.Memory_DeleteUser, async (_, userId) => {
return await memoryService.deleteUser(userId)
})
ipcMain.handle(IpcChannel.Memory_DeleteAllMemoriesForUser, async (_, userId) => {
return await memoryService.deleteAllMemoriesForUser(userId)
})
ipcMain.handle(IpcChannel.Memory_GetUsersList, async () => {
return await memoryService.getUsersList()
})
// window
ipcMain.handle(IpcChannel.Windows_SetMinimumSize, (_, width: number, height: number) => {
@ -452,6 +538,29 @@ export function registerIpc(mainWindow: BrowserWindow, app: Electron.App) {
ipcMain.handle(IpcChannel.Mcp_GetResource, mcpService.getResource)
ipcMain.handle(IpcChannel.Mcp_GetInstallInfo, mcpService.getInstallInfo)
ipcMain.handle(IpcChannel.Mcp_CheckConnectivity, mcpService.checkMcpConnectivity)
ipcMain.handle(IpcChannel.Mcp_AbortTool, mcpService.abortTool)
ipcMain.handle(IpcChannel.Mcp_GetServerVersion, mcpService.getServerVersion)
ipcMain.handle(IpcChannel.Mcp_SetProgress, (_, progress: number) => {
mainWindow.webContents.send('mcp-progress', progress)
})
// DXT upload handler
ipcMain.handle(IpcChannel.Mcp_UploadDxt, async (event, fileBuffer: ArrayBuffer, fileName: string) => {
try {
// Create a temporary file with the uploaded content
const tempPath = await fileManager.createTempFile(event, fileName)
await fileManager.writeFile(event, tempPath, Buffer.from(fileBuffer))
// Process DXT file using the temporary path
return await dxtService.uploadDxt(event, tempPath)
} catch (error) {
log.error('[IPC] DXT upload error:', error)
return {
success: false,
error: error instanceof Error ? error.message : 'Failed to upload DXT file'
}
}
})
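// Illustrative sketch, not part of this file: a renderer-side call into the DXT upload handler
// above, assuming a preload bridge exposing ipcRenderer.invoke. The File/ArrayBuffer handling is
// an assumption for illustration only.
//
//   const buffer = await dxtFile.arrayBuffer() // dxtFile: a File chosen by the user
//   const result = await window.electron.ipcRenderer.invoke(IpcChannel.Mcp_UploadDxt, buffer, dxtFile.name)
//   if (!result.success) console.error(result.error)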
// Register Python execution handler
ipcMain.handle(
@ -519,4 +628,8 @@ export function registerIpc(mainWindow: BrowserWindow, app: Electron.App) {
SelectionService.registerIpcHandler()
ipcMain.handle(IpcChannel.App_QuoteToMain, (_, text: string) => windowService.quoteToMainWindow(text))
ipcMain.handle(IpcChannel.App_SetDisableHardwareAcceleration, (_, isDisable: boolean) => {
configManager.setDisableHardwareAcceleration(isDisable)
})
}

View File

@ -0,0 +1,122 @@
import fs from 'node:fs'
import path from 'node:path'
import { windowService } from '@main/services/WindowService'
import { getFileExt } from '@main/utils/file'
import { FileMetadata, OcrProvider } from '@types'
import { app } from 'electron'
import { TypedArray } from 'pdfjs-dist/types/src/display/api'
export default abstract class BaseOcrProvider {
protected provider: OcrProvider
public storageDir = path.join(app.getPath('userData'), 'Data', 'Files')
constructor(provider: OcrProvider) {
if (!provider) {
throw new Error('OCR provider is not set')
}
this.provider = provider
}
abstract parseFile(sourceId: string, file: FileMetadata): Promise<{ processedFile: FileMetadata; quota?: number }>
/**
 * Check whether this file has already been processed.
 * A processed file lives in the Data/Files/{file.id} directory.
 * @param file the original file metadata
 * @returns the processed file metadata, or null if the file has not been processed yet
 */
public async checkIfAlreadyProcessed(file: FileMetadata): Promise<FileMetadata | null> {
try {
// Check whether Data/Files/{file.id} is a directory
const preprocessDirPath = path.join(this.storageDir, file.id)
if (fs.existsSync(preprocessDirPath)) {
const stats = await fs.promises.stat(preprocessDirPath)
// If it is a directory, the file has already been processed
if (stats.isDirectory()) {
// Look for result files inside the directory
const files = await fs.promises.readdir(preprocessDirPath)
// Find the main result file (.md or .txt)
const processedFile = files.find((fileName) => fileName.endsWith('.md') || fileName.endsWith('.txt'))
if (processedFile) {
const processedFilePath = path.join(preprocessDirPath, processedFile)
const processedStats = await fs.promises.stat(processedFilePath)
const ext = getFileExt(processedFile)
return {
...file,
name: file.name.replace(file.ext, ext),
path: processedFilePath,
ext: ext,
size: processedStats.size,
created_at: processedStats.birthtime.toISOString()
}
}
}
}
return null
} catch (error) {
// If the check fails, return null to indicate the file has not been processed
return null
}
}
/**
 * Delay for the given number of milliseconds.
 */
public delay = (ms: number): Promise<void> => {
return new Promise((resolve) => setTimeout(resolve, ms))
}
public async readPdf(
source: string | URL | TypedArray,
passwordCallback?: (fn: (password: string) => void, reason: string) => string
) {
const { getDocument } = await import('pdfjs-dist/legacy/build/pdf.mjs')
const documentLoadingTask = getDocument(source)
if (passwordCallback) {
documentLoadingTask.onPassword = passwordCallback
}
const document = await documentLoadingTask.promise
return document
}
public async sendOcrProgress(sourceId: string, progress: number): Promise<void> {
const mainWindow = windowService.getMainWindow()
mainWindow?.webContents.send('file-ocr-progress', {
itemId: sourceId,
progress: progress
})
}
/**
 * Move the given files into the attachments directory Data/Files/{fileId}.
 * @param fileId the file id, used as the directory name
 * @param filePaths absolute paths of the files to move
 * @returns the new paths of the moved files
 */
public moveToAttachmentsDir(fileId: string, filePaths: string[]): string[] {
const attachmentsPath = path.join(this.storageDir, fileId)
if (!fs.existsSync(attachmentsPath)) {
fs.mkdirSync(attachmentsPath, { recursive: true })
}
const movedPaths: string[] = []
for (const filePath of filePaths) {
if (fs.existsSync(filePath)) {
const fileName = path.basename(filePath)
const destPath = path.join(attachmentsPath, fileName)
fs.copyFileSync(filePath, destPath)
fs.unlinkSync(filePath) // delete the original file to complete the "move"
movedPaths.push(destPath)
}
}
return movedPaths
}
}

View File

@ -0,0 +1,12 @@
import { FileMetadata, OcrProvider } from '@types'
import BaseOcrProvider from './BaseOcrProvider'
export default class DefaultOcrProvider extends BaseOcrProvider {
constructor(provider: OcrProvider) {
super(provider)
}
public parseFile(): Promise<{ processedFile: FileMetadata }> {
throw new Error('Method not implemented.')
}
}

View File

@ -0,0 +1,128 @@
import { isMac } from '@main/constant'
import { FileMetadata, OcrProvider } from '@types'
import Logger from 'electron-log'
import * as fs from 'fs'
import * as path from 'path'
import { TextItem } from 'pdfjs-dist/types/src/display/api'
import BaseOcrProvider from './BaseOcrProvider'
export default class MacSysOcrProvider extends BaseOcrProvider {
private readonly MIN_TEXT_LENGTH = 1000
private MacOCR: any
private async initMacOCR() {
if (!isMac) {
throw new Error('MacSysOcrProvider is only available on macOS')
}
if (!this.MacOCR) {
try {
// @ts-ignore This module is optional and only installed/available on macOS. Runtime checks prevent execution on other platforms.
const module = await import('@cherrystudio/mac-system-ocr')
this.MacOCR = module.default
} catch (error) {
Logger.error('[OCR] Failed to load mac-system-ocr:', error)
throw error
}
}
return this.MacOCR
}
private getRecognitionLevel(level?: number) {
return level === 0 ? this.MacOCR.RECOGNITION_LEVEL_FAST : this.MacOCR.RECOGNITION_LEVEL_ACCURATE
}
constructor(provider: OcrProvider) {
super(provider)
}
private async processPages(
results: any,
totalPages: number,
sourceId: string,
writeStream: fs.WriteStream
): Promise<void> {
await this.initMacOCR()
// TODO: optimize with batching and p-queue in a later version
for (let i = 0; i < totalPages; i++) {
// Convert pages to buffers
const pageNum = i + 1
const pageBuffer = await results.getPage(pageNum)
// Process batch
const ocrResult = await this.MacOCR.recognizeFromBuffer(pageBuffer, {
ocrOptions: {
recognitionLevel: this.getRecognitionLevel(this.provider.options?.recognitionLevel),
minConfidence: this.provider.options?.minConfidence || 0.5
}
})
// Write results in order
writeStream.write(ocrResult.text + '\n')
// Update progress
await this.sendOcrProgress(sourceId, (pageNum / totalPages) * 100)
}
}
public async isScanPdf(buffer: Buffer): Promise<boolean> {
const doc = await this.readPdf(new Uint8Array(buffer))
const pageLength = doc.numPages
let counts = 0
const pagesToCheck = Math.min(pageLength, 10)
for (let i = 0; i < pagesToCheck; i++) {
const page = await doc.getPage(i + 1)
const pageData = await page.getTextContent()
const pageText = pageData.items.map((item) => (item as TextItem).str).join('')
counts += pageText.length
if (counts >= this.MIN_TEXT_LENGTH) {
return false
}
}
return true
}
public async parseFile(sourceId: string, file: FileMetadata): Promise<{ processedFile: FileMetadata }> {
Logger.info(`[OCR] Starting OCR process for file: ${file.name}`)
if (file.ext === '.pdf') {
try {
const { pdf } = await import('@cherrystudio/pdf-to-img-napi')
const pdfBuffer = await fs.promises.readFile(file.path)
const results = await pdf(pdfBuffer, {
scale: 2
})
const totalPages = results.length
const baseDir = path.dirname(file.path)
const baseName = path.basename(file.path, path.extname(file.path))
const txtFileName = `${baseName}.txt`
const txtFilePath = path.join(baseDir, txtFileName)
const writeStream = fs.createWriteStream(txtFilePath)
await this.processPages(results, totalPages, sourceId, writeStream)
await new Promise<void>((resolve, reject) => {
writeStream.end(() => {
Logger.info(`[OCR] OCR process completed successfully for ${file.origin_name}`)
resolve()
})
writeStream.on('error', reject)
})
const movedPaths = this.moveToAttachmentsDir(file.id, [txtFilePath])
return {
processedFile: {
...file,
name: txtFileName,
path: movedPaths[0],
ext: '.txt',
size: fs.statSync(movedPaths[0]).size
}
}
} catch (error) {
Logger.error('[OCR] Error during OCR process:', error)
throw error
}
}
return { processedFile: file }
}
}

View File

@ -0,0 +1,26 @@
import { FileMetadata, OcrProvider as Provider } from '@types'
import BaseOcrProvider from './BaseOcrProvider'
import OcrProviderFactory from './OcrProviderFactory'
export default class OcrProvider {
private sdk: BaseOcrProvider
constructor(provider: Provider) {
this.sdk = OcrProviderFactory.create(provider)
}
public async parseFile(
sourceId: string,
file: FileMetadata
): Promise<{ processedFile: FileMetadata; quota?: number }> {
return this.sdk.parseFile(sourceId, file)
}
/**
 * Check whether this file has already been processed.
 * @param file the original file metadata
 * @returns the processed file metadata, or null if the file has not been processed yet
 */
public async checkIfAlreadyProcessed(file: FileMetadata): Promise<FileMetadata | null> {
return this.sdk.checkIfAlreadyProcessed(file)
}
}

View File

@ -0,0 +1,20 @@
import { isMac } from '@main/constant'
import { OcrProvider } from '@types'
import Logger from 'electron-log'
import BaseOcrProvider from './BaseOcrProvider'
import DefaultOcrProvider from './DefaultOcrProvider'
import MacSysOcrProvider from './MacSysOcrProvider'
export default class OcrProviderFactory {
static create(provider: OcrProvider): BaseOcrProvider {
switch (provider.id) {
case 'system':
if (!isMac) {
Logger.warn('[OCR] System OCR provider is only available on macOS')
}
return new MacSysOcrProvider(provider)
default:
return new DefaultOcrProvider(provider)
}
}
}
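
Taken together, `OcrProvider` (the facade defined above the factory) is the only class callers need to touch; the factory resolves `provider.id === 'system'` to `MacSysOcrProvider` and everything else to the no-op default. A minimal sketch of the intended call pattern, assuming the relative import paths used in this directory:

// Hypothetical caller of the OCR facade. On non-macOS platforms the 'system'
// provider is still constructed (with a warning) but would fail when parsing.
import OcrProvider from './OcrProvider'
import type { FileMetadata, OcrProvider as OcrProviderConfig } from '@types'

export async function ocrFile(config: OcrProviderConfig, sourceId: string, file: FileMetadata) {
  const ocr = new OcrProvider(config)

  // Reuse an earlier run if Data/Files/{file.id} already contains a result
  const cached = await ocr.checkIfAlreadyProcessed(file)
  if (cached) {
    return { processedFile: cached }
  }

  // For scanned PDFs this produces a .txt file under Data/Files/{file.id}
  return ocr.parseFile(sourceId, file)
}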

View File

@ -0,0 +1,126 @@
import fs from 'node:fs'
import path from 'node:path'
import { windowService } from '@main/services/WindowService'
import { getFileExt } from '@main/utils/file'
import { FileMetadata, PreprocessProvider } from '@types'
import { app } from 'electron'
import { TypedArray } from 'pdfjs-dist/types/src/display/api'
export default abstract class BasePreprocessProvider {
protected provider: PreprocessProvider
protected userId?: string
public storageDir = path.join(app.getPath('userData'), 'Data', 'Files')
constructor(provider: PreprocessProvider, userId?: string) {
if (!provider) {
throw new Error('Preprocess provider is not set')
}
this.provider = provider
this.userId = userId
}
abstract parseFile(sourceId: string, file: FileMetadata): Promise<{ processedFile: FileMetadata; quota?: number }>
abstract checkQuota(): Promise<number>
/**
 * Check whether this file has already been preprocessed.
 * A preprocessed file lives in the Data/Files/{file.id} directory.
 * @param file the original file metadata
 * @returns the processed file metadata, or null if the file has not been preprocessed yet
 */
public async checkIfAlreadyProcessed(file: FileMetadata): Promise<FileMetadata | null> {
try {
// Check whether Data/Files/{file.id} is a directory
const preprocessDirPath = path.join(this.storageDir, file.id)
if (fs.existsSync(preprocessDirPath)) {
const stats = await fs.promises.stat(preprocessDirPath)
// If it is a directory, the file has already been preprocessed
if (stats.isDirectory()) {
// Look for result files inside the directory
const files = await fs.promises.readdir(preprocessDirPath)
// Find the main result file (.md or .txt)
const processedFile = files.find((fileName) => fileName.endsWith('.md') || fileName.endsWith('.txt'))
if (processedFile) {
const processedFilePath = path.join(preprocessDirPath, processedFile)
const processedStats = await fs.promises.stat(processedFilePath)
const ext = getFileExt(processedFile)
return {
...file,
name: file.name.replace(file.ext, ext),
path: processedFilePath,
ext: ext,
size: processedStats.size,
created_at: processedStats.birthtime.toISOString()
}
}
}
}
return null
} catch (error) {
// If the check fails, return null to indicate the file has not been preprocessed
return null
}
}
/**
 * Delay for the given number of milliseconds.
 */
public delay = (ms: number): Promise<void> => {
return new Promise((resolve) => setTimeout(resolve, ms))
}
public async readPdf(
source: string | URL | TypedArray,
passwordCallback?: (fn: (password: string) => void, reason: string) => string
) {
const { getDocument } = await import('pdfjs-dist/legacy/build/pdf.mjs')
const documentLoadingTask = getDocument(source)
if (passwordCallback) {
documentLoadingTask.onPassword = passwordCallback
}
const document = await documentLoadingTask.promise
return document
}
public async sendPreprocessProgress(sourceId: string, progress: number): Promise<void> {
const mainWindow = windowService.getMainWindow()
mainWindow?.webContents.send('file-preprocess-progress', {
itemId: sourceId,
progress: progress
})
}
/**
 * Move the given files into the attachments directory Data/Files/{fileId}.
 * @param fileId the file id, used as the directory name
 * @param filePaths absolute paths of the files to move
 * @returns the new paths of the moved files
 */
public moveToAttachmentsDir(fileId: string, filePaths: string[]): string[] {
const attachmentsPath = path.join(this.storageDir, fileId)
if (!fs.existsSync(attachmentsPath)) {
fs.mkdirSync(attachmentsPath, { recursive: true })
}
const movedPaths: string[] = []
for (const filePath of filePaths) {
if (fs.existsSync(filePath)) {
const fileName = path.basename(filePath)
const destPath = path.join(attachmentsPath, fileName)
fs.copyFileSync(filePath, destPath)
fs.unlinkSync(filePath) // delete the original file to complete the "move"
movedPaths.push(destPath)
}
}
return movedPaths
}
}
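
The abstract class above only forces subclasses to implement `parseFile` and `checkQuota`; the cache check, progress events, and attachment moves are inherited. A minimal concrete subclass could look like the following sketch (the provider and its trivial "conversion" are placeholders, not an existing implementation):

// Hypothetical provider showing the minimum surface a BasePreprocessProvider
// subclass has to supply. The "conversion" just copies the source file to .md.
import fs from 'node:fs'
import path from 'node:path'
import { FileMetadata, PreprocessProvider } from '@types'
import BasePreprocessProvider from './BasePreprocessProvider'

export default class ExamplePreprocessProvider extends BasePreprocessProvider {
  constructor(provider: PreprocessProvider, userId?: string) {
    super(provider, userId)
  }

  public async parseFile(sourceId: string, file: FileMetadata): Promise<{ processedFile: FileMetadata }> {
    await this.sendPreprocessProgress(sourceId, 10)

    // Placeholder conversion: copy the source into a sibling .md file
    const mdPath = file.path.replace(path.extname(file.path), '.md')
    await fs.promises.copyFile(file.path, mdPath)

    // Move the result into Data/Files/{file.id} with the inherited helper
    const [processedPath] = this.moveToAttachmentsDir(file.id, [mdPath])
    await this.sendPreprocessProgress(sourceId, 100)

    return {
      processedFile: {
        ...file,
        name: path.basename(processedPath),
        path: processedPath,
        ext: '.md',
        size: (await fs.promises.stat(processedPath)).size
      }
    }
  }

  public async checkQuota(): Promise<number> {
    return Number.MAX_SAFE_INTEGER // this hypothetical provider has no quota
  }
}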

View File

@ -0,0 +1,16 @@
import { FileMetadata, PreprocessProvider } from '@types'
import BasePreprocessProvider from './BasePreprocessProvider'
export default class DefaultPreprocessProvider extends BasePreprocessProvider {
constructor(provider: PreprocessProvider) {
super(provider)
}
public parseFile(): Promise<{ processedFile: FileMetadata }> {
throw new Error('Method not implemented.')
}
public checkQuota(): Promise<number> {
throw new Error('Method not implemented.')
}
}

View File

@ -0,0 +1,329 @@
import fs from 'node:fs'
import path from 'node:path'
import { FileMetadata, PreprocessProvider } from '@types'
import AdmZip from 'adm-zip'
import axios, { AxiosRequestConfig } from 'axios'
import Logger from 'electron-log'
import BasePreprocessProvider from './BasePreprocessProvider'
type ApiResponse<T> = {
code: string
data: T
message?: string
}
type PreuploadResponse = {
uid: string
url: string
}
type StatusResponse = {
status: string
progress: number
}
type ParsedFileResponse = {
status: string
url: string
}
export default class Doc2xPreprocessProvider extends BasePreprocessProvider {
constructor(provider: PreprocessProvider) {
super(provider)
}
private async validateFile(filePath: string): Promise<void> {
const pdfBuffer = await fs.promises.readFile(filePath)
const doc = await this.readPdf(new Uint8Array(pdfBuffer))
// The PDF must have fewer than 1000 pages
if (doc.numPages >= 1000) {
throw new Error(`PDF page count (${doc.numPages}) exceeds the limit of 1000 pages`)
}
// The file must be smaller than 300 MB
if (pdfBuffer.length >= 300 * 1024 * 1024) {
const fileSizeMB = Math.round(pdfBuffer.length / (1024 * 1024))
throw new Error(`PDF file size (${fileSizeMB}MB) exceeds the limit of 300MB`)
}
}
public async parseFile(sourceId: string, file: FileMetadata): Promise<{ processedFile: FileMetadata }> {
try {
Logger.info(`Preprocess processing started: ${file.path}`)
// Step 1: request a pre-signed upload URL
const { uid, url } = await this.preupload()
Logger.info(`Preprocess preupload completed: uid=${uid}`)
await this.validateFile(file.path)
// Step 2: upload the file
await this.putFile(file.path, url)
// Step 3: wait for processing to finish
await this.waitForProcessing(sourceId, uid)
Logger.info(`Preprocess parsing completed successfully for: ${file.path}`)
// Step 4: export the result
const { path: outputPath } = await this.exportFile(file, uid)
// Step 5: build the processed file metadata
return {
processedFile: this.createProcessedFileInfo(file, outputPath)
}
} catch (error) {
Logger.error(
`Preprocess processing failed for ${file.path}: ${error instanceof Error ? error.message : String(error)}`
)
throw error
}
}
private createProcessedFileInfo(file: FileMetadata, outputPath: string): FileMetadata {
const outputFilePath = `${outputPath}/${file.name.split('.').slice(0, -1).join('.')}.md`
return {
...file,
name: file.name.replace('.pdf', '.md'),
path: outputFilePath,
ext: '.md',
size: fs.statSync(outputFilePath).size
}
}
/**
 * Export the parsed result for a file.
 * @param file the original file metadata
 * @param uid the parse task uid
 * @returns the directory containing the exported result
 */
public async exportFile(file: FileMetadata, uid: string): Promise<{ path: string }> {
Logger.info(`Exporting file: ${file.path}`)
// Step 1: convert the parsed file
await this.convertFile(uid, file.path)
Logger.info(`File conversion completed for: ${file.path}`)
// Step 2: wait for the export and get the download URL
const exportUrl = await this.waitForExport(uid)
// Step 3: download and extract the result
return this.downloadFile(exportUrl, file)
}
/**
 * Poll the parse status until processing succeeds or fails.
 * @param sourceId the source item ID, used for progress events
 * @param uid the parse task uid
 */
private async waitForProcessing(sourceId: string, uid: string): Promise<void> {
while (true) {
await this.delay(1000)
const { status, progress } = await this.getStatus(uid)
await this.sendPreprocessProgress(sourceId, progress)
Logger.info(`Preprocess processing status: ${status}, progress: ${progress}%`)
if (status === 'success') {
return
} else if (status === 'failed') {
throw new Error('Preprocess processing failed')
}
}
}
/**
 * Poll the export status until it succeeds or fails.
 * @param uid the parse task uid
 * @returns the download url of the exported result
 */
private async waitForExport(uid: string): Promise<string> {
while (true) {
await this.delay(1000)
const { status, url } = await this.getParsedFile(uid)
Logger.info(`Export status: ${status}`)
if (status === 'success' && url) {
return url
} else if (status === 'failed') {
throw new Error('Export failed')
}
}
}
/**
 * Request a pre-signed upload URL.
 * @returns the upload url and the parse task uid
 */
private async preupload(): Promise<PreuploadResponse> {
const config = this.createAuthConfig()
const endpoint = `${this.provider.apiHost}/api/v2/parse/preupload`
try {
const { data } = await axios.post<ApiResponse<PreuploadResponse>>(endpoint, null, config)
if (data.code === 'success' && data.data) {
return data.data
} else {
throw new Error(`API returned error: ${data.message || JSON.stringify(data)}`)
}
} catch (error) {
Logger.error(`Failed to get preupload URL: ${error instanceof Error ? error.message : String(error)}`)
throw new Error('Failed to get preupload URL')
}
}
/**
 * Upload the file to the pre-signed URL.
 * @param filePath the local file path
 * @param url the pre-signed upload url
 */
private async putFile(filePath: string, url: string): Promise<void> {
try {
const fileStream = fs.createReadStream(filePath)
const response = await axios.put(url, fileStream)
if (response.status !== 200) {
throw new Error(`HTTP status ${response.status}: ${response.statusText}`)
}
} catch (error) {
Logger.error(`Failed to upload file ${filePath}: ${error instanceof Error ? error.message : String(error)}`)
throw new Error('Failed to upload file')
}
}
private async getStatus(uid: string): Promise<StatusResponse> {
const config = this.createAuthConfig()
const endpoint = `${this.provider.apiHost}/api/v2/parse/status?uid=${uid}`
try {
const response = await axios.get<ApiResponse<StatusResponse>>(endpoint, config)
if (response.data.code === 'success' && response.data.data) {
return response.data.data
} else {
throw new Error(`API returned error: ${response.data.message || JSON.stringify(response.data)}`)
}
} catch (error) {
Logger.error(`Failed to get status for uid ${uid}: ${error instanceof Error ? error.message : String(error)}`)
throw new Error('Failed to get processing status')
}
}
/**
 * Convert the parsed file to Markdown.
 * @param uid the parse task uid
 * @param filePath the original file path, used to derive the output filename
 */
private async convertFile(uid: string, filePath: string): Promise<void> {
const fileName = path.parse(filePath).name
const config = {
...this.createAuthConfig(),
headers: {
...this.createAuthConfig().headers,
'Content-Type': 'application/json'
}
}
const payload = {
uid,
to: 'md',
formula_mode: 'normal',
filename: fileName
}
const endpoint = `${this.provider.apiHost}/api/v2/convert/parse`
try {
const response = await axios.post<ApiResponse<any>>(endpoint, payload, config)
if (response.data.code !== 'success') {
throw new Error(`API returned error: ${response.data.message || JSON.stringify(response.data)}`)
}
} catch (error) {
Logger.error(`Failed to convert file ${filePath}: ${error instanceof Error ? error.message : String(error)}`)
throw new Error('Failed to convert file')
}
}
/**
 * Query the export result.
 * @param uid the parse task uid
 * @returns the export status and download url
 */
private async getParsedFile(uid: string): Promise<ParsedFileResponse> {
const config = this.createAuthConfig()
const endpoint = `${this.provider.apiHost}/api/v2/convert/parse/result?uid=${uid}`
try {
const response = await axios.get<ApiResponse<ParsedFileResponse>>(endpoint, config)
if (response.status === 200 && response.data.data) {
return response.data.data
} else {
throw new Error(`HTTP status ${response.status}: ${response.statusText}`)
}
} catch (error) {
Logger.error(
`Failed to get parsed file for uid ${uid}: ${error instanceof Error ? error.message : String(error)}`
)
throw new Error('Failed to get parsed file information')
}
}
/**
 * Download and extract the exported archive.
 * @param url the download url
 * @param file the original file metadata
 * @returns the extraction directory path
 */
private async downloadFile(url: string, file: FileMetadata): Promise<{ path: string }> {
const dirPath = this.storageDir
// Use the unified storage path: Data/Files/{file.id}/
const extractPath = path.join(dirPath, file.id)
const zipPath = path.join(dirPath, `${file.id}.zip`)
// Make sure the directory exists
fs.mkdirSync(dirPath, { recursive: true })
fs.mkdirSync(extractPath, { recursive: true })
Logger.info(`Downloading to export path: ${zipPath}`)
try {
// Download the file
const response = await axios.get(url, { responseType: 'arraybuffer' })
fs.writeFileSync(zipPath, response.data)
// Make sure the extraction directory exists
if (!fs.existsSync(extractPath)) {
fs.mkdirSync(extractPath, { recursive: true })
}
// Extract the archive
const zip = new AdmZip(zipPath)
zip.extractAllTo(extractPath, true)
Logger.info(`Extracted files to: ${extractPath}`)
// Delete the temporary ZIP file
fs.unlinkSync(zipPath)
return { path: extractPath }
} catch (error) {
Logger.error(`Failed to download and extract file: ${error instanceof Error ? error.message : String(error)}`)
throw new Error('Failed to download and extract file')
}
}
private createAuthConfig(): AxiosRequestConfig {
return {
headers: {
Authorization: `Bearer ${this.provider.apiKey}`
}
}
}
public checkQuota(): Promise<number> {
throw new Error('Method not implemented.')
}
}
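
One thing worth noting about the provider above: `waitForProcessing` and `waitForExport` poll once per second with no upper bound, so a task that never reaches `success` or `failed` would spin forever (the MinerU provider below caps retries instead). A hedged sketch of the same loop with a deadline, shown only as an illustration and not part of the actual implementation:

// Illustrative variant of the Doc2x status polling with a hard deadline.
// pollStatus mirrors the getStatus()/sendPreprocessProgress() contract above;
// the ten-minute timeout is an arbitrary example value.
async function pollUntilDone(
  pollStatus: () => Promise<{ status: string; progress: number }>,
  reportProgress: (progress: number) => Promise<void>,
  timeoutMs = 10 * 60 * 1000,
  intervalMs = 1000
): Promise<void> {
  const deadline = Date.now() + timeoutMs
  while (Date.now() < deadline) {
    await new Promise((resolve) => setTimeout(resolve, intervalMs))
    const { status, progress } = await pollStatus()
    await reportProgress(progress)
    if (status === 'success') return
    if (status === 'failed') throw new Error('Preprocess processing failed')
  }
  throw new Error('Preprocess processing timed out')
}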

View File

@ -0,0 +1,394 @@
import fs from 'node:fs'
import path from 'node:path'
import { FileMetadata, PreprocessProvider } from '@types'
import AdmZip from 'adm-zip'
import axios from 'axios'
import Logger from 'electron-log'
import BasePreprocessProvider from './BasePreprocessProvider'
type ApiResponse<T> = {
code: number
data: T
msg?: string
trace_id?: string
}
type BatchUploadResponse = {
batch_id: string
file_urls: string[]
}
type ExtractProgress = {
extracted_pages: number
total_pages: number
start_time: string
}
type ExtractFileResult = {
file_name: string
state: 'done' | 'waiting-file' | 'pending' | 'running' | 'converting' | 'failed'
err_msg: string
full_zip_url?: string
extract_progress?: ExtractProgress
}
type ExtractResultResponse = {
batch_id: string
extract_result: ExtractFileResult[]
}
type QuotaResponse = {
code: number
data: {
user_left_quota: number
total_left_quota: number
}
msg?: string
trace_id?: string
}
export default class MineruPreprocessProvider extends BasePreprocessProvider {
constructor(provider: PreprocessProvider, userId?: string) {
super(provider, userId)
// TODO: remove after the free-tier period ends
this.provider.apiKey = this.provider.apiKey || import.meta.env.MAIN_VITE_MINERU_API_KEY
}
public async parseFile(
sourceId: string,
file: FileMetadata
): Promise<{ processedFile: FileMetadata; quota: number }> {
try {
Logger.info(`MinerU preprocess processing started: ${file.path}`)
await this.validateFile(file.path)
// 1. Get the upload URL and upload the file
const batchId = await this.uploadFile(file)
Logger.info(`MinerU file upload completed: batch_id=${batchId}`)
// 2. Wait for processing to finish and get the result
const extractResult = await this.waitForCompletion(sourceId, batchId, file.origin_name)
Logger.info(`MinerU processing completed for batch: ${batchId}`)
// 3. Download and extract the result
const { path: outputPath } = await this.downloadAndExtractFile(extractResult.full_zip_url!, file)
// 4. check quota
const quota = await this.checkQuota()
// 5. Build the processed file metadata
return {
processedFile: this.createProcessedFileInfo(file, outputPath),
quota
}
} catch (error: any) {
Logger.error(`MinerU preprocess processing failed for ${file.path}: ${error.message}`)
throw new Error(error.message)
}
}
public async checkQuota() {
try {
const quota = await fetch(`${this.provider.apiHost}/api/v4/quota`, {
method: 'GET',
headers: {
'Content-Type': 'application/json',
Authorization: `Bearer ${this.provider.apiKey}`,
token: this.userId ?? ''
}
})
if (!quota.ok) {
throw new Error(`HTTP ${quota.status}: ${quota.statusText}`)
}
const response: QuotaResponse = await quota.json()
return response.data.user_left_quota
} catch (error) {
console.error('Error checking quota:', error)
throw error
}
}
private async validateFile(filePath: string): Promise<void> {
const pdfBuffer = await fs.promises.readFile(filePath)
const doc = await this.readPdf(new Uint8Array(pdfBuffer))
// The PDF must have fewer than 600 pages
if (doc.numPages >= 600) {
throw new Error(`PDF page count (${doc.numPages}) exceeds the limit of 600 pages`)
}
// The file must be smaller than 200 MB
if (pdfBuffer.length >= 200 * 1024 * 1024) {
const fileSizeMB = Math.round(pdfBuffer.length / (1024 * 1024))
throw new Error(`PDF file size (${fileSizeMB}MB) exceeds the limit of 200MB`)
}
}
private createProcessedFileInfo(file: FileMetadata, outputPath: string): FileMetadata {
// Find the main file in the extracted output
let finalPath = ''
let finalName = file.origin_name.replace('.pdf', '.md')
try {
const files = fs.readdirSync(outputPath)
const mdFile = files.find((f) => f.endsWith('.md'))
if (mdFile) {
const originalMdPath = path.join(outputPath, mdFile)
const newMdPath = path.join(outputPath, finalName)
// Rename the file to match the original filename
try {
fs.renameSync(originalMdPath, newMdPath)
finalPath = newMdPath
Logger.info(`Renamed markdown file from ${mdFile} to ${finalName}`)
} catch (renameError) {
Logger.warn(`Failed to rename file ${mdFile} to ${finalName}: ${renameError}`)
// If renaming fails, fall back to the original file
finalPath = originalMdPath
finalName = mdFile
}
}
} catch (error) {
Logger.warn(`Failed to read output directory ${outputPath}: ${error}`)
finalPath = path.join(outputPath, `${file.id}.md`)
}
return {
...file,
name: finalName,
path: finalPath,
ext: '.md',
size: fs.existsSync(finalPath) ? fs.statSync(finalPath).size : 0
}
}
private async downloadAndExtractFile(zipUrl: string, file: FileMetadata): Promise<{ path: string }> {
const dirPath = this.storageDir
const zipPath = path.join(dirPath, `${file.id}.zip`)
const extractPath = path.join(dirPath, `${file.id}`)
Logger.info(`Downloading MinerU result to: ${zipPath}`)
try {
// Download the ZIP file
const response = await axios.get(zipUrl, { responseType: 'arraybuffer' })
fs.writeFileSync(zipPath, response.data)
Logger.info(`Downloaded ZIP file: ${zipPath}`)
// Make sure the extraction directory exists
if (!fs.existsSync(extractPath)) {
fs.mkdirSync(extractPath, { recursive: true })
}
// Extract the archive
const zip = new AdmZip(zipPath)
zip.extractAllTo(extractPath, true)
Logger.info(`Extracted files to: ${extractPath}`)
// Delete the temporary ZIP file
fs.unlinkSync(zipPath)
return { path: extractPath }
} catch (error: any) {
Logger.error(`Failed to download and extract file: ${error.message}`)
throw new Error(error.message)
}
}
private async uploadFile(file: FileMetadata): Promise<string> {
try {
// Step 1: get the upload URL
const { batchId, fileUrls } = await this.getBatchUploadUrls(file)
Logger.info(`Got upload URLs for batch: ${batchId}`)
console.log('batchId:', batchId, 'fileurls:', fileUrls)
// Step 2: upload the file to that URL
await this.putFileToUrl(file.path, fileUrls[0])
Logger.info(`File uploaded successfully: ${file.path}`)
return batchId
} catch (error: any) {
Logger.error(`Failed to upload file ${file.path}: ${error.message}`)
throw new Error(error.message)
}
}
private async getBatchUploadUrls(file: FileMetadata): Promise<{ batchId: string; fileUrls: string[] }> {
const endpoint = `${this.provider.apiHost}/api/v4/file-urls/batch`
const payload = {
language: 'auto',
enable_formula: true,
enable_table: true,
files: [
{
name: file.origin_name,
is_ocr: true,
data_id: file.id
}
]
}
try {
const response = await fetch(endpoint, {
method: 'POST',
headers: {
'Content-Type': 'application/json',
Authorization: `Bearer ${this.provider.apiKey}`,
token: this.userId ?? '',
Accept: '*/*'
},
body: JSON.stringify(payload)
})
if (response.ok) {
const data: ApiResponse<BatchUploadResponse> = await response.json()
if (data.code === 0 && data.data) {
const { batch_id, file_urls } = data.data
return {
batchId: batch_id,
fileUrls: file_urls
}
} else {
throw new Error(`API returned error: ${data.msg || JSON.stringify(data)}`)
}
} else {
throw new Error(`HTTP ${response.status}: ${response.statusText}`)
}
} catch (error: any) {
Logger.error(`Failed to get batch upload URLs: ${error.message}`)
throw new Error(error.message)
}
}
private async putFileToUrl(filePath: string, uploadUrl: string): Promise<void> {
try {
const fileBuffer = await fs.promises.readFile(filePath)
const response = await fetch(uploadUrl, {
method: 'PUT',
body: fileBuffer,
headers: {
'Content-Type': 'application/pdf'
}
// headers: {
// 'Content-Length': fileBuffer.length.toString()
// }
})
if (!response.ok) {
// Clone the response to avoid consuming the body stream
const responseClone = response.clone()
try {
const responseBody = await responseClone.text()
const errorInfo = {
status: response.status,
statusText: response.statusText,
url: response.url,
type: response.type,
redirected: response.redirected,
headers: Object.fromEntries(response.headers.entries()),
body: responseBody
}
console.error('Response details:', errorInfo)
throw new Error(`Upload failed with status ${response.status}: ${responseBody}`)
} catch (parseError) {
throw new Error(`Upload failed with status ${response.status}. Could not parse response body.`)
}
}
Logger.info(`File uploaded successfully to: ${uploadUrl}`)
} catch (error: any) {
Logger.error(`Failed to upload file to URL ${uploadUrl}: ${error}`)
throw new Error(error.message)
}
}
private async getExtractResults(batchId: string): Promise<ExtractResultResponse> {
const endpoint = `${this.provider.apiHost}/api/v4/extract-results/batch/${batchId}`
try {
const response = await fetch(endpoint, {
method: 'GET',
headers: {
'Content-Type': 'application/json',
Authorization: `Bearer ${this.provider.apiKey}`,
token: this.userId ?? ''
}
})
if (response.ok) {
const data: ApiResponse<ExtractResultResponse> = await response.json()
if (data.code === 0 && data.data) {
return data.data
} else {
throw new Error(`API returned error: ${data.msg || JSON.stringify(data)}`)
}
} else {
throw new Error(`HTTP ${response.status}: ${response.statusText}`)
}
} catch (error: any) {
Logger.error(`Failed to get extract results for batch ${batchId}: ${error.message}`)
throw new Error(error.message)
}
}
private async waitForCompletion(
sourceId: string,
batchId: string,
fileName: string,
maxRetries: number = 60,
intervalMs: number = 5000
): Promise<ExtractFileResult> {
let retries = 0
while (retries < maxRetries) {
try {
const result = await this.getExtractResults(batchId)
// Find the result entry for this file
const fileResult = result.extract_result.find((item) => item.file_name === fileName)
if (!fileResult) {
throw new Error(`File ${fileName} not found in batch results`)
}
// Check the processing state
if (fileResult.state === 'done' && fileResult.full_zip_url) {
Logger.info(`Processing completed for file: ${fileName}`)
return fileResult
} else if (fileResult.state === 'failed') {
throw new Error(`Processing failed for file: ${fileName}, error: ${fileResult.err_msg}`)
} else if (fileResult.state === 'running') {
// Send a progress update
if (fileResult.extract_progress) {
const progress = Math.round(
(fileResult.extract_progress.extracted_pages / fileResult.extract_progress.total_pages) * 100
)
await this.sendPreprocessProgress(sourceId, progress)
Logger.info(`File ${fileName} processing progress: ${progress}%`)
} else {
// If no detailed progress is available, send a generic progress value
await this.sendPreprocessProgress(sourceId, 50)
Logger.info(`File ${fileName} is still processing...`)
}
}
} catch (error) {
Logger.warn(`Failed to check status for batch ${batchId}, retry ${retries + 1}/${maxRetries}`)
if (retries === maxRetries - 1) {
throw error
}
}
retries++
await new Promise((resolve) => setTimeout(resolve, intervalMs))
}
throw new Error(`Processing timeout for batch: ${batchId}`)
}
}

View File

@ -0,0 +1,187 @@
import fs from 'node:fs'
import { MistralClientManager } from '@main/services/MistralClientManager'
import { MistralService } from '@main/services/remotefile/MistralService'
import { Mistral } from '@mistralai/mistralai'
import { DocumentURLChunk } from '@mistralai/mistralai/models/components/documenturlchunk'
import { ImageURLChunk } from '@mistralai/mistralai/models/components/imageurlchunk'
import { OCRResponse } from '@mistralai/mistralai/models/components/ocrresponse'
import { FileMetadata, FileTypes, PreprocessProvider, Provider } from '@types'
import Logger from 'electron-log'
import path from 'path'
import BasePreprocessProvider from './BasePreprocessProvider'
type PreuploadResponse = DocumentURLChunk | ImageURLChunk
export default class MistralPreprocessProvider extends BasePreprocessProvider {
private sdk: Mistral
private fileService: MistralService
constructor(provider: PreprocessProvider) {
super(provider)
const clientManager = MistralClientManager.getInstance()
const aiProvider: Provider = {
id: provider.id,
type: 'mistral',
name: provider.name,
apiKey: provider.apiKey!,
apiHost: provider.apiHost!,
models: []
}
clientManager.initializeClient(aiProvider)
this.sdk = clientManager.getClient()
this.fileService = new MistralService(aiProvider)
}
private async preupload(file: FileMetadata): Promise<PreuploadResponse> {
let document: PreuploadResponse
Logger.info(`preprocess preupload started for local file: ${file.path}`)
if (file.ext.toLowerCase() === '.pdf') {
const uploadResponse = await this.fileService.uploadFile(file)
if (uploadResponse.status === 'failed') {
Logger.error('File upload failed:', uploadResponse)
throw new Error('Failed to upload file: ' + uploadResponse.displayName)
}
await this.sendPreprocessProgress(file.id, 15)
const fileUrl = await this.sdk.files.getSignedUrl({
fileId: uploadResponse.fileId
})
Logger.info('Got signed URL:', fileUrl)
await this.sendPreprocessProgress(file.id, 20)
document = {
type: 'document_url',
documentUrl: fileUrl.url
}
} else {
const base64Image = Buffer.from(fs.readFileSync(file.path)).toString('base64')
document = {
type: 'image_url',
imageUrl: `data:image/png;base64,${base64Image}`
}
}
if (!document) {
throw new Error('Unsupported file type')
}
return document
}
public async parseFile(sourceId: string, file: FileMetadata): Promise<{ processedFile: FileMetadata }> {
try {
const document = await this.preupload(file)
const result = await this.sdk.ocr.process({
model: this.provider.model!,
document: document,
includeImageBase64: true
})
if (result) {
await this.sendPreprocessProgress(sourceId, 100)
const processedFile = this.convertFile(result, file)
return {
processedFile
}
} else {
throw new Error('preprocess processing failed: OCR response is empty')
}
} catch (error) {
throw new Error('preprocess processing failed: ' + error)
}
}
private convertFile(result: OCRResponse, file: FileMetadata): FileMetadata {
// Use the unified storage path: Data/Files/{file.id}/
const conversionId = file.id
const outputPath = path.join(this.storageDir, file.id)
// const outputPath = this.storageDir
const outputFileName = path.basename(file.path, path.extname(file.path))
fs.mkdirSync(outputPath, { recursive: true })
const markdownParts: string[] = []
let counter = 0
// Process each page
result.pages.forEach((page) => {
let pageMarkdown = page.markdown
// Process images from this page
page.images.forEach((image) => {
if (image.imageBase64) {
let imageFormat = 'jpeg' // default format
let imageBase64Data = image.imageBase64
// Check for data URL prefix more efficiently
const prefixEnd = image.imageBase64.indexOf(';base64,')
if (prefixEnd > 0) {
const prefix = image.imageBase64.substring(0, prefixEnd)
const formatIndex = prefix.indexOf('image/')
if (formatIndex >= 0) {
imageFormat = prefix.substring(formatIndex + 6)
}
imageBase64Data = image.imageBase64.substring(prefixEnd + 8)
}
const imageFileName = `img-${counter}.${imageFormat}`
const imagePath = path.join(outputPath, imageFileName)
// Save image file
try {
fs.writeFileSync(imagePath, Buffer.from(imageBase64Data, 'base64'))
// Update image reference in markdown
// Use relative path for better portability
const relativeImagePath = `./${imageFileName}`
// Find the start and end of the image markdown
const imgStart = pageMarkdown.indexOf(image.imageBase64)
if (imgStart >= 0) {
// Find the markdown image syntax around this base64
const mdStart = pageMarkdown.lastIndexOf('![', imgStart)
const mdEnd = pageMarkdown.indexOf(')', imgStart)
if (mdStart >= 0 && mdEnd >= 0) {
// Replace just this specific image reference
pageMarkdown =
pageMarkdown.substring(0, mdStart) +
`![Image ${counter}](${relativeImagePath})` +
pageMarkdown.substring(mdEnd + 1)
}
}
counter++
} catch (error) {
Logger.error(`Failed to save image ${imageFileName}:`, error)
}
}
})
markdownParts.push(pageMarkdown)
})
// Combine all markdown content with double newlines for readability
const combinedMarkdown = markdownParts.join('\n\n')
// Write the markdown content to a file
const mdFileName = `${outputFileName}.md`
const mdFilePath = path.join(outputPath, mdFileName)
fs.writeFileSync(mdFilePath, combinedMarkdown)
return {
id: conversionId,
name: file.name.replace(/\.[^/.]+$/, '.md'),
origin_name: file.origin_name,
path: mdFilePath,
created_at: new Date().toISOString(),
type: FileTypes.DOCUMENT,
ext: '.md',
size: fs.statSync(mdFilePath).size,
count: 1
} as FileMetadata
}
public checkQuota(): Promise<number> {
throw new Error('Method not implemented.')
}
}

View File

@ -0,0 +1,30 @@
import { FileMetadata, PreprocessProvider as Provider } from '@types'
import BasePreprocessProvider from './BasePreprocessProvider'
import PreprocessProviderFactory from './PreprocessProviderFactory'
export default class PreprocessProvider {
private sdk: BasePreprocessProvider
constructor(provider: Provider, userId?: string) {
this.sdk = PreprocessProviderFactory.create(provider, userId)
}
public async parseFile(
sourceId: string,
file: FileMetadata
): Promise<{ processedFile: FileMetadata; quota?: number }> {
return this.sdk.parseFile(sourceId, file)
}
public async checkQuota(): Promise<number> {
return this.sdk.checkQuota()
}
/**
 * Check whether this file has already been preprocessed.
 * @param file the original file metadata
 * @returns the processed file metadata, or null if the file has not been preprocessed yet
 */
public async checkIfAlreadyProcessed(file: FileMetadata): Promise<FileMetadata | null> {
return this.sdk.checkIfAlreadyProcessed(file)
}
}

View File

@ -0,0 +1,21 @@
import { PreprocessProvider } from '@types'
import BasePreprocessProvider from './BasePreprocessProvider'
import DefaultPreprocessProvider from './DefaultPreprocessProvider'
import Doc2xPreprocessProvider from './Doc2xPreprocessProvider'
import MineruPreprocessProvider from './MineruPreprocessProvider'
import MistralPreprocessProvider from './MistralPreprocessProvider'
export default class PreprocessProviderFactory {
static create(provider: PreprocessProvider, userId?: string): BasePreprocessProvider {
switch (provider.id) {
case 'doc2x':
return new Doc2xPreprocessProvider(provider)
case 'mistral':
return new MistralPreprocessProvider(provider)
case 'mineru':
return new MineruPreprocessProvider(provider, userId)
default:
return new DefaultPreprocessProvider(provider)
}
}
}
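
As with the OCR classes, callers are expected to go through the `PreprocessProvider` facade a few files up; the factory above only selects the concrete implementation by `provider.id`. A minimal sketch of the intended call pattern (the config values are placeholders, not real hosts or credentials):

// Hypothetical caller of the preprocess facade, e.g. from a knowledge-base
// ingestion path. apiHost/apiKey are illustrative placeholders.
import PreprocessProvider from './PreprocessProvider'
import type { FileMetadata, PreprocessProvider as PreprocessProviderConfig } from '@types'

export async function preprocessPdf(sourceId: string, file: FileMetadata, userId?: string) {
  const config = {
    id: 'mineru',
    name: 'MinerU',
    apiHost: 'https://example.invalid',
    apiKey: 'placeholder'
  } as PreprocessProviderConfig

  const preprocess = new PreprocessProvider(config, userId)

  // Skip the remote round-trip when a previous run already produced results
  const cached = await preprocess.checkIfAlreadyProcessed(file)
  if (cached) {
    return { processedFile: cached }
  }

  // Returns { processedFile, quota? }; quota is the remaining MinerU allowance
  return preprocess.parseFile(sourceId, file)
}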

View File

@ -1,19 +1,15 @@
import type { BaseEmbeddings } from '@cherrystudio/embedjs-interfaces'
import { KnowledgeBaseParams } from '@types'
import { ApiClient } from '@types'
import EmbeddingsFactory from './EmbeddingsFactory'
export default class Embeddings {
private sdk: BaseEmbeddings
constructor({ model, provider, apiKey, apiVersion, baseURL, dimensions }: KnowledgeBaseParams) {
constructor({ embedApiClient, dimensions }: { embedApiClient: ApiClient; dimensions?: number }) {
this.sdk = EmbeddingsFactory.create({
model,
provider,
apiKey,
apiVersion,
baseURL,
embedApiClient,
dimensions
} as KnowledgeBaseParams)
})
}
public async init(): Promise<void> {
return this.sdk.init()

View File

@ -3,28 +3,22 @@ import { OllamaEmbeddings } from '@cherrystudio/embedjs-ollama'
import { OpenAiEmbeddings } from '@cherrystudio/embedjs-openai'
import { AzureOpenAiEmbeddings } from '@cherrystudio/embedjs-openai/src/azure-openai-embeddings'
import { getInstanceName } from '@main/utils'
import { KnowledgeBaseParams } from '@types'
import { ApiClient } from '@types'
import { SUPPORTED_DIM_MODELS as VOYAGE_SUPPORTED_DIM_MODELS, VoyageEmbeddings } from './VoyageEmbeddings'
import { VOYAGE_SUPPORTED_DIM_MODELS } from './utils'
import { VoyageEmbeddings } from './VoyageEmbeddings'
export default class EmbeddingsFactory {
static create({ model, provider, apiKey, apiVersion, baseURL, dimensions }: KnowledgeBaseParams): BaseEmbeddings {
static create({ embedApiClient, dimensions }: { embedApiClient: ApiClient; dimensions?: number }): BaseEmbeddings {
const batchSize = 10
const { model, provider, apiKey, apiVersion, baseURL } = embedApiClient
if (provider === 'voyageai') {
if (VOYAGE_SUPPORTED_DIM_MODELS.includes(model)) {
return new VoyageEmbeddings({
modelName: model,
apiKey,
outputDimension: dimensions,
batchSize: 8
})
} else {
return new VoyageEmbeddings({
modelName: model,
apiKey,
batchSize: 8
})
}
return new VoyageEmbeddings({
modelName: model,
apiKey,
outputDimension: VOYAGE_SUPPORTED_DIM_MODELS.includes(model) ? dimensions : undefined,
batchSize: 8
})
}
if (provider === 'ollama') {
if (baseURL.includes('v1/')) {

View File

@ -1,27 +1,29 @@
import { BaseEmbeddings } from '@cherrystudio/embedjs-interfaces'
import { VoyageEmbeddings as _VoyageEmbeddings } from '@langchain/community/embeddings/voyage'
import { VOYAGE_SUPPORTED_DIM_MODELS } from './utils'
/**
 * Voyage models that support a custom output dimension.
 */
export const SUPPORTED_DIM_MODELS = ['voyage-3-large', 'voyage-3.5', 'voyage-3.5-lite', 'voyage-code-3']
export class VoyageEmbeddings extends BaseEmbeddings {
private model: _VoyageEmbeddings
constructor(private readonly configuration?: ConstructorParameters<typeof _VoyageEmbeddings>[0]) {
super()
if (!this.configuration) this.configuration = {}
if (!this.configuration.modelName) this.configuration.modelName = 'voyage-3'
if (!SUPPORTED_DIM_MODELS.includes(this.configuration.modelName) && this.configuration.outputDimension) {
throw new Error(`VoyageEmbeddings only supports ${SUPPORTED_DIM_MODELS.join(', ')}`)
if (!this.configuration) {
throw new Error('Pass in a configuration.')
}
if (!this.configuration.modelName) this.configuration.modelName = 'voyage-3'
this.model = new _VoyageEmbeddings(this.configuration)
if (!VOYAGE_SUPPORTED_DIM_MODELS.includes(this.configuration.modelName) && this.configuration.outputDimension) {
console.error(`VoyageEmbeddings only supports ${VOYAGE_SUPPORTED_DIM_MODELS.join(', ')} to set outputDimension.`)
this.model = new _VoyageEmbeddings({ ...this.configuration, outputDimension: undefined })
} else {
this.model = new _VoyageEmbeddings(this.configuration)
}
}
override async getDimensions(): Promise<number> {
if (!this.configuration?.outputDimension) {
throw new Error('You need to pass in the optional dimensions parameter for this model')
}
return this.configuration?.outputDimension
return this.configuration?.outputDimension ?? (this.configuration?.modelName === 'voyage-code-2' ? 1536 : 1024)
}
override async embedDocuments(texts: string[]): Promise<number[][]> {

View File

@ -0,0 +1,45 @@
export const VOYAGE_SUPPORTED_DIM_MODELS = ['voyage-3-large', 'voyage-3.5', 'voyage-3.5-lite', 'voyage-code-3']
// NOTE: the constants below are not used yet, but keep them for now
export const OPENAI_SUPPORTED_DIM_MODELS = ['text-embedding-3-small', 'text-embedding-3-large']
export const DASHSCOPE_SUPPORTED_DIM_MODELS = ['text-embedding-v3', 'text-embedding-v4']
export const OPENSOURCE_SUPPORTED_DIM_MODELS = ['qwen3-embedding-0.6B', 'qwen3-embedding-4B', 'qwen3-embedding-8B']
export const GOOGLE_SUPPORTED_DIM_MODELS = ['gemini-embedding-exp-03-07', 'gemini-embedding-exp']
export const SUPPORTED_DIM_MODELS = [
...VOYAGE_SUPPORTED_DIM_MODELS,
...OPENAI_SUPPORTED_DIM_MODELS,
...DASHSCOPE_SUPPORTED_DIM_MODELS,
...OPENSOURCE_SUPPORTED_DIM_MODELS,
...GOOGLE_SUPPORTED_DIM_MODELS
]
/**
 * Get the base model name from a model ID, e.g.:
 * - 'deepseek/deepseek-r1' => 'deepseek-r1'
 * - 'deepseek-ai/deepseek/deepseek-r1' => 'deepseek-r1'
 * @param {string} id the full model ID
 * @param {string} [delimiter='/'] the delimiter, defaults to '/'
 * @returns {string} the base model name
 */
export const getBaseModelName = (id: string, delimiter: string = '/'): string => {
const parts = id.split(delimiter)
return parts[parts.length - 1]
}
/**
 * Get the lower-cased base model name from a model ID, e.g.:
 * - 'deepseek/DeepSeek-R1' => 'deepseek-r1'
 * - 'deepseek-ai/deepseek/DeepSeek-R1' => 'deepseek-r1'
 * @param {string} id the full model ID
 * @param {string} [delimiter='/'] the delimiter, defaults to '/'
 * @returns {string} the lower-cased base model name
 */
export const getLowerBaseModelName = (id: string, delimiter: string = '/'): string => {
return getBaseModelName(id, delimiter).toLowerCase()
}
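
For reference, the two helpers only differ in casing; both keep everything after the last delimiter. Expected behaviour, assuming this file is imported as `./utils` the same way the embeddings factory does:

// Illustrative assertions for the helpers above.
import assert from 'node:assert'
import { getBaseModelName, getLowerBaseModelName } from './utils'

assert.strictEqual(getBaseModelName('deepseek-ai/deepseek/DeepSeek-R1'), 'DeepSeek-R1')
assert.strictEqual(getLowerBaseModelName('deepseek/DeepSeek-R1'), 'deepseek-r1')
assert.strictEqual(getLowerBaseModelName('gpt-4o'), 'gpt-4o') // no delimiter: id returned unchanged (lower-cased)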

View File

@ -1,10 +1,9 @@
import * as fs from 'node:fs'
import { JsonLoader, LocalPathLoader, RAGApplication, TextLoader } from '@cherrystudio/embedjs'
import type { AddLoaderReturn } from '@cherrystudio/embedjs-interfaces'
import { WebLoader } from '@cherrystudio/embedjs-loader-web'
import { readTextFileWithAutoEncoding } from '@main/utils/file'
import { LoaderReturn } from '@shared/config/types'
import { FileType, KnowledgeBaseParams } from '@types'
import { FileMetadata, KnowledgeBaseParams } from '@types'
import Logger from 'electron-log'
import { DraftsExportLoader } from './draftsExportLoader'
@ -39,7 +38,7 @@ const FILE_LOADER_MAP: Record<string, string> = {
export async function addOdLoader(
ragApplication: RAGApplication,
file: FileType,
file: FileMetadata,
base: KnowledgeBaseParams,
forceReload: boolean
): Promise<AddLoaderReturn> {
@ -65,7 +64,7 @@ export async function addOdLoader(
export async function addFileLoader(
ragApplication: RAGApplication,
file: FileType,
file: FileMetadata,
base: KnowledgeBaseParams,
forceReload: boolean
): Promise<LoaderReturn> {
@ -115,7 +114,7 @@ export async function addFileLoader(
// HTML file handling
loaderReturn = await ragApplication.addLoader(
new WebLoader({
urlOrContent: fs.readFileSync(file.path, 'utf-8'),
urlOrContent: await readTextFileWithAutoEncoding(file.path),
chunkSize: base.chunkSize,
chunkOverlap: base.chunkOverlap
}) as any,
@ -125,7 +124,7 @@ export async function addFileLoader(
case 'json':
try {
jsonObject = JSON.parse(fs.readFileSync(file.path, 'utf-8'))
jsonObject = JSON.parse(await readTextFileWithAutoEncoding(file.path))
} catch (error) {
jsonParsed = false
Logger.warn('[KnowledgeBase] failed parsing json file, falling back to text processing:', file.path, error)
@ -141,7 +140,7 @@ export async function addFileLoader(
// For other text types, read the file if it has not been read yet
loaderReturn = await ragApplication.addLoader(
new TextLoader({
text: fs.readFileSync(file.path, 'utf-8'),
text: await readTextFileWithAutoEncoding(file.path),
chunkSize: base.chunkSize,
chunkOverlap: base.chunkOverlap
}) as any,

View File

@ -5,7 +5,7 @@ export default abstract class BaseReranker {
protected base: KnowledgeBaseParams
constructor(base: KnowledgeBaseParams) {
if (!base.rerankModel) {
if (!base.rerankApiClient) {
throw new Error('Rerank model is required')
}
this.base = base
@ -17,11 +17,11 @@ export default abstract class BaseReranker {
* Get Rerank Request Url
*/
protected getRerankUrl() {
if (this.base.rerankModelProvider === 'bailian') {
if (this.base.rerankApiClient?.provider === 'bailian') {
return 'https://dashscope.aliyuncs.com/api/v1/services/rerank/text-rerank/text-rerank'
}
let baseURL = this.base.rerankBaseURL
let baseURL = this.base.rerankApiClient?.baseURL
if (baseURL && baseURL.endsWith('/')) {
// A trailing `/` forces the rerank baseURL to be used as-is
@ -39,20 +39,20 @@ export default abstract class BaseReranker {
* Get Rerank Request Body
*/
protected getRerankRequestBody(query: string, searchResults: ExtractChunkData[]) {
const provider = this.base.rerankModelProvider
const provider = this.base.rerankApiClient?.provider
const documents = searchResults.map((doc) => doc.pageContent)
const topN = this.base.documentCount
if (provider === 'voyageai') {
return {
model: this.base.rerankModel,
model: this.base.rerankApiClient?.model,
query,
documents,
top_k: topN
}
} else if (provider === 'bailian') {
return {
model: this.base.rerankModel,
model: this.base.rerankApiClient?.model,
input: {
query,
documents
@ -69,7 +69,7 @@ export default abstract class BaseReranker {
}
} else {
return {
model: this.base.rerankModel,
model: this.base.rerankApiClient?.model,
query,
documents,
top_n: topN
@ -81,7 +81,7 @@ export default abstract class BaseReranker {
* Extract Rerank Result
*/
protected extractRerankResult(data: any) {
const provider = this.base.rerankModelProvider
const provider = this.base.rerankApiClient?.provider
if (provider === 'bailian') {
return data.output.results
} else if (provider === 'voyageai') {
@ -129,7 +129,7 @@ export default abstract class BaseReranker {
public defaultHeaders() {
return {
Authorization: `Bearer ${this.base.rerankApiKey}`,
Authorization: `Bearer ${this.base.rerankApiClient?.apiKey}`,
'Content-Type': 'application/json'
}
}

View File

@ -1,6 +1,6 @@
import { ExtractChunkData } from '@cherrystudio/embedjs-interfaces'
import AxiosProxy from '@main/services/AxiosProxy'
import { KnowledgeBaseParams } from '@types'
import axios from 'axios'
import BaseReranker from './BaseReranker'
@ -15,7 +15,7 @@ export default class GeneralReranker extends BaseReranker {
const requestBody = this.getRerankRequestBody(query, searchResults)
try {
const { data } = await AxiosProxy.axios.post(url, requestBody, { headers: this.defaultHeaders() })
const { data } = await axios.post(url, requestBody, { headers: this.defaultHeaders() })
const rerankResults = this.extractRerankResult(data)
return this.getRerankResult(searchResults, rerankResults)

View File

@ -0,0 +1,81 @@
import { isDev, isLinux, isMac, isWin } from '@main/constant'
import { app } from 'electron'
import log from 'electron-log'
import fs from 'fs'
import os from 'os'
import path from 'path'
export class AppService {
private static instance: AppService
private constructor() {
// Private constructor to prevent direct instantiation
}
public static getInstance(): AppService {
if (!AppService.instance) {
AppService.instance = new AppService()
}
return AppService.instance
}
public async setAppLaunchOnBoot(isLaunchOnBoot: boolean): Promise<void> {
// Set login item settings for windows and mac
// linux is not supported because it requires more file operations
if (isWin || isMac) {
app.setLoginItemSettings({ openAtLogin: isLaunchOnBoot })
} else if (isLinux) {
try {
const autostartDir = path.join(os.homedir(), '.config', 'autostart')
const desktopFile = path.join(autostartDir, isDev ? 'cherry-studio-dev.desktop' : 'cherry-studio.desktop')
if (isLaunchOnBoot) {
// Ensure autostart directory exists
try {
await fs.promises.access(autostartDir)
} catch {
await fs.promises.mkdir(autostartDir, { recursive: true })
}
// Get executable path
let executablePath = app.getPath('exe')
if (process.env.APPIMAGE) {
// For AppImage packaged apps, use APPIMAGE environment variable
executablePath = process.env.APPIMAGE
}
// Create desktop file content
const desktopContent = `[Desktop Entry]
Type=Application
Name=Cherry Studio
Comment=A powerful AI assistant for producer.
Exec=${executablePath}
Icon=cherrystudio
Terminal=false
StartupNotify=false
Categories=Development;Utility;
X-GNOME-Autostart-enabled=true
Hidden=false`
// Write desktop file
await fs.promises.writeFile(desktopFile, desktopContent)
log.info('Created autostart desktop file for Linux')
} else {
// Remove desktop file
try {
await fs.promises.access(desktopFile)
await fs.promises.unlink(desktopFile)
log.info('Removed autostart desktop file for Linux')
} catch {
// File doesn't exist, no need to remove
}
}
} catch (error) {
log.error('Failed to set launch on boot for Linux:', error)
}
}
}
}
// Default export as singleton instance
export default AppService.getInstance()

View File

@ -1,5 +1,6 @@
import { isWin } from '@main/constant'
import { locales } from '@main/utils/locales'
import { generateUserAgent } from '@main/utils/systemInfo'
import { FeedUrl, UpgradeChannel } from '@shared/config/constant'
import { IpcChannel } from '@shared/IpcChannel'
import { CancellationToken, UpdateInfo } from 'builder-util-runtime'
@ -24,6 +25,10 @@ export default class AppUpdater {
autoUpdater.forceDevUpdateConfig = !app.isPackaged
autoUpdater.autoDownload = configManager.getAutoUpdate()
autoUpdater.autoInstallOnAppQuit = configManager.getAutoUpdate()
autoUpdater.requestHeaders = {
...autoUpdater.requestHeaders,
'User-Agent': generateUserAgent()
}
autoUpdater.on('error', (error) => {
// Just log the error message and a timestamp

View File

@ -1,29 +0,0 @@
import { AxiosInstance, default as axios_ } from 'axios'
import { ProxyAgent } from 'proxy-agent'
import { proxyManager } from './ProxyManager'
class AxiosProxy {
private cacheAxios: AxiosInstance | null = null
private proxyAgent: ProxyAgent | null = null
get axios(): AxiosInstance {
const currentProxyAgent = proxyManager.getProxyAgent()
// Recreate the axios instance if the proxy has changed or it has not been initialized yet
if (this.cacheAxios === null || (currentProxyAgent !== null && this.proxyAgent !== currentProxyAgent)) {
this.proxyAgent = currentProxyAgent
// Create an axios instance with the proxy configuration
this.cacheAxios = axios_.create({
proxy: false,
httpAgent: currentProxyAgent || undefined,
httpsAgent: currentProxyAgent || undefined
})
}
return this.cacheAxios
}
}
export default new AxiosProxy()

View File

@ -11,7 +11,7 @@ import * as path from 'path'
import { CreateDirectoryOptions, FileStat } from 'webdav'
import { getDataPath } from '../utils'
import S3Storage from './RemoteStorage'
import S3Storage from './S3Storage'
import WebDav from './WebDav'
import { windowService } from './WindowService'
@ -27,6 +27,11 @@ class BackupManager {
this.restoreFromWebdav = this.restoreFromWebdav.bind(this)
this.listWebdavFiles = this.listWebdavFiles.bind(this)
this.deleteWebdavFile = this.deleteWebdavFile.bind(this)
this.listLocalBackupFiles = this.listLocalBackupFiles.bind(this)
this.deleteLocalBackupFile = this.deleteLocalBackupFile.bind(this)
this.backupToLocalDir = this.backupToLocalDir.bind(this)
this.restoreFromLocalBackup = this.restoreFromLocalBackup.bind(this)
this.setLocalBackupDir = this.setLocalBackupDir.bind(this)
this.backupToS3 = this.backupToS3.bind(this)
this.restoreFromS3 = this.restoreFromS3.bind(this)
this.listS3Files = this.listS3Files.bind(this)
@ -316,14 +321,22 @@ class BackupManager {
async backupToWebdav(_: Electron.IpcMainInvokeEvent, data: string, webdavConfig: WebDavConfig) {
const filename = webdavConfig.fileName || 'cherry-studio.backup.zip'
const backupedFilePath = await this.backup(_, filename, data, undefined, webdavConfig.skipBackupFile)
const contentLength = (await fs.stat(backupedFilePath)).size
const webdavClient = new WebDav(webdavConfig)
try {
const result = await webdavClient.putFileContents(filename, fs.createReadStream(backupedFilePath), {
overwrite: true,
contentLength
})
// Delete the local backup file after a successful upload
let result
if (webdavConfig.disableStream) {
const fileContent = await fs.readFile(backupedFilePath)
result = await webdavClient.putFileContents(filename, fileContent, {
overwrite: true
})
} else {
const contentLength = (await fs.stat(backupedFilePath)).size
result = await webdavClient.putFileContents(filename, fs.createReadStream(backupedFilePath), {
overwrite: true,
contentLength
})
}
await fs.remove(backupedFilePath)
return result
} catch (error) {
@ -477,8 +490,29 @@ class BackupManager {
}
}
async backupToLocalDir(
_: Electron.IpcMainInvokeEvent,
data: string,
fileName: string,
localConfig: {
localBackupDir: string
skipBackupFile: boolean
}
) {
try {
const backupDir = localConfig.localBackupDir
// Create backup directory if it doesn't exist
await fs.ensureDir(backupDir)
const backupedFilePath = await this.backup(_, fileName, data, backupDir, localConfig.skipBackupFile)
return backupedFilePath
} catch (error) {
Logger.error('[BackupManager] Local backup failed:', error)
throw error
}
}
async backupToS3(_: Electron.IpcMainInvokeEvent, data: string, s3Config: S3Config) {
// Get the device name
const os = require('os')
const deviceName = os.hostname ? os.hostname() : 'device'
const timestamp = new Date()
@ -487,18 +521,10 @@ class BackupManager {
.slice(0, 14)
const filename = s3Config.fileName || `cherry-studio.backup.${deviceName}.${timestamp}.zip`
// Keep logging light: only record start and finish
Logger.log(`[BackupManager] Starting S3 backup to ${filename}`)
const backupedFilePath = await this.backup(_, filename, data, undefined, s3Config.skipBackupFile)
const s3Client = new S3Storage('s3', {
endpoint: s3Config.endpoint,
region: s3Config.region,
bucket: s3Config.bucket,
access_key_id: s3Config.access_key_id,
secret_access_key: s3Config.secret_access_key,
root: s3Config.root || ''
})
const s3Client = new S3Storage(s3Config)
try {
const fileBuffer = await fs.promises.readFile(backupedFilePath)
const result = await s3Client.putFileContents(filename, fileBuffer)
@ -513,20 +539,81 @@ class BackupManager {
}
}
async restoreFromLocalBackup(_: Electron.IpcMainInvokeEvent, fileName: string, localBackupDir: string) {
try {
const backupDir = localBackupDir
const backupPath = path.join(backupDir, fileName)
if (!fs.existsSync(backupPath)) {
throw new Error(`Backup file not found: ${backupPath}`)
}
return await this.restore(_, backupPath)
} catch (error) {
Logger.error('[BackupManager] Local restore failed:', error)
throw error
}
}
async listLocalBackupFiles(_: Electron.IpcMainInvokeEvent, localBackupDir: string) {
try {
const files = await fs.readdir(localBackupDir)
const result: Array<{ fileName: string; modifiedTime: string; size: number }> = []
for (const file of files) {
const filePath = path.join(localBackupDir, file)
const stat = await fs.stat(filePath)
if (stat.isFile() && file.endsWith('.zip')) {
result.push({
fileName: file,
modifiedTime: stat.mtime.toISOString(),
size: stat.size
})
}
}
// Sort by modified time, newest first
return result.sort((a, b) => new Date(b.modifiedTime).getTime() - new Date(a.modifiedTime).getTime())
} catch (error) {
Logger.error('[BackupManager] List local backup files failed:', error)
throw error
}
}
async deleteLocalBackupFile(_: Electron.IpcMainInvokeEvent, fileName: string, localBackupDir: string) {
try {
const filePath = path.join(localBackupDir, fileName)
if (!fs.existsSync(filePath)) {
throw new Error(`Backup file not found: ${filePath}`)
}
await fs.remove(filePath)
return true
} catch (error) {
Logger.error('[BackupManager] Delete local backup file failed:', error)
throw error
}
}
async setLocalBackupDir(_: Electron.IpcMainInvokeEvent, dirPath: string) {
try {
// Check if directory exists
await fs.ensureDir(dirPath)
return true
} catch (error) {
Logger.error('[BackupManager] Set local backup directory failed:', error)
throw error
}
}
async restoreFromS3(_: Electron.IpcMainInvokeEvent, s3Config: S3Config) {
const filename = s3Config.fileName || 'cherry-studio.backup.zip'
// Only log start, finish, and errors
Logger.log(`[BackupManager] Starting restore from S3: ${filename}`)
const s3Client = new S3Storage('s3', {
endpoint: s3Config.endpoint,
region: s3Config.region,
bucket: s3Config.bucket,
access_key_id: s3Config.access_key_id,
secret_access_key: s3Config.secret_access_key,
root: s3Config.root || ''
})
const s3Client = new S3Storage(s3Config)
try {
const retrievedFile = await s3Client.getFileContents(filename)
const backupedFilePath = path.join(this.backupDir, filename)
@ -551,31 +638,21 @@ class BackupManager {
listS3Files = async (_: Electron.IpcMainInvokeEvent, s3Config: S3Config) => {
try {
const s3Client = new S3Storage('s3', {
endpoint: s3Config.endpoint,
region: s3Config.region,
bucket: s3Config.bucket,
access_key_id: s3Config.access_key_id,
secret_access_key: s3Config.secret_access_key,
root: s3Config.root || ''
})
const entries = await s3Client.instance?.list('/')
const files: Array<{ fileName: string; modifiedTime: string; size: number }> = []
if (entries) {
for await (const entry of entries) {
const path = entry.path()
if (path.endsWith('.zip')) {
const meta = await s3Client.instance!.stat(path)
if (meta.isFile()) {
files.push({
fileName: path.replace(/^\/+/, ''),
modifiedTime: meta.lastModified || '',
size: Number(meta.contentLength || 0n)
})
}
const s3Client = new S3Storage(s3Config)
const objects = await s3Client.listFiles()
const files = objects
.filter((obj) => obj.key.endsWith('.zip'))
.map((obj) => {
const segments = obj.key.split('/')
const fileName = segments[segments.length - 1]
return {
fileName,
modifiedTime: obj.lastModified || '',
size: obj.size
}
}
}
})
return files.sort((a, b) => new Date(b.modifiedTime).getTime() - new Date(a.modifiedTime).getTime())
} catch (error: any) {
Logger.error('Failed to list S3 files:', error)
@ -585,14 +662,7 @@ class BackupManager {
async deleteS3File(_: Electron.IpcMainInvokeEvent, fileName: string, s3Config: S3Config) {
try {
const s3Client = new S3Storage('s3', {
endpoint: s3Config.endpoint,
region: s3Config.region,
bucket: s3Config.bucket,
access_key_id: s3Config.access_key_id,
secret_access_key: s3Config.secret_access_key,
root: s3Config.root || ''
})
const s3Client = new S3Storage(s3Config)
return await s3Client.deleteFile(fileName)
} catch (error: any) {
Logger.error('Failed to delete S3 file:', error)
@ -601,14 +671,7 @@ class BackupManager {
}
async checkS3Connection(_: Electron.IpcMainInvokeEvent, s3Config: S3Config) {
const s3Client = new S3Storage('s3', {
endpoint: s3Config.endpoint,
region: s3Config.region,
bucket: s3Config.bucket,
access_key_id: s3Config.access_key_id,
secret_access_key: s3Config.secret_access_key,
root: s3Config.root || ''
})
const s3Client = new S3Storage(s3Config)
return await s3Client.checkConnection()
}
}

View File

@ -24,7 +24,9 @@ export enum ConfigKeys {
SelectionAssistantFollowToolbar = 'selectionAssistantFollowToolbar',
SelectionAssistantRemeberWinSize = 'selectionAssistantRemeberWinSize',
SelectionAssistantFilterMode = 'selectionAssistantFilterMode',
SelectionAssistantFilterList = 'selectionAssistantFilterList'
SelectionAssistantFilterList = 'selectionAssistantFilterList',
DisableHardwareAcceleration = 'disableHardwareAcceleration',
Proxy = 'proxy'
}
export class ConfigManager {
@ -218,6 +220,14 @@ export class ConfigManager {
this.setAndNotify(ConfigKeys.SelectionAssistantFilterList, value)
}
getDisableHardwareAcceleration(): boolean {
return this.get<boolean>(ConfigKeys.DisableHardwareAcceleration, false)
}
setDisableHardwareAcceleration(value: boolean) {
this.set(ConfigKeys.DisableHardwareAcceleration, value)
}
setAndNotify(key: string, value: unknown) {
this.set(key, value, true)
}

View File

@ -1,11 +1,10 @@
import { AxiosRequestConfig } from 'axios'
import axios from 'axios'
import { app, safeStorage } from 'electron'
import Logger from 'electron-log'
import fs from 'fs/promises'
import path from 'path'
import aoxisProxy from './AxiosProxy'
// Configuration constants, managed in one place
const CONFIG = {
GITHUB_CLIENT_ID: 'Iv1.b507a08c87ecfe98',
@ -96,7 +95,7 @@ class CopilotService {
}
}
const response = await aoxisProxy.axios.get(CONFIG.API_URLS.GITHUB_USER, config)
const response = await axios.get(CONFIG.API_URLS.GITHUB_USER, config)
return {
login: response.data.login,
avatar: response.data.avatar_url
@ -117,7 +116,7 @@ class CopilotService {
try {
this.updateHeaders(headers)
const response = await aoxisProxy.axios.post<AuthResponse>(
const response = await axios.post<AuthResponse>(
CONFIG.API_URLS.GITHUB_DEVICE_CODE,
{
client_id: CONFIG.GITHUB_CLIENT_ID,
@ -149,7 +148,7 @@ class CopilotService {
await this.delay(currentDelay)
try {
const response = await aoxisProxy.axios.post<TokenResponse>(
const response = await axios.post<TokenResponse>(
CONFIG.API_URLS.GITHUB_ACCESS_TOKEN,
{
client_id: CONFIG.GITHUB_CLIENT_ID,
@ -211,7 +210,7 @@ class CopilotService {
}
}
const response = await aoxisProxy.axios.get<CopilotTokenResponse>(CONFIG.API_URLS.COPILOT_TOKEN, config)
const response = await axios.get<CopilotTokenResponse>(CONFIG.API_URLS.COPILOT_TOKEN, config)
return response.data
} catch (error) {

View File

@ -0,0 +1,396 @@
import { getMcpDir, getTempDir } from '@main/utils/file'
import logger from 'electron-log'
import * as fs from 'fs'
import StreamZip from 'node-stream-zip'
import * as os from 'os'
import * as path from 'path'
import { v4 as uuidv4 } from 'uuid'
// Type definitions
export interface DxtManifest {
dxt_version: string
name: string
display_name?: string
version: string
description?: string
long_description?: string
author?: {
name?: string
email?: string
url?: string
}
repository?: {
type?: string
url?: string
}
homepage?: string
documentation?: string
support?: string
icon?: string
server: {
type: string
entry_point: string
mcp_config: {
command: string
args: string[]
env?: Record<string, string>
platform_overrides?: {
[platform: string]: {
command?: string
args?: string[]
env?: Record<string, string>
}
}
}
}
tools?: Array<{
name: string
description: string
}>
keywords?: string[]
license?: string
user_config?: Record<string, any>
compatibility?: {
claude_desktop?: string
platforms?: string[]
runtimes?: Record<string, string>
}
}
export interface DxtUploadResult {
success: boolean
data?: {
manifest: DxtManifest
extractDir: string
}
error?: string
}
export function performVariableSubstitution(
value: string,
extractDir: string,
userConfig?: Record<string, any>
): string {
let result = value
// Replace ${__dirname} with the extraction directory
result = result.replace(/\$\{__dirname\}/g, extractDir)
// Replace ${HOME} with user's home directory
result = result.replace(/\$\{HOME\}/g, os.homedir())
// Replace ${DESKTOP} with user's desktop directory
const desktopDir = path.join(os.homedir(), 'Desktop')
result = result.replace(/\$\{DESKTOP\}/g, desktopDir)
// Replace ${DOCUMENTS} with user's documents directory
const documentsDir = path.join(os.homedir(), 'Documents')
result = result.replace(/\$\{DOCUMENTS\}/g, documentsDir)
// Replace ${DOWNLOADS} with user's downloads directory
const downloadsDir = path.join(os.homedir(), 'Downloads')
result = result.replace(/\$\{DOWNLOADS\}/g, downloadsDir)
// Replace ${pathSeparator} or ${/} with the platform-specific path separator
result = result.replace(/\$\{pathSeparator\}/g, path.sep)
result = result.replace(/\$\{\/\}/g, path.sep)
// Replace ${user_config.KEY} with user-configured values
if (userConfig) {
result = result.replace(/\$\{user_config\.([^}]+)\}/g, (match, key) => {
return userConfig[key] || match // Keep original if not found
})
}
return result
}
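// A quick illustration of the substitution (hypothetical paths and user_config values, not taken from this diff):
//   performVariableSubstitution('${__dirname}/index.js', '/tmp/mcp/server-demo')
//     -> '/tmp/mcp/server-demo/index.js'
//   performVariableSubstitution('--token=${user_config.apiToken}', '/tmp/mcp/server-demo', { apiToken: 'abc123' })
//     -> '--token=abc123'
//   performVariableSubstitution('${user_config.missing}', '/tmp/mcp/server-demo', {})
//     -> '${user_config.missing}'   (unknown user_config keys are kept as-is)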
export function applyPlatformOverrides(mcpConfig: any, extractDir: string, userConfig?: Record<string, any>): any {
const platform = process.platform
const resolvedConfig = { ...mcpConfig }
// Apply platform-specific overrides
if (mcpConfig.platform_overrides && mcpConfig.platform_overrides[platform]) {
const override = mcpConfig.platform_overrides[platform]
// Override command if specified
if (override.command) {
resolvedConfig.command = override.command
}
// Override args if specified
if (override.args) {
resolvedConfig.args = override.args
}
// Merge environment variables
if (override.env) {
resolvedConfig.env = { ...resolvedConfig.env, ...override.env }
}
}
// Apply variable substitution to all string values
if (resolvedConfig.command) {
resolvedConfig.command = performVariableSubstitution(resolvedConfig.command, extractDir, userConfig)
}
if (resolvedConfig.args) {
resolvedConfig.args = resolvedConfig.args.map((arg: string) =>
performVariableSubstitution(arg, extractDir, userConfig)
)
}
if (resolvedConfig.env) {
for (const [key, value] of Object.entries(resolvedConfig.env)) {
resolvedConfig.env[key] = performVariableSubstitution(value as string, extractDir, userConfig)
}
}
return resolvedConfig
}
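// Sketch of the override + substitution flow for a hypothetical manifest entry:
//   const mcpConfig = {
//     command: 'node',
//     args: ['${__dirname}/server.js'],
//     platform_overrides: { win32: { command: 'node.exe' } }
//   }
//   applyPlatformOverrides(mcpConfig, '/tmp/mcp/server-demo')
//   On win32 the command becomes 'node.exe'; on every platform the args resolve to
//   ['/tmp/mcp/server-demo/server.js'] and any env values receive the same substitution.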
export interface ResolvedMcpConfig {
command: string
args: string[]
env?: Record<string, string>
}
class DxtService {
private tempDir = path.join(getTempDir(), 'dxt_uploads')
private mcpDir = getMcpDir()
constructor() {
this.ensureDirectories()
}
private ensureDirectories() {
try {
// Create temp directory
if (!fs.existsSync(this.tempDir)) {
fs.mkdirSync(this.tempDir, { recursive: true })
}
// Create MCP directory
if (!fs.existsSync(this.mcpDir)) {
fs.mkdirSync(this.mcpDir, { recursive: true })
}
} catch (error) {
logger.error('[DxtService] Failed to create directories:', error)
}
}
private async moveDirectory(source: string, destination: string): Promise<void> {
try {
// Try rename first (works if on same filesystem)
fs.renameSync(source, destination)
} catch (error) {
// If rename fails (cross-filesystem), use copy + remove
logger.info('[DxtService] Cross-filesystem move detected, using copy + remove')
// Ensure parent directory exists
const parentDir = path.dirname(destination)
if (!fs.existsSync(parentDir)) {
fs.mkdirSync(parentDir, { recursive: true })
}
// Recursively copy directory
await this.copyDirectory(source, destination)
// Remove source directory
fs.rmSync(source, { recursive: true, force: true })
}
}
private async copyDirectory(source: string, destination: string): Promise<void> {
// Create destination directory
fs.mkdirSync(destination, { recursive: true })
// Read source directory
const entries = fs.readdirSync(source, { withFileTypes: true })
// Copy each entry
for (const entry of entries) {
const sourcePath = path.join(source, entry.name)
const destPath = path.join(destination, entry.name)
if (entry.isDirectory()) {
await this.copyDirectory(sourcePath, destPath)
} else {
fs.copyFileSync(sourcePath, destPath)
}
}
}
public async uploadDxt(_: Electron.IpcMainInvokeEvent, filePath: string): Promise<DxtUploadResult> {
const tempExtractDir = path.join(this.tempDir, `dxt_${uuidv4()}`)
try {
// Validate file exists
if (!fs.existsSync(filePath)) {
throw new Error('DXT file not found')
}
// Extract the DXT file (which is a ZIP archive) to a temporary directory
logger.info('[DxtService] Extracting DXT file:', filePath)
const zip = new StreamZip.async({ file: filePath })
await zip.extract(null, tempExtractDir)
await zip.close()
// Read and validate the manifest.json
const manifestPath = path.join(tempExtractDir, 'manifest.json')
if (!fs.existsSync(manifestPath)) {
throw new Error('manifest.json not found in DXT file')
}
const manifestContent = fs.readFileSync(manifestPath, 'utf-8')
const manifest: DxtManifest = JSON.parse(manifestContent)
// Validate required fields in manifest
if (!manifest.dxt_version) {
throw new Error('Invalid manifest: missing dxt_version')
}
if (!manifest.name) {
throw new Error('Invalid manifest: missing name')
}
if (!manifest.version) {
throw new Error('Invalid manifest: missing version')
}
if (!manifest.server) {
throw new Error('Invalid manifest: missing server configuration')
}
if (!manifest.server.mcp_config) {
throw new Error('Invalid manifest: missing server.mcp_config')
}
if (!manifest.server.mcp_config.command) {
throw new Error('Invalid manifest: missing server.mcp_config.command')
}
if (!Array.isArray(manifest.server.mcp_config.args)) {
throw new Error('Invalid manifest: server.mcp_config.args must be an array')
}
// Use server name as the final extract directory for automatic version management
// Sanitize the name to prevent creating subdirectories
const sanitizedName = manifest.name.replace(/\//g, '-')
const serverDirName = `server-${sanitizedName}`
const finalExtractDir = path.join(this.mcpDir, serverDirName)
// Clean up any existing version of this server
if (fs.existsSync(finalExtractDir)) {
logger.info('[DxtService] Removing existing server directory:', finalExtractDir)
fs.rmSync(finalExtractDir, { recursive: true, force: true })
}
// Move the temporary directory to the final location
// Use recursive copy + remove instead of rename to handle cross-filesystem moves
await this.moveDirectory(tempExtractDir, finalExtractDir)
logger.info('[DxtService] DXT server extracted to:', finalExtractDir)
// Clean up the uploaded DXT file if it's in temp directory
if (filePath.startsWith(this.tempDir)) {
fs.unlinkSync(filePath)
}
// Return success with manifest and extraction path
return {
success: true,
data: {
manifest,
extractDir: finalExtractDir
}
}
} catch (error) {
// Clean up on error
if (fs.existsSync(tempExtractDir)) {
fs.rmSync(tempExtractDir, { recursive: true, force: true })
}
const errorMessage = error instanceof Error ? error.message : 'Failed to process DXT file'
logger.error('[DxtService] DXT upload error:', error)
return {
success: false,
error: errorMessage
}
}
}
/**
* Get resolved MCP configuration for a DXT server with platform overrides and variable substitution
*/
public getResolvedMcpConfig(dxtPath: string, userConfig?: Record<string, any>): ResolvedMcpConfig | null {
try {
// Read the manifest from the DXT server directory
const manifestPath = path.join(dxtPath, 'manifest.json')
if (!fs.existsSync(manifestPath)) {
logger.error('[DxtService] Manifest not found:', manifestPath)
return null
}
const manifestContent = fs.readFileSync(manifestPath, 'utf-8')
const manifest: DxtManifest = JSON.parse(manifestContent)
if (!manifest.server?.mcp_config) {
logger.error('[DxtService] No mcp_config found in manifest')
return null
}
// Apply platform overrides and variable substitution
const resolvedConfig = applyPlatformOverrides(manifest.server.mcp_config, dxtPath, userConfig)
logger.info('[DxtService] Resolved MCP config:', {
command: resolvedConfig.command,
args: resolvedConfig.args,
env: resolvedConfig.env ? Object.keys(resolvedConfig.env) : undefined
})
return resolvedConfig
} catch (error) {
logger.error('[DxtService] Failed to resolve MCP config:', error)
return null
}
}
public cleanupDxtServer(serverName: string): boolean {
try {
// Handle server names that might contain slashes (e.g., "anthropic/sequential-thinking")
// by replacing slashes with the same separator used during installation
const sanitizedName = serverName.replace(/\//g, '-')
const serverDirName = `server-${sanitizedName}`
const serverDir = path.join(this.mcpDir, serverDirName)
// First try the sanitized path
if (fs.existsSync(serverDir)) {
logger.info('[DxtService] Removing DXT server directory:', serverDir)
fs.rmSync(serverDir, { recursive: true, force: true })
return true
}
// Fallback: try with original name in case it was stored differently
const originalServerDir = path.join(this.mcpDir, `server-${serverName}`)
if (fs.existsSync(originalServerDir)) {
logger.info('[DxtService] Removing DXT server directory:', originalServerDir)
fs.rmSync(originalServerDir, { recursive: true, force: true })
return true
}
logger.warn('[DxtService] Server directory not found:', serverDir)
return false
} catch (error) {
logger.error('[DxtService] Failed to cleanup DXT server:', error)
return false
}
}
public cleanup() {
try {
// Clean up temp directory
if (fs.existsSync(this.tempDir)) {
fs.rmSync(this.tempDir, { recursive: true, force: true })
}
} catch (error) {
logger.error('[DxtService] Cleanup error:', error)
}
}
}
export default DxtService
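A rough sketch of how the service could be driven from the main process; the IPC channel name and file path below are illustrative and not defined in this diff:

```typescript
import { ipcMain } from 'electron'

import DxtService from './DxtService'

const dxtService = new DxtService()

ipcMain.handle('dxt:upload', async (event, filePath: string) => {
  // Extract the archive and validate its manifest
  const result = await dxtService.uploadDxt(event, filePath)
  if (!result.success || !result.data) {
    throw new Error(result.error ?? 'DXT upload failed')
  }
  // Resolve the runnable command (platform overrides + variable substitution applied)
  return dxtService.getResolvedMcpConfig(result.data.extractDir)
})
```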

View File

@ -1,6 +1,6 @@
import { getFilesDir, getFileType, getTempDir } from '@main/utils/file'
import { getFilesDir, getFileType, getTempDir, readTextFileWithAutoEncoding } from '@main/utils/file'
import { documentExts, imageExts, MB } from '@shared/config/constant'
import { FileType } from '@types'
import { FileMetadata } from '@types'
import * as crypto from 'crypto'
import {
dialog,
@ -53,8 +53,9 @@ class FileStorage {
})
}
findDuplicateFile = async (filePath: string): Promise<FileType | null> => {
findDuplicateFile = async (filePath: string): Promise<FileMetadata | null> => {
const stats = fs.statSync(filePath)
console.log('stats', stats, filePath)
const fileSize = stats.size
const files = await fs.promises.readdir(this.storageDir)
@ -92,7 +93,7 @@ class FileStorage {
public selectFile = async (
_: Electron.IpcMainInvokeEvent,
options?: OpenDialogOptions
): Promise<FileType[] | null> => {
): Promise<FileMetadata[] | null> => {
const defaultOptions: OpenDialogOptions = {
properties: ['openFile']
}
@ -151,7 +152,7 @@ class FileStorage {
}
}
public uploadFile = async (_: Electron.IpcMainInvokeEvent, file: FileType): Promise<FileType> => {
public uploadFile = async (_: Electron.IpcMainInvokeEvent, file: FileMetadata): Promise<FileMetadata> => {
const duplicateFile = await this.findDuplicateFile(file.path)
if (duplicateFile) {
@ -175,7 +176,7 @@ class FileStorage {
const stats = await fs.promises.stat(destPath)
const fileType = getFileType(ext)
const fileMetadata: FileType = {
const fileMetadata: FileMetadata = {
id: uuid,
origin_name,
name: uuid + ext,
@ -187,10 +188,12 @@ class FileStorage {
count: 1
}
logger.info('[FileStorage] File uploaded:', fileMetadata)
return fileMetadata
}
public getFile = async (_: Electron.IpcMainInvokeEvent, filePath: string): Promise<FileType | null> => {
public getFile = async (_: Electron.IpcMainInvokeEvent, filePath: string): Promise<FileMetadata | null> => {
if (!fs.existsSync(filePath)) {
return null
}
@ -199,7 +202,7 @@ class FileStorage {
const ext = path.extname(filePath)
const fileType = getFileType(ext)
const fileInfo: FileType = {
const fileInfo: FileMetadata = {
id: uuidv4(),
origin_name: path.basename(filePath),
name: path.basename(filePath),
@ -215,10 +218,24 @@ class FileStorage {
}
public deleteFile = async (_: Electron.IpcMainInvokeEvent, id: string): Promise<void> => {
if (!fs.existsSync(path.join(this.storageDir, id))) {
return
}
await fs.promises.unlink(path.join(this.storageDir, id))
}
public readFile = async (_: Electron.IpcMainInvokeEvent, id: string): Promise<string> => {
public deleteDir = async (_: Electron.IpcMainInvokeEvent, id: string): Promise<void> => {
if (!fs.existsSync(path.join(this.storageDir, id))) {
return
}
await fs.promises.rm(path.join(this.storageDir, id), { recursive: true })
}
public readFile = async (
_: Electron.IpcMainInvokeEvent,
id: string,
detectEncoding: boolean = false
): Promise<string> => {
const filePath = path.join(this.storageDir, id)
const fileExtension = path.extname(filePath)
@ -245,15 +262,24 @@ class FileStorage {
}
}
return fs.readFileSync(filePath, 'utf8')
try {
if (detectEncoding) {
return readTextFileWithAutoEncoding(filePath)
} else {
return fs.readFileSync(filePath, 'utf-8')
}
} catch (error) {
logger.error(error)
throw new Error(`Failed to read file: ${filePath}.`)
}
}
public createTempFile = async (_: Electron.IpcMainInvokeEvent, fileName: string): Promise<string> => {
if (!fs.existsSync(this.tempDir)) {
fs.mkdirSync(this.tempDir, { recursive: true })
}
const tempFilePath = path.join(this.tempDir, `temp_file_${uuidv4()}_${fileName}`)
return tempFilePath
return path.join(this.tempDir, `temp_file_${uuidv4()}_${fileName}`)
}
public writeFile = async (
@ -280,7 +306,7 @@ class FileStorage {
}
}
public saveBase64Image = async (_: Electron.IpcMainInvokeEvent, base64Data: string): Promise<FileType> => {
public saveBase64Image = async (_: Electron.IpcMainInvokeEvent, base64Data: string): Promise<FileMetadata> => {
try {
if (!base64Data) {
throw new Error('Base64 data is required')
@ -306,7 +332,7 @@ class FileStorage {
await fs.promises.writeFile(destPath, buffer)
const fileMetadata: FileType = {
const fileMetadata: FileMetadata = {
id: uuid,
origin_name: uuid + ext,
name: uuid + ext,
@ -398,6 +424,19 @@ class FileStorage {
shell.openPath(path).catch((err) => logger.error('[IPC - Error] Failed to open file:', err))
}
/**
* Open a file from the storage directory using its relative path (file.name)
* @param file metadata of the file to open
*/
public openFileWithRelativePath = async (_: Electron.IpcMainInvokeEvent, file: FileMetadata): Promise<void> => {
const filePath = path.join(this.storageDir, file.name)
if (fs.existsSync(filePath)) {
shell.openPath(filePath).catch((err) => logger.error('[IPC - Error] Failed to open file:', err))
} else {
logger.warn('[IPC - Warning] File does not exist:', filePath)
}
}
public save = async (
_: Electron.IpcMainInvokeEvent,
fileName: string,
@ -465,7 +504,7 @@ class FileStorage {
_: Electron.IpcMainInvokeEvent,
url: string,
isUseContentType?: boolean
): Promise<FileType> => {
): Promise<FileMetadata> => {
try {
const response = await fetch(url)
if (!response.ok) {
@ -507,7 +546,7 @@ class FileStorage {
const stats = await fs.promises.stat(destPath)
const fileType = getFileType(ext)
const fileMetadata: FileType = {
const fileMetadata: FileMetadata = {
id: uuid,
origin_name: filename,
name: uuid + ext,

View File

@ -21,17 +21,19 @@ import type { ExtractChunkData } from '@cherrystudio/embedjs-interfaces'
import { LibSqlDb } from '@cherrystudio/embedjs-libsql'
import { SitemapLoader } from '@cherrystudio/embedjs-loader-sitemap'
import { WebLoader } from '@cherrystudio/embedjs-loader-web'
import Embeddings from '@main/embeddings/Embeddings'
import { addFileLoader } from '@main/loader'
import { NoteLoader } from '@main/loader/noteLoader'
import Reranker from '@main/reranker/Reranker'
import OcrProvider from '@main/knowledage/ocr/OcrProvider'
import PreprocessProvider from '@main/knowledage/preprocess/PreprocessProvider'
import Embeddings from '@main/knowledge/embeddings/Embeddings'
import { addFileLoader } from '@main/knowledge/loader'
import { NoteLoader } from '@main/knowledge/loader/noteLoader'
import Reranker from '@main/knowledge/reranker/Reranker'
import { windowService } from '@main/services/WindowService'
import { getDataPath } from '@main/utils'
import { getAllFiles } from '@main/utils/file'
import { MB } from '@shared/config/constant'
import type { LoaderReturn } from '@shared/config/types'
import { IpcChannel } from '@shared/IpcChannel'
import { FileType, KnowledgeBaseParams, KnowledgeItem } from '@types'
import { FileMetadata, KnowledgeBaseParams, KnowledgeItem } from '@types'
import Logger from 'electron-log'
import { v4 as uuidv4 } from 'uuid'
@ -39,12 +41,14 @@ export interface KnowledgeBaseAddItemOptions {
base: KnowledgeBaseParams
item: KnowledgeItem
forceReload?: boolean
userId?: string
}
interface KnowledgeBaseAddItemOptionsNonNullableAttribute {
base: KnowledgeBaseParams
item: KnowledgeItem
forceReload: boolean
userId: string
}
interface EvaluateTaskWorkload {
@ -96,7 +100,13 @@ class KnowledgeService {
private knowledgeItemProcessingQueueMappingPromise: Map<LoaderTaskOfSet, () => void> = new Map()
private static MAXIMUM_WORKLOAD = 80 * MB
private static MAXIMUM_PROCESSING_ITEM_COUNT = 30
private static ERROR_LOADER_RETURN: LoaderReturn = { entriesAdded: 0, uniqueId: '', uniqueIds: [''], loaderType: '' }
private static ERROR_LOADER_RETURN: LoaderReturn = {
entriesAdded: 0,
uniqueId: '',
uniqueIds: [''],
loaderType: '',
status: 'failed'
}
constructor() {
this.initStorageDir()
@ -110,27 +120,21 @@ class KnowledgeService {
private getRagApplication = async ({
id,
model,
provider,
apiKey,
apiVersion,
baseURL,
dimensions
embedApiClient,
dimensions,
documentCount
}: KnowledgeBaseParams): Promise<RAGApplication> => {
let ragApplication: RAGApplication
const embeddings = new Embeddings({
model,
provider,
apiKey,
apiVersion,
baseURL,
embedApiClient,
dimensions
} as KnowledgeBaseParams)
})
try {
ragApplication = await new RAGApplicationBuilder()
.setModel('NO_MODEL')
.setEmbeddingModel(embeddings)
.setVectorDatabase(new LibSqlDb({ path: path.join(this.storageDir, id) }))
.setSearchResultCount(documentCount || 30)
.build()
} catch (e) {
Logger.error(e)
@ -150,6 +154,7 @@ class KnowledgeService {
}
public delete = async (_: Electron.IpcMainInvokeEvent, id: string): Promise<void> => {
console.log('id', id)
const dbPath = path.join(this.storageDir, id)
if (fs.existsSync(dbPath)) {
fs.rmSync(dbPath, { recursive: true })
@ -162,28 +167,49 @@ class KnowledgeService {
this.workload >= KnowledgeService.MAXIMUM_WORKLOAD
)
}
private fileTask(
ragApplication: RAGApplication,
options: KnowledgeBaseAddItemOptionsNonNullableAttribute
): LoaderTask {
const { base, item, forceReload } = options
const file = item.content as FileType
const { base, item, forceReload, userId } = options
const file = item.content as FileMetadata
const loaderTask: LoaderTask = {
loaderTasks: [
{
state: LoaderTaskItemState.PENDING,
task: () =>
addFileLoader(ragApplication, file, base, forceReload)
.then((result) => {
loaderTask.loaderDoneReturn = result
return result
})
.catch((err) => {
Logger.error(err)
return KnowledgeService.ERROR_LOADER_RETURN
}),
task: async () => {
try {
// Run preprocessing before loading
const fileToProcess: FileMetadata = await this.preprocessing(file, base, item, userId)
// Load the knowledge item using the preprocessed file
return addFileLoader(ragApplication, fileToProcess, base, forceReload)
.then((result) => {
loaderTask.loaderDoneReturn = result
return result
})
.catch((e) => {
Logger.error(`Error in addFileLoader for ${file.name}: ${e}`)
const errorResult: LoaderReturn = {
...KnowledgeService.ERROR_LOADER_RETURN,
message: e.message,
messageSource: 'embedding'
}
loaderTask.loaderDoneReturn = errorResult
return errorResult
})
} catch (e: any) {
Logger.error(`Preprocessing failed for ${file.name}: ${e}`)
const errorResult: LoaderReturn = {
...KnowledgeService.ERROR_LOADER_RETURN,
message: e.message,
messageSource: 'preprocess'
}
loaderTask.loaderDoneReturn = errorResult
return errorResult
}
},
evaluateTaskWorkload: { workload: file.size }
}
],
@ -192,7 +218,6 @@ class KnowledgeService {
return loaderTask
}
private directoryTask(
ragApplication: RAGApplication,
options: KnowledgeBaseAddItemOptionsNonNullableAttribute
@ -232,7 +257,11 @@ class KnowledgeService {
})
.catch((err) => {
Logger.error(err)
return KnowledgeService.ERROR_LOADER_RETURN
return {
...KnowledgeService.ERROR_LOADER_RETURN,
message: `Failed to add dir loader: ${err.message}`,
messageSource: 'embedding'
}
}),
evaluateTaskWorkload: { workload: file.size }
})
@ -278,7 +307,11 @@ class KnowledgeService {
})
.catch((err) => {
Logger.error(err)
return KnowledgeService.ERROR_LOADER_RETURN
return {
...KnowledgeService.ERROR_LOADER_RETURN,
message: `Failed to add url loader: ${err.message}`,
messageSource: 'embedding'
}
})
},
evaluateTaskWorkload: { workload: 2 * MB }
@ -318,7 +351,11 @@ class KnowledgeService {
})
.catch((err) => {
Logger.error(err)
return KnowledgeService.ERROR_LOADER_RETURN
return {
...KnowledgeService.ERROR_LOADER_RETURN,
message: `Failed to add sitemap loader: ${err.message}`,
messageSource: 'embedding'
}
}),
evaluateTaskWorkload: { workload: 20 * MB }
}
@ -364,7 +401,11 @@ class KnowledgeService {
})
.catch((err) => {
Logger.error(err)
return KnowledgeService.ERROR_LOADER_RETURN
return {
...KnowledgeService.ERROR_LOADER_RETURN,
message: `Failed to add note loader: ${err.message}`,
messageSource: 'embedding'
}
})
},
evaluateTaskWorkload: { workload: contentBytes.length }
@ -430,10 +471,10 @@ class KnowledgeService {
})
}
public add = (_: Electron.IpcMainInvokeEvent, options: KnowledgeBaseAddItemOptions): Promise<LoaderReturn> => {
public add = async (_: Electron.IpcMainInvokeEvent, options: KnowledgeBaseAddItemOptions): Promise<LoaderReturn> => {
return new Promise((resolve) => {
const { base, item, forceReload = false } = options
const optionsNonNullableAttribute = { base, item, forceReload }
const { base, item, forceReload = false, userId = '' } = options
const optionsNonNullableAttribute = { base, item, forceReload, userId }
this.getRagApplication(base)
.then((ragApplication) => {
const task = (() => {
@ -459,12 +500,20 @@ class KnowledgeService {
})
this.processingQueueHandle()
} else {
resolve(KnowledgeService.ERROR_LOADER_RETURN)
resolve({
...KnowledgeService.ERROR_LOADER_RETURN,
message: 'Unsupported item type',
messageSource: 'embedding'
})
}
})
.catch((err) => {
Logger.error(err)
resolve(KnowledgeService.ERROR_LOADER_RETURN)
resolve({
...KnowledgeService.ERROR_LOADER_RETURN,
message: `Failed to add item: ${err.message}`,
messageSource: 'embedding'
})
})
})
}
@ -497,6 +546,69 @@ class KnowledgeService {
}
return await new Reranker(base).rerank(search, results)
}
public getStorageDir = (): string => {
return this.storageDir
}
private preprocessing = async (
file: FileMetadata,
base: KnowledgeBaseParams,
item: KnowledgeItem,
userId: string
): Promise<FileMetadata> => {
let fileToProcess: FileMetadata = file
if (base.preprocessOrOcrProvider && file.ext.toLowerCase() === '.pdf') {
try {
let provider: PreprocessProvider | OcrProvider
if (base.preprocessOrOcrProvider.type === 'preprocess') {
provider = new PreprocessProvider(base.preprocessOrOcrProvider.provider, userId)
} else {
provider = new OcrProvider(base.preprocessOrOcrProvider.provider)
}
// First check whether this file has already been preprocessed
const alreadyProcessed = await provider.checkIfAlreadyProcessed(file)
if (alreadyProcessed) {
Logger.info(`File already preprocessed, using cached result: ${file.path}`)
return alreadyProcessed
}
// Run the preprocessing step
Logger.info(`Starting preprocessing for scanned PDF: ${file.path}`)
const { processedFile, quota } = await provider.parseFile(item.id, file)
fileToProcess = processedFile
const mainWindow = windowService.getMainWindow()
mainWindow?.webContents.send('file-preprocess-finished', {
itemId: item.id,
quota: quota
})
} catch (err) {
Logger.error(`Preprocessing failed: ${err}`)
// On preprocessing failure, rethrow instead of silently falling back to the original file
// fileToProcess = file
throw new Error(`Preprocessing failed: ${err}`)
}
}
return fileToProcess
}
public checkQuota = async (
_: Electron.IpcMainInvokeEvent,
base: KnowledgeBaseParams,
userId: string
): Promise<number> => {
try {
if (base.preprocessOrOcrProvider && base.preprocessOrOcrProvider.type === 'preprocess') {
const provider = new PreprocessProvider(base.preprocessOrOcrProvider.provider, userId)
return await provider.checkQuota()
}
throw new Error('No preprocess provider configured')
} catch (err) {
Logger.error(`Failed to check quota: ${err}`)
throw new Error(`Failed to check quota: ${err}`)
}
}
}
export default new KnowledgeService()

View File

@ -14,6 +14,16 @@ import {
type StreamableHTTPClientTransportOptions
} from '@modelcontextprotocol/sdk/client/streamableHttp'
import { InMemoryTransport } from '@modelcontextprotocol/sdk/inMemory'
// Import notification schemas from MCP SDK
import {
CancelledNotificationSchema,
LoggingMessageNotificationSchema,
ProgressNotificationSchema,
PromptListChangedNotificationSchema,
ResourceListChangedNotificationSchema,
ResourceUpdatedNotificationSchema,
ToolListChangedNotificationSchema
} from '@modelcontextprotocol/sdk/types.js'
import { nanoid } from '@reduxjs/toolkit'
import {
GetMCPPromptResponse,
@ -28,8 +38,10 @@ import { app } from 'electron'
import Logger from 'electron-log'
import { EventEmitter } from 'events'
import { memoize } from 'lodash'
import { v4 as uuidv4 } from 'uuid'
import { CacheService } from './CacheService'
import DxtService from './DxtService'
import { CallBackServer } from './mcp/oauth/callback'
import { McpOAuthClientProvider } from './mcp/oauth/provider'
import getLoginShellEnvironment from './mcp/shell-env'
@ -71,6 +83,8 @@ function withCache<T extends unknown[], R>(
class McpService {
private clients: Map<string, Client> = new Map()
private pendingClients: Map<string, Promise<Client>> = new Map()
private dxtService = new DxtService()
private activeToolCalls: Map<string, AbortController> = new Map()
constructor() {
this.initClient = this.initClient.bind(this)
@ -84,7 +98,10 @@ class McpService {
this.removeServer = this.removeServer.bind(this)
this.restartServer = this.restartServer.bind(this)
this.stopServer = this.stopServer.bind(this)
this.abortTool = this.abortTool.bind(this)
this.cleanup = this.cleanup.bind(this)
this.checkMcpConnectivity = this.checkMcpConnectivity.bind(this)
this.getServerVersion = this.getServerVersion.bind(this)
}
private getServerKey(server: MCPServer): string {
@ -133,7 +150,7 @@ class McpService {
// Create new client instance for each connection
const client = new Client({ name: 'Cherry Studio', version: app.getVersion() }, { capabilities: {} })
const args = [...(server.args || [])]
let args = [...(server.args || [])]
// let transport: StdioClientTransport | SSEClientTransport | InMemoryTransport | StreamableHTTPClientTransport
const authProvider = new McpOAuthClientProvider({
@ -203,6 +220,23 @@ class McpService {
} else if (server.command) {
let cmd = server.command
// For DXT servers, use resolved configuration with platform overrides and variable substitution
if (server.dxtPath) {
const resolvedConfig = this.dxtService.getResolvedMcpConfig(server.dxtPath)
if (resolvedConfig) {
cmd = resolvedConfig.command
args = resolvedConfig.args
// Merge resolved environment variables with existing ones
server.env = {
...server.env,
...resolvedConfig.env
}
Logger.info(`[MCP] Using resolved DXT config - command: ${cmd}, args: ${args?.join(' ')}`)
} else {
Logger.warn(`[MCP] Failed to resolve DXT config for ${server.name}, falling back to manifest values`)
}
}
if (server.command === 'npx') {
cmd = await getBinaryPath('bun')
Logger.info(`[MCP] Using command: ${cmd}`)
@ -249,7 +283,7 @@ class McpService {
this.removeProxyEnv(loginShellEnv)
}
const stdioTransport = new StdioClientTransport({
const transportOptions: any = {
command: cmd,
args,
env: {
@ -257,7 +291,15 @@ class McpService {
...server.env
},
stderr: 'pipe'
})
}
// For DXT servers, set the working directory to the extracted path
if (server.dxtPath) {
transportOptions.cwd = server.dxtPath
Logger.info(`[MCP] Setting working directory for DXT server: ${server.dxtPath}`)
}
const stdioTransport = new StdioClientTransport(transportOptions)
stdioTransport.stderr?.on('data', (data) =>
Logger.info(`[MCP] Stdio stderr for server: ${server.name} `, data.toString())
)
@ -331,6 +373,12 @@ class McpService {
// Store the new client in the cache
this.clients.set(serverKey, client)
// Set up notification handlers
this.setupNotificationHandlers(client, server)
// Clear existing cache to ensure fresh data
this.clearServerCache(serverKey)
Logger.info(`[MCP] Activated server: ${server.name}`)
return client
} catch (error: any) {
@ -349,6 +397,79 @@ class McpService {
return initPromise
}
/**
* Set up notification handlers for MCP client
*/
private setupNotificationHandlers(client: Client, server: MCPServer) {
const serverKey = this.getServerKey(server)
try {
// Set up tools list changed notification handler
client.setNotificationHandler(ToolListChangedNotificationSchema, async () => {
Logger.info(`[MCP] Tools list changed for server: ${server.name}`)
// Clear tools cache
CacheService.remove(`mcp:list_tool:${serverKey}`)
})
// Set up resources list changed notification handler
client.setNotificationHandler(ResourceListChangedNotificationSchema, async () => {
Logger.info(`[MCP] Resources list changed for server: ${server.name}`)
// Clear resources cache
CacheService.remove(`mcp:list_resources:${serverKey}`)
})
// Set up prompts list changed notification handler
client.setNotificationHandler(PromptListChangedNotificationSchema, async () => {
Logger.info(`[MCP] Prompts list changed for server: ${server.name}`)
// Clear prompts cache
CacheService.remove(`mcp:list_prompts:${serverKey}`)
})
// Set up resource updated notification handler
client.setNotificationHandler(ResourceUpdatedNotificationSchema, async () => {
Logger.info(`[MCP] Resource updated for server: ${server.name}`)
// Clear resource-specific caches
this.clearResourceCaches(serverKey)
})
// Set up progress notification handler
client.setNotificationHandler(ProgressNotificationSchema, async (notification) => {
Logger.info(`[MCP] Progress notification received for server: ${server.name}`, notification.params)
})
// Set up cancelled notification handler
client.setNotificationHandler(CancelledNotificationSchema, async (notification) => {
Logger.info(`[MCP] Operation cancelled for server: ${server.name}`, notification.params)
})
// Set up logging message notification handler
client.setNotificationHandler(LoggingMessageNotificationSchema, async (notification) => {
Logger.info(`[MCP] Message from server ${server.name}:`, notification.params)
})
Logger.info(`[MCP] Set up notification handlers for server: ${server.name}`)
} catch (error) {
Logger.error(`[MCP] Failed to set up notification handlers for server ${server.name}:`, error)
}
}
/**
* Clear resource-specific caches for a server
*/
private clearResourceCaches(serverKey: string) {
CacheService.remove(`mcp:list_resources:${serverKey}`)
}
/**
* Clear all caches for a specific server
*/
private clearServerCache(serverKey: string) {
CacheService.remove(`mcp:list_tool:${serverKey}`)
CacheService.remove(`mcp:list_prompts:${serverKey}`)
CacheService.remove(`mcp:list_resources:${serverKey}`)
Logger.info(`[MCP] Cleared all caches for server: ${serverKey}`)
}
async closeClient(serverKey: string) {
const client = this.clients.get(serverKey)
if (client) {
@ -356,8 +477,8 @@ class McpService {
await client.close()
Logger.info(`[MCP] Closed server: ${serverKey}`)
this.clients.delete(serverKey)
CacheService.remove(`mcp:list_tool:${serverKey}`)
Logger.info(`[MCP] Cleared cache for server: ${serverKey}`)
// Clear all caches for this server
this.clearServerCache(serverKey)
} else {
Logger.warn(`[MCP] No client found for server: ${serverKey}`)
}
@ -375,12 +496,26 @@ class McpService {
if (existingClient) {
await this.closeClient(serverKey)
}
// If this is a DXT server, cleanup its directory
if (server.dxtPath) {
try {
const cleaned = this.dxtService.cleanupDxtServer(server.name)
if (cleaned) {
Logger.info(`[MCP] Cleaned up DXT server directory for: ${server.name}`)
}
} catch (error) {
Logger.error(`[MCP] Failed to cleanup DXT server: ${server.name}`, error)
}
}
}
async restartServer(_: Electron.IpcMainInvokeEvent, server: MCPServer) {
Logger.info(`[MCP] Restarting server: ${server.name}`)
const serverKey = this.getServerKey(server)
await this.closeClient(serverKey)
// Clear cache before restarting to ensure fresh data
this.clearServerCache(serverKey)
await this.initClient(server)
}
@ -400,6 +535,12 @@ class McpService {
public async checkMcpConnectivity(_: Electron.IpcMainInvokeEvent, server: MCPServer): Promise<boolean> {
Logger.info(`[MCP] Checking connectivity for server: ${server.name}`)
try {
Logger.info(`[MCP] About to call initClient for server: ${server.name}`, { hasInitClient: !!this.initClient })
if (!this.initClient) {
throw new Error('initClient method is not available')
}
const client = await this.initClient(server)
// Attempt to list tools as a way to check connectivity
await client.listTools()
@ -455,10 +596,14 @@ class McpService {
*/
public async callTool(
_: Electron.IpcMainInvokeEvent,
{ server, name, args }: { server: MCPServer; name: string; args: any }
{ server, name, args, callId }: { server: MCPServer; name: string; args: any; callId?: string }
): Promise<MCPCallToolResponse> {
const toolCallId = callId || uuidv4()
const abortController = new AbortController()
this.activeToolCalls.set(toolCallId, abortController)
try {
Logger.info('[MCP] Calling:', server.name, name, args)
Logger.info('[MCP] Calling:', server.name, name, args, 'callId:', toolCallId)
if (typeof args === 'string') {
try {
args = JSON.parse(args)
@ -468,12 +613,19 @@ class McpService {
}
const client = await this.initClient(server)
const result = await client.callTool({ name, arguments: args }, undefined, {
timeout: server.timeout ? server.timeout * 1000 : 60000 // Default timeout of 1 minute
onprogress: (process) => {
console.log('[MCP] Progress:', process.progress / (process.total || 1))
window.api.mcp.setProgress(process.progress / (process.total || 1))
},
timeout: server.timeout ? server.timeout * 1000 : 60000, // Default timeout of 1 minute
signal: this.activeToolCalls.get(toolCallId)?.signal
})
return result as MCPCallToolResponse
} catch (error) {
Logger.error(`[MCP] Error calling tool ${name} on ${server.name}:`, error)
throw error
} finally {
this.activeToolCalls.delete(toolCallId)
}
}
@ -664,6 +816,45 @@ class McpService {
delete env.http_proxy
delete env.https_proxy
}
// abortTool implementation: cancel an in-flight tool call by its callId
public async abortTool(_: Electron.IpcMainInvokeEvent, callId: string) {
const activeToolCall = this.activeToolCalls.get(callId)
if (activeToolCall) {
activeToolCall.abort()
this.activeToolCalls.delete(callId)
Logger.info(`[MCP] Aborted tool call: ${callId}`)
return true
} else {
Logger.warn(`[MCP] No active tool call found for callId: ${callId}`)
return false
}
}
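// Illustrative flow (the names in scope are assumed): the caller starts a tool call with a
// known callId and can cancel it later through the same id.
//   const callId = uuidv4()
//   const pending = mcpService.callTool(event, { server, name: 'slow_tool', args: {}, callId })
//   ...
//   await mcpService.abortTool(event, callId) // true if the call was still in flight
//   await pending.catch(() => {})             // the aborted call rejects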
/**
* Get the server version information
*/
public async getServerVersion(_: Electron.IpcMainInvokeEvent, server: MCPServer): Promise<string | null> {
try {
Logger.info(`[MCP] Getting server version for: ${server.name}`)
const client = await this.initClient(server)
// Try to get server information which may include version
const serverInfo = client.getServerVersion()
Logger.info(`[MCP] Server info for ${server.name}:`, serverInfo)
if (serverInfo && serverInfo.version) {
Logger.info(`[MCP] Server version for ${server.name}: ${serverInfo.version}`)
return serverInfo.version
}
Logger.warn(`[MCP] No version information available for server: ${server.name}`)
return null
} catch (error: any) {
Logger.error(`[MCP] Failed to get server version for ${server.name}:`, error?.message)
return null
}
}
}
export default new McpService()

View File

@ -0,0 +1,33 @@
import { Mistral } from '@mistralai/mistralai'
import { Provider } from '@types'
export class MistralClientManager {
private static instance: MistralClientManager
private client: Mistral | null = null
// eslint-disable-next-line @typescript-eslint/no-empty-function
private constructor() {}
public static getInstance(): MistralClientManager {
if (!MistralClientManager.instance) {
MistralClientManager.instance = new MistralClientManager()
}
return MistralClientManager.instance
}
public initializeClient(provider: Provider): void {
if (!this.client) {
this.client = new Mistral({
apiKey: provider.apiKey,
serverURL: provider.apiHost
})
}
}
public getClient(): Mistral {
if (!this.client) {
throw new Error('Mistral client not initialized. Call initializeClient first.')
}
return this.client
}
}
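Intended usage is roughly the following; the provider values are placeholders and the cast hedges the remaining Provider fields:

```typescript
const manager = MistralClientManager.getInstance()
manager.initializeClient({ apiKey: 'sk-...', apiHost: 'https://api.mistral.ai' } as Provider)

const mistral = manager.getClient() // throws if initializeClient was never called
```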

View File

@ -1,8 +1,6 @@
import { BrowserWindow, Notification as ElectronNotification } from 'electron'
import { Notification } from 'src/renderer/src/types/notification'
import icon from '../../../build/icon.png?asset'
class NotificationService {
private window: BrowserWindow
@ -15,8 +13,7 @@ class NotificationService {
// Use the Electron Notification API
const electronNotification = new ElectronNotification({
title: notification.title,
body: notification.message,
icon: icon
body: notification.message
})
electronNotification.on('click', () => {

View File

@ -19,7 +19,7 @@ export function registerProtocolClient(app: Electron.App) {
}
}
app.setAsDefaultProtocolClient('cherrystudio')
app.setAsDefaultProtocolClient(CHERRY_STUDIO_PROTOCOL)
}
export function handleProtocolUrl(url: string) {

View File

@ -1,38 +1,54 @@
import { ProxyConfig as _ProxyConfig, session } from 'electron'
import axios from 'axios'
import { app, ProxyConfig, session } from 'electron'
import Logger from 'electron-log'
import { socksDispatcher } from 'fetch-socks'
import http from 'http'
import https from 'https'
import { getSystemProxy } from 'os-proxy-config'
import { ProxyAgent as GeneralProxyAgent } from 'proxy-agent'
// import { ProxyAgent, setGlobalDispatcher } from 'undici'
type ProxyMode = 'system' | 'custom' | 'none'
export interface ProxyConfig {
mode: ProxyMode
url?: string
}
import { ProxyAgent } from 'proxy-agent'
import { Dispatcher, EnvHttpProxyAgent, getGlobalDispatcher, setGlobalDispatcher } from 'undici'
export class ProxyManager {
private config: ProxyConfig
private proxyAgent: GeneralProxyAgent | null = null
private config: ProxyConfig = { mode: 'direct' }
private systemProxyInterval: NodeJS.Timeout | null = null
private isSettingProxy = false
private originalGlobalDispatcher: Dispatcher
private originalSocksDispatcher: Dispatcher
// for http and https
private originalHttpGet: typeof http.get
private originalHttpRequest: typeof http.request
private originalHttpsGet: typeof https.get
private originalHttpsRequest: typeof https.request
constructor() {
this.config = {
mode: 'none'
}
}
private async setSessionsProxy(config: _ProxyConfig): Promise<void> {
const sessions = [session.defaultSession, session.fromPartition('persist:webview')]
await Promise.all(sessions.map((session) => session.setProxy(config)))
this.originalGlobalDispatcher = getGlobalDispatcher()
this.originalSocksDispatcher = global[Symbol.for('undici.globalDispatcher.1')]
this.originalHttpGet = http.get
this.originalHttpRequest = http.request
this.originalHttpsGet = https.get
this.originalHttpsRequest = https.request
}
private async monitorSystemProxy(): Promise<void> {
// Clear any existing interval first
this.clearSystemProxyMonitor()
// Set new interval
this.systemProxyInterval = setInterval(async () => {
await this.setSystemProxy()
}, 10000)
this.systemProxyInterval = setInterval(
async () => {
const currentProxy = await getSystemProxy()
if (currentProxy && currentProxy.proxyUrl.toLowerCase() === this.config.proxyRules) {
return
}
await this.configureProxy({
mode: 'system',
proxyRules: currentProxy?.proxyUrl.toLowerCase()
})
},
// 1 minute
1000 * 60
)
}
private clearSystemProxyMonitor(): void {
@ -43,99 +59,182 @@ export class ProxyManager {
}
async configureProxy(config: ProxyConfig): Promise<void> {
Logger.info('configureProxy', config.mode, config.proxyRules)
if (this.isSettingProxy) {
return
}
this.isSettingProxy = true
try {
if (config?.mode === this.config?.mode && config?.proxyRules === this.config?.proxyRules) {
Logger.info('proxy config is the same, skip configure')
return
}
this.config = config
this.clearSystemProxyMonitor()
if (this.config.mode === 'system') {
await this.setSystemProxy()
this.monitorSystemProxy()
} else if (this.config.mode === 'custom') {
await this.setCustomProxy()
} else {
await this.clearProxy()
if (config.mode === 'system') {
const currentProxy = await getSystemProxy()
if (currentProxy) {
Logger.info('current system proxy', currentProxy.proxyUrl)
this.config.proxyRules = currentProxy.proxyUrl.toLowerCase()
this.monitorSystemProxy()
} else {
// no system proxy, use direct mode
this.config.mode = 'direct'
}
}
this.setGlobalProxy()
} catch (error) {
console.error('Failed to config proxy:', error)
Logger.error('Failed to config proxy:', error)
throw error
} finally {
this.isSettingProxy = false
}
}
private setEnvironment(url: string): void {
if (url === '') {
delete process.env.HTTP_PROXY
delete process.env.HTTPS_PROXY
delete process.env.grpc_proxy
delete process.env.http_proxy
delete process.env.https_proxy
delete process.env.SOCKS_PROXY
delete process.env.ALL_PROXY
return
}
process.env.grpc_proxy = url
process.env.HTTP_PROXY = url
process.env.HTTPS_PROXY = url
process.env.http_proxy = url
process.env.https_proxy = url
}
private async setSystemProxy(): Promise<void> {
try {
const currentProxy = await getSystemProxy()
if (!currentProxy || currentProxy.proxyUrl === this.config.url) {
return
}
await this.setSessionsProxy({ mode: 'system' })
this.config.url = currentProxy.proxyUrl.toLowerCase()
this.setEnvironment(this.config.url)
this.proxyAgent = new GeneralProxyAgent()
} catch (error) {
console.error('Failed to set system proxy:', error)
throw error
if (url.startsWith('socks')) {
process.env.SOCKS_PROXY = url
process.env.ALL_PROXY = url
}
}
private async setCustomProxy(): Promise<void> {
try {
if (this.config.url) {
this.setEnvironment(this.config.url)
this.proxyAgent = new GeneralProxyAgent()
await this.setSessionsProxy({ proxyRules: this.config.url })
private setGlobalProxy() {
this.setEnvironment(this.config.proxyRules || '')
this.setGlobalFetchProxy(this.config)
this.setSessionsProxy(this.config)
this.setGlobalHttpProxy(this.config)
}
private setGlobalHttpProxy(config: ProxyConfig) {
const proxyUrl = config.proxyRules
if (config.mode === 'direct' || !proxyUrl) {
http.get = this.originalHttpGet
http.request = this.originalHttpRequest
https.get = this.originalHttpsGet
https.request = this.originalHttpsRequest
axios.defaults.proxy = undefined
axios.defaults.httpAgent = undefined
axios.defaults.httpsAgent = undefined
return
}
// ProxyAgent reads the proxy configuration from the environment variables set above
const agent = new ProxyAgent()
// Route axios through the proxy agent
axios.defaults.proxy = false
axios.defaults.httpAgent = agent
axios.defaults.httpsAgent = agent
http.get = this.bindHttpMethod(this.originalHttpGet, agent)
http.request = this.bindHttpMethod(this.originalHttpRequest, agent)
https.get = this.bindHttpMethod(this.originalHttpsGet, agent)
https.request = this.bindHttpMethod(this.originalHttpsRequest, agent)
}
// eslint-disable-next-line @typescript-eslint/no-unsafe-function-type
private bindHttpMethod(originalMethod: Function, agent: http.Agent | https.Agent) {
return (...args: any[]) => {
let url: string | URL | undefined
let options: http.RequestOptions | https.RequestOptions
let callback: (res: http.IncomingMessage) => void
if (typeof args[0] === 'string' || args[0] instanceof URL) {
url = args[0]
if (typeof args[1] === 'function') {
options = {}
callback = args[1]
} else {
options = {
...args[1]
}
callback = args[2]
}
} else {
options = {
...args[0]
}
callback = args[1]
}
} catch (error) {
console.error('Failed to set custom proxy:', error)
throw error
// For WebDAV over HTTPS with a self-signed certificate
if (options.agent instanceof https.Agent) {
;(agent as https.Agent).options.rejectUnauthorized = options.agent.options.rejectUnauthorized
}
// Make sure only the agent is set; do not modify other network options
if (!options.agent) {
options.agent = agent
}
if (url) {
return originalMethod(url, options, callback)
}
return originalMethod(options, callback)
}
}
private clearEnvironment(): void {
delete process.env.HTTP_PROXY
delete process.env.HTTPS_PROXY
delete process.env.grpc_proxy
delete process.env.http_proxy
delete process.env.https_proxy
private setGlobalFetchProxy(config: ProxyConfig) {
const proxyUrl = config.proxyRules
if (config.mode === 'direct' || !proxyUrl) {
setGlobalDispatcher(this.originalGlobalDispatcher)
global[Symbol.for('undici.globalDispatcher.1')] = this.originalSocksDispatcher
return
}
const url = new URL(proxyUrl)
if (url.protocol === 'http:' || url.protocol === 'https:') {
setGlobalDispatcher(new EnvHttpProxyAgent())
return
}
global[Symbol.for('undici.globalDispatcher.1')] = socksDispatcher({
port: parseInt(url.port),
type: url.protocol === 'socks4:' ? 4 : 5,
host: url.hostname,
userId: url.username || undefined,
password: url.password || undefined
})
}
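// Routing summary (illustrative URLs):
//   proxyRules = 'http://127.0.0.1:7890'   -> EnvHttpProxyAgent (honours the HTTP(S)_PROXY env set above)
//   proxyRules = 'socks5://127.0.0.1:1080' -> socksDispatcher({ type: 5, host: '127.0.0.1', port: 1080 })
//   mode = 'direct'                        -> the original undici dispatchers are restored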
private async clearProxy(): Promise<void> {
this.clearEnvironment()
await this.setSessionsProxy({ mode: 'direct' })
this.config = { mode: 'none' }
this.proxyAgent = null
}
private async setSessionsProxy(config: ProxyConfig): Promise<void> {
let c = config
getProxyAgent(): GeneralProxyAgent | null {
return this.proxyAgent
}
if (config.mode === 'direct' || !config.proxyRules) {
c = { mode: 'direct' }
}
getProxyUrl(): string {
return this.config.url || ''
}
const sessions = [session.defaultSession, session.fromPartition('persist:webview')]
await Promise.all(sessions.map((session) => session.setProxy(c)))
// setGlobalProxy() {
// const proxyUrl = this.config.url
// if (proxyUrl) {
// const [protocol, address] = proxyUrl.split('://')
// const [host, port] = address.split(':')
// if (!protocol.includes('socks')) {
// setGlobalDispatcher(new ProxyAgent(proxyUrl))
// } else {
// global[Symbol.for('undici.globalDispatcher.1')] = socksDispatcher({
// port: parseInt(port),
// type: protocol === 'socks5' ? 5 : 4,
// host: host
// })
// }
// }
// }
// set proxy for electron
app.setProxy(c)
}
}
export const proxyManager = new ProxyManager()

View File

@ -1,83 +0,0 @@
import Logger from 'electron-log'
import type { Operator as OperatorType } from 'opendal'
const { Operator } = require('opendal')
export default class S3Storage {
public instance: OperatorType | undefined
/**
*
* @param scheme is the scheme for opendal services. Available values include "azblob", "azdls", "cos", "gcs", "obs", "oss", "s3", "webdav", "webhdfs", "aliyun-drive", "alluxio", "azfile", "dropbox", "gdrive", "onedrive", "postgresql", "mysql", "redis", "swift", "mongodb", "alluxio", "b2", "seafile", "upyun", "koofr", "yandex-disk"
* @param options is the options for the given opendal service. Valid options depend on the scheme. Check out https://docs.rs/opendal/latest/opendal/services/index.html for all valid options.
*
* For example, use minio as remote storage:
*
* ```typescript
* const storage = new S3Storage('s3', {
* endpoint: 'http://localhost:9000',
* region: 'us-east-1',
* bucket: 'testbucket',
* access_key_id: 'user',
* secret_access_key: 'password',
* root: '/path/to/basepath',
* })
* ```
*/
constructor(scheme: string, options?: Record<string, string> | undefined | null) {
this.instance = new Operator(scheme, options)
this.putFileContents = this.putFileContents.bind(this)
this.getFileContents = this.getFileContents.bind(this)
}
public putFileContents = async (filename: string, data: string | Buffer) => {
if (!this.instance) {
return new Error('RemoteStorage client not initialized')
}
try {
return await this.instance.write(filename, data)
} catch (error) {
Logger.error('[RemoteStorage] Error putting file contents:', error)
throw error
}
}
public getFileContents = async (filename: string) => {
if (!this.instance) {
throw new Error('RemoteStorage client not initialized')
}
try {
return await this.instance.read(filename)
} catch (error) {
Logger.error('[RemoteStorage] Error getting file contents:', error)
throw error
}
}
public deleteFile = async (filename: string) => {
if (!this.instance) {
throw new Error('RemoteStorage client not initialized')
}
try {
return await this.instance.delete(filename)
} catch (error) {
Logger.error('[RemoteStorage] Error deleting file:', error)
throw error
}
}
public checkConnection = async () => {
if (!this.instance) {
throw new Error('RemoteStorage client not initialized')
}
try {
// Check whether the root directory is accessible
return await this.instance.stat('/')
} catch (error) {
Logger.error('[RemoteStorage] Error checking connection:', error)
throw error
}
}
}

View File

@ -0,0 +1,183 @@
import {
DeleteObjectCommand,
GetObjectCommand,
HeadBucketCommand,
ListObjectsV2Command,
PutObjectCommand,
S3Client
} from '@aws-sdk/client-s3'
import type { S3Config } from '@types'
import Logger from 'electron-log'
import * as net from 'net'
import { Readable } from 'stream'
/**
* Convert a readable stream into a Buffer
*/
function streamToBuffer(stream: Readable): Promise<Buffer> {
return new Promise((resolve, reject) => {
const chunks: Buffer[] = []
stream.on('data', (chunk) => chunks.push(Buffer.isBuffer(chunk) ? chunk : Buffer.from(chunk)))
stream.on('error', reject)
stream.on('end', () => resolve(Buffer.concat(chunks)))
})
}
// Whitelist of provider domain suffixes that require Virtual Host-Style addressing
const VIRTUAL_HOST_SUFFIXES = ['aliyuncs.com', 'myqcloud.com']
/**
* S3-compatible remote storage implemented with AWS SDK v3
*/
export default class S3Storage {
private client: S3Client
private bucket: string
private root: string
constructor(config: S3Config) {
const { endpoint, region, accessKeyId, secretAccessKey, bucket, root } = config
const usePathStyle = (() => {
if (!endpoint) return false
try {
const { hostname } = new URL(endpoint)
if (hostname === 'localhost' || net.isIP(hostname) !== 0) {
return true
}
const isInWhiteList = VIRTUAL_HOST_SUFFIXES.some((suffix) => hostname.endsWith(suffix))
return !isInWhiteList
} catch (e) {
Logger.warn('[S3Storage] Failed to parse endpoint, fallback to Path-Style:', endpoint, e)
return true
}
})()
this.client = new S3Client({
region,
endpoint: endpoint || undefined,
credentials: {
accessKeyId: accessKeyId,
secretAccessKey: secretAccessKey
},
forcePathStyle: usePathStyle
})
this.bucket = bucket
this.root = root?.replace(/^\/+/g, '').replace(/\/+$/g, '') || ''
this.putFileContents = this.putFileContents.bind(this)
this.getFileContents = this.getFileContents.bind(this)
this.deleteFile = this.deleteFile.bind(this)
this.listFiles = this.listFiles.bind(this)
this.checkConnection = this.checkConnection.bind(this)
}
/**
* Build the final object key by prefixing the configured root
*/
private buildKey(key: string): string {
if (!this.root) return key
return key.startsWith(`${this.root}/`) ? key : `${this.root}/${key}`
}
async putFileContents(key: string, data: Buffer | string) {
try {
const contentType = key.endsWith('.zip') ? 'application/zip' : 'application/octet-stream'
return await this.client.send(
new PutObjectCommand({
Bucket: this.bucket,
Key: this.buildKey(key),
Body: data,
ContentType: contentType
})
)
} catch (error) {
Logger.error('[S3Storage] Error putting object:', error)
throw error
}
}
async getFileContents(key: string): Promise<Buffer> {
try {
const res = await this.client.send(new GetObjectCommand({ Bucket: this.bucket, Key: this.buildKey(key) }))
if (!res.Body || !(res.Body instanceof Readable)) {
throw new Error('Empty body received from S3')
}
return await streamToBuffer(res.Body as Readable)
} catch (error) {
Logger.error('[S3Storage] Error getting object:', error)
throw error
}
}
async deleteFile(key: string) {
try {
const keyWithRoot = this.buildKey(key)
const variations = new Set([keyWithRoot, key.replace(/^\//, '')])
for (const k of variations) {
try {
await this.client.send(new DeleteObjectCommand({ Bucket: this.bucket, Key: k }))
} catch {
// Ignore failures for individual key variations
}
}
} catch (error) {
Logger.error('[S3Storage] Error deleting object:', error)
throw error
}
}
/**
* List all objects under the given prefix, following pagination
*/
async listFiles(prefix = ''): Promise<Array<{ key: string; lastModified?: string; size: number }>> {
const files: Array<{ key: string; lastModified?: string; size: number }> = []
let continuationToken: string | undefined
const fullPrefix = this.buildKey(prefix)
try {
do {
const res = await this.client.send(
new ListObjectsV2Command({
Bucket: this.bucket,
Prefix: fullPrefix === '' ? undefined : fullPrefix,
ContinuationToken: continuationToken
})
)
res.Contents?.forEach((obj) => {
if (!obj.Key) return
files.push({
key: obj.Key,
lastModified: obj.LastModified?.toISOString(),
size: obj.Size ?? 0
})
})
continuationToken = res.IsTruncated ? res.NextContinuationToken : undefined
} while (continuationToken)
return files
} catch (error) {
Logger.error('[S3Storage] Error listing objects:', error)
throw error
}
}
/**
* Check connectivity and permissions via HeadBucket
*/
async checkConnection() {
try {
await this.client.send(new HeadBucketCommand({ Bucket: this.bucket }))
return true
} catch (error) {
Logger.error('[S3Storage] Error checking connection:', error)
throw error
}
}
}
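For reference, a minimal usage sketch of the S3Storage class above. This is illustrative only and not part of the diff: the import path, endpoint, bucket, credentials and file name are assumed placeholders, and it presumes S3Config carries just the fields the constructor destructures.
import type { S3Config } from '@types'
import S3Storage from './S3Storage' // assumed path
async function s3RoundTrip(zipBuffer: Buffer): Promise<void> {
  const config = {
    endpoint: 'https://s3.example.com', // unknown suffix -> falls back to Path-Style addressing
    region: 'us-east-1',
    accessKeyId: 'AKIA...',
    secretAccessKey: '...',
    bucket: 'cherry-backup',
    root: 'backups' // all keys get the 'backups/' prefix
  } as S3Config
  const storage = new S3Storage(config)
  await storage.checkConnection() // HeadBucket; throws if the bucket is unreachable
  await storage.putFileContents('data.zip', zipBuffer) // ContentType inferred as application/zip
  const restored = await storage.getFileContents('data.zip') // Buffer
  const files = await storage.listFiles() // e.g. [{ key: 'backups/data.zip', size, lastModified }]
  console.log(restored.length, files.length)
}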

View File

@ -1,7 +1,7 @@
import { SELECTION_FINETUNED_LIST, SELECTION_PREDEFINED_BLACKLIST } from '@main/configs/SelectionConfig'
import { isDev, isWin } from '@main/constant'
import { isDev, isMac, isWin } from '@main/constant'
import { IpcChannel } from '@shared/IpcChannel'
import { BrowserWindow, ipcMain, screen } from 'electron'
import { app, BrowserWindow, ipcMain, screen, systemPreferences } from 'electron'
import Logger from 'electron-log'
import { join } from 'path'
import type {
@ -16,9 +16,12 @@ import type { ActionItem } from '../../renderer/src/types/selectionTypes'
import { ConfigKeys, configManager } from './ConfigManager'
import storeSyncService from './StoreSyncService'
const isSupportedOS = isWin || isMac
let SelectionHook: SelectionHookConstructor | null = null
try {
if (isWin) {
//since selection-hook v1.0.0, it supports macOS
if (isSupportedOS) {
SelectionHook = require('selection-hook')
}
} catch (error) {
@ -118,7 +121,7 @@ export class SelectionService {
}
public static getInstance(): SelectionService | null {
if (!isWin) return null
if (!isSupportedOS) return null
if (!SelectionService.instance) {
SelectionService.instance = new SelectionService()
@ -138,7 +141,7 @@ export class SelectionService {
* Initialize zoom factor from config and subscribe to changes
* Ensures UI elements scale properly with system DPI settings
*/
private initZoomFactor() {
private initZoomFactor(): void {
const zoomFactor = configManager.getZoomFactor()
if (zoomFactor) {
this.setZoomFactor(zoomFactor)
@ -151,7 +154,7 @@ export class SelectionService {
this.zoomFactor = zoomFactor
}
private initConfig() {
private initConfig(): void {
this.triggerMode = configManager.getSelectionAssistantTriggerMode() as TriggerMode
this.isFollowToolbar = configManager.getSelectionAssistantFollowToolbar()
this.isRemeberWinSize = configManager.getSelectionAssistantRemeberWinSize()
@ -204,7 +207,7 @@ export class SelectionService {
* @param mode - The mode to set, either 'default', 'whitelist', or 'blacklist'
* @param list - An array of strings representing the list of items to include or exclude
*/
private setHookGlobalFilterMode(mode: string, list: string[]) {
private setHookGlobalFilterMode(mode: string, list: string[]): void {
if (!this.selectionHook) return
const modeMap = {
@ -213,6 +216,8 @@ export class SelectionService {
blacklist: SelectionHook!.FilterMode.EXCLUDE_LIST
}
const predefinedBlacklist = isWin ? SELECTION_PREDEFINED_BLACKLIST.WINDOWS : SELECTION_PREDEFINED_BLACKLIST.MAC
let combinedList: string[] = list
let combinedMode = mode
@ -221,7 +226,7 @@ export class SelectionService {
switch (mode) {
case 'blacklist':
//combine the predefined blacklist with the user-defined blacklist
combinedList = [...new Set([...list, ...SELECTION_PREDEFINED_BLACKLIST.WINDOWS])]
combinedList = [...new Set([...list, ...predefinedBlacklist])]
break
case 'whitelist':
combinedList = [...list]
@ -229,7 +234,7 @@ export class SelectionService {
case 'default':
default:
//use the predefined blacklist as the default filter list
combinedList = [...SELECTION_PREDEFINED_BLACKLIST.WINDOWS]
combinedList = [...predefinedBlacklist]
combinedMode = 'blacklist'
break
}
@ -240,17 +245,24 @@ export class SelectionService {
}
}
private setHookFineTunedList() {
private setHookFineTunedList(): void {
if (!this.selectionHook) return
const excludeClipboardCursorDetectList = isWin
? SELECTION_FINETUNED_LIST.EXCLUDE_CLIPBOARD_CURSOR_DETECT.WINDOWS
: SELECTION_FINETUNED_LIST.EXCLUDE_CLIPBOARD_CURSOR_DETECT.MAC
const includeClipboardDelayReadList = isWin
? SELECTION_FINETUNED_LIST.INCLUDE_CLIPBOARD_DELAY_READ.WINDOWS
: SELECTION_FINETUNED_LIST.INCLUDE_CLIPBOARD_DELAY_READ.MAC
this.selectionHook.setFineTunedList(
SelectionHook!.FineTunedListType.EXCLUDE_CLIPBOARD_CURSOR_DETECT,
SELECTION_FINETUNED_LIST.EXCLUDE_CLIPBOARD_CURSOR_DETECT.WINDOWS
excludeClipboardCursorDetectList
)
this.selectionHook.setFineTunedList(
SelectionHook!.FineTunedListType.INCLUDE_CLIPBOARD_DELAY_READ,
SELECTION_FINETUNED_LIST.INCLUDE_CLIPBOARD_DELAY_READ.WINDOWS
includeClipboardDelayReadList
)
}
@ -259,11 +271,33 @@ export class SelectionService {
* @returns {boolean} Success status of service start
*/
public start(): boolean {
if (!this.selectionHook || this.started) {
this.logError(new Error('SelectionService start(): instance is null or already started'))
if (!isSupportedOS) {
this.logError(new Error('SelectionService start(): not supported on this OS'))
return false
}
if (!this.selectionHook) {
this.logError(new Error('SelectionService start(): instance is null'))
return false
}
if (this.started) {
this.logError(new Error('SelectionService start(): already started'))
return false
}
//On macOS, we need to check if the process is trusted
if (isMac) {
if (!systemPreferences.isTrustedAccessibilityClient(false)) {
this.logError(
new Error(
'SelectionService not started: process is not trusted on macOS, please turn on the Accessibility permission'
)
)
return false
}
}
try {
//make sure the toolbar window is ready
this.createToolbarWindow()
@ -306,6 +340,7 @@ export class SelectionService {
if (!this.selectionHook) return false
this.selectionHook.stop()
this.selectionHook.cleanup() //already remove all listeners
//reset the listener states
@ -316,6 +351,7 @@ export class SelectionService {
this.toolbarWindow.close()
this.toolbarWindow = null
}
this.closePreloadedActionWindows()
this.started = false
@ -342,7 +378,7 @@ export class SelectionService {
* Toggle the enabled state of the selection service
* Will sync the new enabled store to all renderer windows
*/
public toggleEnabled(enabled: boolean | undefined = undefined) {
public toggleEnabled(enabled: boolean | undefined = undefined): void {
if (!this.selectionHook) return
const newEnabled = enabled === undefined ? !configManager.getSelectionAssistantEnabled() : enabled
@ -358,7 +394,7 @@ export class SelectionService {
* Sets up window properties, event handlers, and loads the toolbar UI
* @param readyCallback Optional callback when window is ready to show
*/
private createToolbarWindow(readyCallback?: () => void) {
private createToolbarWindow(readyCallback?: () => void): void {
if (this.isToolbarAlive()) return
const { toolbarWidth, toolbarHeight } = this.getToolbarRealSize()
@ -366,21 +402,31 @@ export class SelectionService {
this.toolbarWindow = new BrowserWindow({
width: toolbarWidth,
height: toolbarHeight,
show: false,
frame: false,
transparent: true,
alwaysOnTop: true,
skipTaskbar: true,
autoHideMenuBar: true,
resizable: false,
minimizable: false,
maximizable: false,
fullscreenable: false, // [macOS] must be false
movable: true,
focusable: false,
hasShadow: false,
thickFrame: false,
roundedCorners: true,
backgroundMaterial: 'none',
type: 'toolbar',
show: false,
// Platform specific settings
// [macOS] DO NOT set focusable to false, it will cause other windows to be brought to the front together
// [macOS] `panel` conflicts with other settings,
// and the log will show `NSWindow does not support nonactivating panel styleMask 0x80`,
// but it still seems to work on fullscreen apps, so we set it anyway
...(isWin ? { type: 'toolbar', focusable: false } : { type: 'panel' }),
hiddenInMissionControl: true, // [macOS only]
acceptFirstMouse: true, // [macOS only]
webPreferences: {
preload: join(__dirname, '../preload/index.js'),
contextIsolation: true,
@ -392,7 +438,9 @@ export class SelectionService {
// Hide when losing focus
this.toolbarWindow.on('blur', () => {
this.hideToolbar()
if (this.toolbarWindow!.isVisible()) {
this.hideToolbar()
}
})
// Clean up when closed
@ -437,10 +485,10 @@ export class SelectionService {
* @param point Reference point for positioning, logical coordinates
* @param orientation Preferred position relative to reference point
*/
private showToolbarAtPosition(point: Point, orientation: RelativeOrientation) {
private showToolbarAtPosition(point: Point, orientation: RelativeOrientation, programName: string): void {
if (!this.isToolbarAlive()) {
this.createToolbarWindow(() => {
this.showToolbarAtPosition(point, orientation)
this.showToolbarAtPosition(point, orientation, programName)
})
return
}
@ -460,15 +508,56 @@ export class SelectionService {
//set the window to always on top (highest level)
//should set every time the window is shown
this.toolbarWindow!.setAlwaysOnTop(true, 'screen-saver')
this.toolbarWindow!.show()
/**
* In Windows 10, setOpacity(1) will make the window completely transparent
* It's a strange behavior, so we don't use it for compatibility
*/
// this.toolbarWindow!.setOpacity(1)
if (!isMac) {
this.toolbarWindow!.show()
/**
* [Windows]
* In Windows 10, setOpacity(1) will make the window completely transparent
* It's a strange behavior, so we don't use it for compatibility
*/
// this.toolbarWindow!.setOpacity(1)
this.startHideByMouseKeyListener()
return
}
/************************************************
* [macOS] the following code is only for macOS
*
* WARNING:
* DO NOT MODIFY THESE CODES, UNLESS YOU REALLY KNOW WHAT YOU ARE DOING!!!!
*************************************************/
// [macOS] a hacky way
// when `skipTransformProcessType: true` is set and the selection is made in our own app, the selection gets canceled after the toolbar is shown
// so we simply don't set `skipTransformProcessType: true` when the selection is in our own app
const isSelf = ['com.github.Electron', 'com.kangfenmao.CherryStudio'].includes(programName)
if (!isSelf) {
// [macOS] an ugly hacky way
// `focusable: true` makes the mainWindow disappear when `setVisibleOnAllWorkspaces` is called
// so we set `focusable: false` before showing, and restore it to true after showing
this.toolbarWindow!.setFocusable(false)
// [macOS]
// force `setVisibleOnAllWorkspaces: true` to let toolbar show in all workspaces. And we MUST not set it to false again
// set `skipTransformProcessType: true` to avoid dock icon spinning when `setVisibleOnAllWorkspaces`
this.toolbarWindow!.setVisibleOnAllWorkspaces(true, {
visibleOnFullScreen: true,
skipTransformProcessType: true
})
}
// [macOS] MUST use `showInactive()` to prevent other windows from being brought to the front together
// [Windows] both `show()` and `showInactive()` are OK because of `focusable: false`
this.toolbarWindow!.showInactive()
// [macOS] restore the focusable status
this.toolbarWindow!.setFocusable(true)
this.startHideByMouseKeyListener()
return
}
/**
@ -477,18 +566,60 @@ export class SelectionService {
public hideToolbar(): void {
if (!this.isToolbarAlive()) return
// this.toolbarWindow!.setOpacity(0)
this.stopHideByMouseKeyListener()
// [Windows] just hide the toolbar window is enough
if (!isMac) {
this.toolbarWindow!.hide()
return
}
/************************************************
* [macOS] the following code is only for macOS
*************************************************/
// [macOS] a HACKY way
// make sure other windows are not brought to the front when the toolbar is hidden
// get all focusable windows and set them to not focusable
const focusableWindows: BrowserWindow[] = []
for (const window of BrowserWindow.getAllWindows()) {
if (!window.isDestroyed() && window.isVisible()) {
if (window.isFocusable()) {
focusableWindows.push(window)
window.setFocusable(false)
}
}
}
this.toolbarWindow!.hide()
this.stopHideByMouseKeyListener()
// set them back to focusable after 50ms
setTimeout(() => {
for (const window of focusableWindows) {
if (!window.isDestroyed()) {
window.setFocusable(true)
}
}
}, 50)
// [macOS] hacky way
// Because the toolbar is never a FOCUSED window, its hover state would persist the next time it is shown
// so we send a mouseMove event to the toolbar window to clear the hover state
this.toolbarWindow!.webContents.sendInputEvent({
type: 'mouseMove',
x: -1,
y: -1
})
return
}
/**
* Check if toolbar window exists and is not destroyed
* @returns {boolean} Toolbar window status
*/
private isToolbarAlive() {
return this.toolbarWindow && !this.toolbarWindow.isDestroyed()
private isToolbarAlive(): boolean {
return !!(this.toolbarWindow && !this.toolbarWindow.isDestroyed())
}
/**
@ -497,7 +628,7 @@ export class SelectionService {
* @param width New toolbar width
* @param height New toolbar height
*/
public determineToolbarSize(width: number, height: number) {
public determineToolbarSize(width: number, height: number): void {
const toolbarWidth = Math.ceil(width)
// only update toolbar width if it's changed
@ -510,7 +641,7 @@ export class SelectionService {
* Get actual toolbar dimensions accounting for zoom factor
* @returns Object containing toolbar width and height
*/
private getToolbarRealSize() {
private getToolbarRealSize(): { toolbarWidth: number; toolbarHeight: number } {
return {
toolbarWidth: this.TOOLBAR_WIDTH * this.zoomFactor,
toolbarHeight: this.TOOLBAR_HEIGHT * this.zoomFactor
@ -520,71 +651,71 @@ export class SelectionService {
/**
* Calculate optimal toolbar position based on selection context
* Ensures toolbar stays within screen boundaries and follows selection direction
* @param point Reference point for positioning, must be INTEGER
* @param refPoint Reference point for positioning, must be INTEGER
* @param orientation Preferred position relative to reference point
* @returns Calculated screen coordinates for toolbar, INTEGER
*/
private calculateToolbarPosition(point: Point, orientation: RelativeOrientation): Point {
private calculateToolbarPosition(refPoint: Point, orientation: RelativeOrientation): Point {
// Calculate initial position based on the specified anchor
let posX: number, posY: number
const posPoint: Point = { x: 0, y: 0 }
const { toolbarWidth, toolbarHeight } = this.getToolbarRealSize()
switch (orientation) {
case 'topLeft':
posX = point.x - toolbarWidth
posY = point.y - toolbarHeight
posPoint.x = refPoint.x - toolbarWidth
posPoint.y = refPoint.y - toolbarHeight
break
case 'topRight':
posX = point.x
posY = point.y - toolbarHeight
posPoint.x = refPoint.x
posPoint.y = refPoint.y - toolbarHeight
break
case 'topMiddle':
posX = point.x - toolbarWidth / 2
posY = point.y - toolbarHeight
posPoint.x = refPoint.x - toolbarWidth / 2
posPoint.y = refPoint.y - toolbarHeight
break
case 'bottomLeft':
posX = point.x - toolbarWidth
posY = point.y
posPoint.x = refPoint.x - toolbarWidth
posPoint.y = refPoint.y
break
case 'bottomRight':
posX = point.x
posY = point.y
posPoint.x = refPoint.x
posPoint.y = refPoint.y
break
case 'bottomMiddle':
posX = point.x - toolbarWidth / 2
posY = point.y
posPoint.x = refPoint.x - toolbarWidth / 2
posPoint.y = refPoint.y
break
case 'middleLeft':
posX = point.x - toolbarWidth
posY = point.y - toolbarHeight / 2
posPoint.x = refPoint.x - toolbarWidth
posPoint.y = refPoint.y - toolbarHeight / 2
break
case 'middleRight':
posX = point.x
posY = point.y - toolbarHeight / 2
posPoint.x = refPoint.x
posPoint.y = refPoint.y - toolbarHeight / 2
break
case 'center':
posX = point.x - toolbarWidth / 2
posY = point.y - toolbarHeight / 2
posPoint.x = refPoint.x - toolbarWidth / 2
posPoint.y = refPoint.y - toolbarHeight / 2
break
default:
// Default to 'topMiddle' if invalid position
posX = point.x - toolbarWidth / 2
posY = point.y - toolbarHeight / 2
posPoint.x = refPoint.x - toolbarWidth / 2
posPoint.y = refPoint.y - toolbarHeight / 2
}
//use original point to get the display
const display = screen.getDisplayNearestPoint({ x: point.x, y: point.y })
const display = screen.getDisplayNearestPoint(refPoint)
// Ensure toolbar stays within screen boundaries
posX = Math.round(
Math.max(display.workArea.x, Math.min(posX, display.workArea.x + display.workArea.width - toolbarWidth))
posPoint.x = Math.round(
Math.max(display.workArea.x, Math.min(posPoint.x, display.workArea.x + display.workArea.width - toolbarWidth))
)
posY = Math.round(
Math.max(display.workArea.y, Math.min(posY, display.workArea.y + display.workArea.height - toolbarHeight))
posPoint.y = Math.round(
Math.max(display.workArea.y, Math.min(posPoint.y, display.workArea.y + display.workArea.height - toolbarHeight))
)
return { x: posX, y: posY }
return posPoint
}
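// Worked example of the clamping above (illustrative numbers, not from the source):
// toolbar 300x40 (after zoom), orientation 'topMiddle', refPoint (100, 20),
// display workArea { x: 0, y: 0, width: 1920, height: 1040 }:
//   raw position   = (100 - 300 / 2, 20 - 40)                         -> (-50, -20)
//   after clamping = (max(0, min(-50, 1620)), max(0, min(-20, 1000))) -> (0, 0)
// i.e. the toolbar is pushed back inside the work area instead of spilling off-screen.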
private isSamePoint(point1: Point, point2: Point): boolean {
@ -773,13 +904,17 @@ export class SelectionService {
}
if (!isLogical) {
// [macOS] don't need to convert by screenToDipPoint
if (!isMac) {
refPoint = screen.screenToDipPoint(refPoint)
}
//screenToDipPoint can be float, so we need to round it
refPoint = screen.screenToDipPoint(refPoint)
refPoint = { x: Math.round(refPoint.x), y: Math.round(refPoint.y) }
}
this.showToolbarAtPosition(refPoint, refOrientation)
this.toolbarWindow?.webContents.send(IpcChannel.Selection_TextSelected, selectionData)
// [macOS] isFullscreen is only available on macOS
this.showToolbarAtPosition(refPoint, refOrientation, selectionData.programName)
this.toolbarWindow!.webContents.send(IpcChannel.Selection_TextSelected, selectionData)
}
/**
@ -787,7 +922,7 @@ export class SelectionService {
*/
// Start monitoring global mouse clicks
private startHideByMouseKeyListener() {
private startHideByMouseKeyListener(): void {
try {
// Register event handlers
this.selectionHook!.on('mouse-down', this.handleMouseDownHide)
@ -800,7 +935,7 @@ export class SelectionService {
}
// Stop monitoring global mouse clicks
private stopHideByMouseKeyListener() {
private stopHideByMouseKeyListener(): void {
if (!this.isHideByMouseKeyListenerActive) return
try {
@ -832,8 +967,8 @@ export class SelectionService {
return
}
//data point is physical coordinates, convert to logical coordinates
const mousePoint = screen.screenToDipPoint({ x: data.x, y: data.y })
//data point is in physical coordinates, convert to logical coordinates (only for Windows/Linux)
const mousePoint = isMac ? { x: data.x, y: data.y } : screen.screenToDipPoint({ x: data.x, y: data.y })
const bounds = this.toolbarWindow!.getBounds()
@ -966,7 +1101,8 @@ export class SelectionService {
frame: false,
transparent: true,
autoHideMenuBar: true,
titleBarStyle: 'hidden',
titleBarStyle: 'hidden', // [macOS]
trafficLightPosition: { x: 12, y: 9 }, // [macOS]
hasShadow: false,
thickFrame: false,
show: false,
@ -993,7 +1129,7 @@ export class SelectionService {
* Initialize preloaded action windows
* Creates a pool of windows at startup for faster response
*/
private async initPreloadedActionWindows() {
private async initPreloadedActionWindows(): Promise<void> {
try {
// Create initial pool of preloaded windows
for (let i = 0; i < this.PRELOAD_ACTION_WINDOW_COUNT; i++) {
@ -1007,7 +1143,7 @@ export class SelectionService {
/**
* Close all preloaded action windows
*/
private closePreloadedActionWindows() {
private closePreloadedActionWindows(): void {
for (const actionWindow of this.preloadedActionWindows) {
if (!actionWindow.isDestroyed()) {
actionWindow.destroy()
@ -1019,7 +1155,7 @@ export class SelectionService {
* Preload a new action window asynchronously
* This method is called after popping a window to ensure we always have windows ready
*/
private async pushNewActionWindow() {
private async pushNewActionWindow(): Promise<void> {
try {
const actionWindow = this.createPreloadedActionWindow()
this.preloadedActionWindows.push(actionWindow)
@ -1033,7 +1169,7 @@ export class SelectionService {
* Immediately returns a window and asynchronously creates a new one
* @returns {BrowserWindow} The action window
*/
private popActionWindow() {
private popActionWindow(): BrowserWindow {
// Get a window from the preloaded queue or create a new one if empty
const actionWindow = this.preloadedActionWindows.pop() || this.createPreloadedActionWindow()
@ -1043,6 +1179,27 @@ export class SelectionService {
if (!actionWindow.isDestroyed()) {
actionWindow.destroy()
}
// [macOS] a HACKY way
// make sure other windows are not brought to the front when the action window is closed
if (isMac) {
const focusableWindows: BrowserWindow[] = []
for (const window of BrowserWindow.getAllWindows()) {
if (!window.isDestroyed() && window.isVisible()) {
if (window.isFocusable()) {
focusableWindows.push(window)
window.setFocusable(false)
}
}
}
setTimeout(() => {
for (const window of focusableWindows) {
if (!window.isDestroyed()) {
window.setFocusable(true)
}
}
}, 50)
}
})
//remember the action window size
@ -1063,20 +1220,26 @@ export class SelectionService {
return actionWindow
}
public processAction(actionItem: ActionItem): void {
/**
* Process action item
* @param actionItem Action item to process
* @param isFullScreen [macOS] whether the source app is in fullscreen mode (only available on macOS)
*/
public processAction(actionItem: ActionItem, isFullScreen: boolean = false): void {
const actionWindow = this.popActionWindow()
actionWindow.webContents.send(IpcChannel.Selection_UpdateActionData, actionItem)
this.showActionWindow(actionWindow)
this.showActionWindow(actionWindow, isFullScreen)
}
/**
* Show action window with proper positioning relative to toolbar
* Ensures window stays within screen boundaries
* @param actionWindow Window to position and show
* @param isFullScreen [macOS] whether the source app is in fullscreen mode (only available on macOS)
*/
private showActionWindow(actionWindow: BrowserWindow) {
private showActionWindow(actionWindow: BrowserWindow, isFullScreen: boolean = false): void {
let actionWindowWidth = this.ACTION_WINDOW_WIDTH
let actionWindowHeight = this.ACTION_WINDOW_HEIGHT
@ -1086,63 +1249,125 @@ export class SelectionService {
actionWindowHeight = this.lastActionWindowSize.height
}
//center way
/********************************************
* Setting the position of the action window
********************************************/
const display = screen.getDisplayNearestPoint(screen.getCursorScreenPoint())
const workArea = display.workArea
// Center of the screen
if (!this.isFollowToolbar || !this.toolbarWindow) {
if (this.isRemeberWinSize) {
actionWindow.setBounds({
width: actionWindowWidth,
height: actionWindowHeight
})
const centerX = Math.round(workArea.x + (workArea.width - actionWindowWidth) / 2)
const centerY = Math.round(workArea.y + (workArea.height - actionWindowHeight) / 2)
actionWindow.setPosition(centerX, centerY, false)
actionWindow.setBounds({
width: actionWindowWidth,
height: actionWindowHeight,
x: centerX,
y: centerY
})
} else {
// Follow toolbar position
const toolbarBounds = this.toolbarWindow!.getBounds()
const GAP = 6 // 6px gap from screen edges
//make sure action window is inside screen
if (actionWindowWidth > workArea.width - 2 * GAP) {
actionWindowWidth = workArea.width - 2 * GAP
}
if (actionWindowHeight > workArea.height - 2 * GAP) {
actionWindowHeight = workArea.height - 2 * GAP
}
// Calculate initial position to center action window horizontally below toolbar
let posX = Math.round(toolbarBounds.x + (toolbarBounds.width - actionWindowWidth) / 2)
let posY = Math.round(toolbarBounds.y)
// Ensure action window stays within screen boundaries with a small gap
if (posX + actionWindowWidth > workArea.x + workArea.width) {
posX = workArea.x + workArea.width - actionWindowWidth - GAP
} else if (posX < workArea.x) {
posX = workArea.x + GAP
}
if (posY + actionWindowHeight > workArea.y + workArea.height) {
// If window would go below screen, try to position it above toolbar
posY = workArea.y + workArea.height - actionWindowHeight - GAP
} else if (posY < workArea.y) {
posY = workArea.y + GAP
}
actionWindow.setPosition(posX, posY, false)
//KEY to make window not resize
actionWindow.setBounds({
width: actionWindowWidth,
height: actionWindowHeight,
x: posX,
y: posY
})
}
if (!isMac) {
actionWindow.show()
this.hideToolbar()
return
}
//follow toolbar
/************************************************
* [macOS] the following code is only for macOS
*
* WARNING:
* DO NOT MODIFY THESE CODES, UNLESS YOU REALLY KNOW WHAT YOU ARE DOING!!!!
*************************************************/
const toolbarBounds = this.toolbarWindow!.getBounds()
const display = screen.getDisplayNearestPoint({ x: toolbarBounds.x, y: toolbarBounds.y })
const workArea = display.workArea
const GAP = 6 // 6px gap from screen edges
//make sure action window is inside screen
if (actionWindowWidth > workArea.width - 2 * GAP) {
actionWindowWidth = workArea.width - 2 * GAP
// act normally when the app is not in fullscreen mode
if (!isFullScreen) {
actionWindow.show()
return
}
if (actionWindowHeight > workArea.height - 2 * GAP) {
actionWindowHeight = workArea.height - 2 * GAP
}
// [macOS] an UGLY HACKY way to override settings for fullscreen apps
// Calculate initial position to center action window horizontally below toolbar
let posX = Math.round(toolbarBounds.x + (toolbarBounds.width - actionWindowWidth) / 2)
let posY = Math.round(toolbarBounds.y)
// FIXME sometimes the dock is shown when the action window is shown
// FIXME if the actionWindow is shown over a fullscreen app, switching to another space causes the mainWindow to be shown
// FIXME when setVisibleOnAllWorkspaces is true, the dock icon disappears when the first action window is shown over a fullscreen app
// using app.dock.show() to show the dock again would cause the action window to be closed when auto hide on blur is enabled
// Ensure action window stays within screen boundaries with a small gap
if (posX + actionWindowWidth > workArea.x + workArea.width) {
posX = workArea.x + workArea.width - actionWindowWidth - GAP
} else if (posX < workArea.x) {
posX = workArea.x + GAP
}
if (posY + actionWindowHeight > workArea.y + workArea.height) {
// If window would go below screen, try to position it above toolbar
posY = workArea.y + workArea.height - actionWindowHeight - GAP
} else if (posY < workArea.y) {
posY = workArea.y + GAP
}
// setFocusable(false) to prevent the action window hide when blur (if auto hide on blur is enabled)
actionWindow.setFocusable(false)
actionWindow.setAlwaysOnTop(true, 'floating')
actionWindow.setPosition(posX, posY, false)
//KEY to make window not resize
actionWindow.setBounds({
width: actionWindowWidth,
height: actionWindowHeight,
x: posX,
y: posY
// `setVisibleOnAllWorkspaces(true)` causes the dock icon to disappear
// so store the dock icon status and show it again afterwards
const isDockShown = app.dock?.isVisible()
// DO NOT set `skipTransformProcessType: true`,
// it would cause the action window to be shown on other spaces
actionWindow.setVisibleOnAllWorkspaces(true, {
visibleOnFullScreen: true
})
actionWindow.show()
actionWindow.showInactive()
// show the dock again if it was visible before
// do not move this after `actionWindow.focus()`; it would cause the action window to be closed when auto hide on blur is enabled
if (!app.dock?.isVisible() && isDockShown) {
app.dock?.show()
}
// unset everything
setTimeout(() => {
actionWindow.setVisibleOnAllWorkspaces(false, {
visibleOnFullScreen: true,
skipTransformProcessType: true
})
actionWindow.setAlwaysOnTop(false)
actionWindow.setFocusable(true)
// regain the focus when all the works done
actionWindow.focus()
}, 50)
}
public closeActionWindow(actionWindow: BrowserWindow): void {
@ -1162,38 +1387,40 @@ export class SelectionService {
* Switches between selection-based and alt-key based triggering
* Manages appropriate event listeners for each mode
*/
private processTriggerMode() {
private processTriggerMode(): void {
if (!this.selectionHook) return
switch (this.triggerMode) {
case TriggerMode.Selected:
if (this.isCtrlkeyListenerActive) {
this.selectionHook!.off('key-down', this.handleKeyDownCtrlkeyMode)
this.selectionHook!.off('key-up', this.handleKeyUpCtrlkeyMode)
this.selectionHook.off('key-down', this.handleKeyDownCtrlkeyMode)
this.selectionHook.off('key-up', this.handleKeyUpCtrlkeyMode)
this.isCtrlkeyListenerActive = false
}
this.selectionHook!.setSelectionPassiveMode(false)
this.selectionHook.setSelectionPassiveMode(false)
break
case TriggerMode.Ctrlkey:
if (!this.isCtrlkeyListenerActive) {
this.selectionHook!.on('key-down', this.handleKeyDownCtrlkeyMode)
this.selectionHook!.on('key-up', this.handleKeyUpCtrlkeyMode)
this.selectionHook.on('key-down', this.handleKeyDownCtrlkeyMode)
this.selectionHook.on('key-up', this.handleKeyUpCtrlkeyMode)
this.isCtrlkeyListenerActive = true
}
this.selectionHook!.setSelectionPassiveMode(true)
this.selectionHook.setSelectionPassiveMode(true)
break
case TriggerMode.Shortcut:
//remove the ctrlkey listener, don't need any key listener for shortcut mode
if (this.isCtrlkeyListenerActive) {
this.selectionHook!.off('key-down', this.handleKeyDownCtrlkeyMode)
this.selectionHook!.off('key-up', this.handleKeyUpCtrlkeyMode)
this.selectionHook.off('key-down', this.handleKeyDownCtrlkeyMode)
this.selectionHook.off('key-up', this.handleKeyUpCtrlkeyMode)
this.isCtrlkeyListenerActive = false
}
this.selectionHook!.setSelectionPassiveMode(true)
this.selectionHook.setSelectionPassiveMode(true)
break
}
}
@ -1214,7 +1441,7 @@ export class SelectionService {
selectionService?.hideToolbar()
})
ipcMain.handle(IpcChannel.Selection_WriteToClipboard, (_, text: string) => {
ipcMain.handle(IpcChannel.Selection_WriteToClipboard, (_, text: string): boolean => {
return selectionService?.writeToClipboard(text) ?? false
})
@ -1246,8 +1473,9 @@ export class SelectionService {
configManager.setSelectionAssistantFilterList(filterList)
})
ipcMain.handle(IpcChannel.Selection_ProcessAction, (_, actionItem: ActionItem) => {
selectionService?.processAction(actionItem)
// [macOS] the isFullScreen flag is only available on macOS
ipcMain.handle(IpcChannel.Selection_ProcessAction, (_, actionItem: ActionItem, isFullScreen: boolean = false) => {
selectionService?.processAction(actionItem, isFullScreen)
})
ipcMain.handle(IpcChannel.Selection_ActionWindowClose, (event) => {
@ -1274,13 +1502,13 @@ export class SelectionService {
this.isIpcHandlerRegistered = true
}
private logInfo(message: string, forceShow: boolean = false) {
private logInfo(message: string, forceShow: boolean = false): void {
if (isDev || forceShow) {
Logger.info('[SelectionService] Info: ', message)
}
}
private logError(...args: [...string[], Error]) {
private logError(...args: [...string[], Error]): void {
Logger.error('[SelectionService] Error: ', ...args)
}
}
@ -1291,9 +1519,9 @@ export class SelectionService {
* @returns {boolean} Success status of initialization
*/
export function initSelectionService(): boolean {
if (!isWin) return false
if (!isSupportedOS) return false
configManager.subscribe(ConfigKeys.SelectionAssistantEnabled, (enabled: boolean) => {
configManager.subscribe(ConfigKeys.SelectionAssistantEnabled, (enabled: boolean): void => {
//avoid closure
const ss = SelectionService.getInstance()
if (!ss) {

View File

@ -55,7 +55,8 @@ function formatShortcutKey(shortcut: string[]): string {
return shortcut.join('+')
}
// convert the shortcut recorded by keyboard event key value to electron global shortcut format
// convert the shortcut recorded by JS keyboard event key value to electron global shortcut format
// see: https://www.electronjs.org/zh/docs/latest/api/accelerator
const convertShortcutFormat = (shortcut: string | string[]): string => {
const accelerator = (() => {
if (Array.isArray(shortcut)) {
@ -68,12 +69,34 @@ const convertShortcutFormat = (shortcut: string | string[]): string => {
return accelerator
.map((key) => {
switch (key) {
// OLD WAY FOR MODIFIER KEYS, KEEP THEM HERE FOR REFERENCE
// case 'Command':
// return 'CommandOrControl'
// case 'Control':
// return 'Control'
// case 'Ctrl':
// return 'Control'
// NEW WAY FOR MODIFIER KEYS
// you can see all the modifier keys at the link above
case 'CommandOrControl':
return 'CommandOrControl'
case 'Ctrl':
return 'Ctrl'
case 'Alt':
return 'Alt' // Use `Alt` instead of `Option`. The `Option` key only exists on macOS, whereas the `Alt` key is available on all platforms.
case 'Meta':
return 'Meta' // `Meta` key is mapped to the Windows key on Windows and Linux, `Cmd` on macOS.
case 'Shift':
return 'Shift'
// For backward compatibility with old data
case 'Command':
case 'Cmd':
return 'CommandOrControl'
case 'Control':
return 'Control'
case 'Ctrl':
return 'Control'
return 'Ctrl'
case 'ArrowUp':
return 'Up'
case 'ArrowDown':
@ -83,7 +106,7 @@ const convertShortcutFormat = (shortcut: string | string[]): string => {
case 'ArrowRight':
return 'Right'
case 'AltGraph':
return 'Alt'
return 'AltGr'
case 'Slash':
return '/'
case 'Semicolon':
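Based on the mapping above, a hedged sketch of how the converted accelerator might be used. This is illustrative only: the example shortcut arrays are made up, the output assumes the mapped keys are joined with '+' as in formatShortcutKey above, and registering via globalShortcut is not part of this diff.
import { globalShortcut } from 'electron'
// ['CommandOrControl', 'Alt', 'ArrowUp'] -> 'CommandOrControl+Alt+Up'
// ['Ctrl', 'Shift', 'Slash']             -> 'Ctrl+Shift+/'
// ['Command', 'ArrowDown'] (legacy data) -> 'CommandOrControl+Down'
const accelerator = convertShortcutFormat(['CommandOrControl', 'Alt', 'ArrowUp'])
globalShortcut.register(accelerator, () => {
  console.log('shortcut triggered:', accelerator)
})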

View File

@ -1,48 +1,48 @@
import { IpcChannel } from '@shared/IpcChannel'
import { ThemeMode } from '@types'
import { BrowserWindow, nativeTheme } from 'electron'
import { titleBarOverlayDark, titleBarOverlayLight } from '../config'
import { configManager } from './ConfigManager'
class ThemeService {
private theme: ThemeMode = ThemeMode.system
constructor() {
this.theme = configManager.getTheme()
if (this.theme === ThemeMode.dark || this.theme === ThemeMode.light || this.theme === ThemeMode.system) {
nativeTheme.themeSource = this.theme
} else {
// Compatibility with older versions
configManager.setTheme(ThemeMode.system)
nativeTheme.themeSource = ThemeMode.system
}
nativeTheme.on('updated', this.themeUpdatadHandler.bind(this))
}
themeUpdatadHandler() {
BrowserWindow.getAllWindows().forEach((win) => {
if (win && !win.isDestroyed() && win.setTitleBarOverlay) {
try {
win.setTitleBarOverlay(nativeTheme.shouldUseDarkColors ? titleBarOverlayDark : titleBarOverlayLight)
} catch (error) {
// don't throw an error if setTitleBarOverlay fails
// because it may be called on windows that don't have a title bar overlay
}
}
win.webContents.send(IpcChannel.ThemeUpdated, nativeTheme.shouldUseDarkColors ? ThemeMode.dark : ThemeMode.light)
})
}
setTheme(theme: ThemeMode) {
if (theme === this.theme) {
return
}
this.theme = theme
nativeTheme.themeSource = theme
configManager.setTheme(theme)
}
}
export const themeService = new ThemeService()
import { IpcChannel } from '@shared/IpcChannel'
import { ThemeMode } from '@types'
import { BrowserWindow, nativeTheme } from 'electron'
import { titleBarOverlayDark, titleBarOverlayLight } from '../config'
import { configManager } from './ConfigManager'
class ThemeService {
private theme: ThemeMode = ThemeMode.system
constructor() {
this.theme = configManager.getTheme()
if (this.theme === ThemeMode.dark || this.theme === ThemeMode.light || this.theme === ThemeMode.system) {
nativeTheme.themeSource = this.theme
} else {
// Compatibility with older versions
configManager.setTheme(ThemeMode.system)
nativeTheme.themeSource = ThemeMode.system
}
nativeTheme.on('updated', this.themeUpdatadHandler.bind(this))
}
themeUpdatadHandler() {
BrowserWindow.getAllWindows().forEach((win) => {
if (win && !win.isDestroyed() && win.setTitleBarOverlay) {
try {
win.setTitleBarOverlay(nativeTheme.shouldUseDarkColors ? titleBarOverlayDark : titleBarOverlayLight)
} catch (error) {
// don't throw an error if setTitleBarOverlay fails
// because it may be called on windows that don't have a title bar overlay
}
}
win.webContents.send(IpcChannel.ThemeUpdated, nativeTheme.shouldUseDarkColors ? ThemeMode.dark : ThemeMode.light)
})
}
setTheme(theme: ThemeMode) {
if (theme === this.theme) {
return
}
this.theme = theme
nativeTheme.themeSource = theme
configManager.setTheme(theme)
}
}
export const themeService = new ThemeService()

View File

@ -84,10 +84,8 @@ export class TrayService {
label: trayLocale.show_mini_window,
click: () => windowService.showMiniWindow()
},
isWin && {
(isWin || isMac) && {
label: selectionLocale.name + (selectionAssistantEnabled ? ' - On' : ' - Off'),
// type: 'checkbox',
// checked: selectionAssistantEnabled,
click: () => {
if (selectionService) {
selectionService.toggleEnabled()

View File

@ -23,7 +23,9 @@ export default class WebDav {
password: params.webdavPass,
maxBodyLength: Infinity,
maxContentLength: Infinity,
httpsAgent: new https.Agent({ rejectUnauthorized: false })
httpsAgent: new https.Agent({
rejectUnauthorized: false
})
})
this.putFileContents = this.putFileContents.bind(this)

View File

@ -5,7 +5,7 @@ import { is } from '@electron-toolkit/utils'
import { isDev, isLinux, isMac, isWin } from '@main/constant'
import { getFilesDir } from '@main/utils/file'
import { IpcChannel } from '@shared/IpcChannel'
import { app, BrowserWindow, nativeTheme, shell } from 'electron'
import { app, BrowserWindow, nativeTheme, screen, shell } from 'electron'
import Logger from 'electron-log'
import windowStateKeeper from 'electron-window-state'
import { join } from 'path'
@ -16,6 +16,9 @@ import { configManager } from './ConfigManager'
import { contextMenu } from './ContextMenu'
import { initSessionUserAgent } from './WebviewService'
const DEFAULT_MINIWINDOW_WIDTH = 550
const DEFAULT_MINIWINDOW_HEIGHT = 400
export class WindowService {
private static instance: WindowService | null = null
private mainWindow: BrowserWindow | null = null
@ -26,6 +29,11 @@ export class WindowService {
private wasMainWindowFocused: boolean = false
private lastRendererProcessCrashTime: number = 0
private miniWindowSize: { width: number; height: number } = {
width: DEFAULT_MINIWINDOW_WIDTH,
height: DEFAULT_MINIWINDOW_HEIGHT
}
public static getInstance(): WindowService {
if (!WindowService.instance) {
WindowService.instance = new WindowService()
@ -41,8 +49,8 @@ export class WindowService {
}
const mainWindowState = windowStateKeeper({
defaultWidth: 1080,
defaultHeight: 670,
defaultWidth: 960,
defaultHeight: 600,
fullScreen: false,
maximize: false
})
@ -52,7 +60,7 @@ export class WindowService {
y: mainWindowState.y,
width: mainWindowState.width,
height: mainWindowState.height,
minWidth: 1080,
minWidth: 960,
minHeight: 600,
show: false,
autoHideMenuBar: true,
@ -426,8 +434,8 @@ export class WindowService {
public createMiniWindow(isPreload: boolean = false): BrowserWindow {
this.miniWindow = new BrowserWindow({
width: 550,
height: 400,
width: this.miniWindowSize.width,
height: this.miniWindowSize.height,
minWidth: 350,
minHeight: 380,
maxWidth: 1024,
@ -437,13 +445,12 @@ export class WindowService {
transparent: isMac,
vibrancy: 'under-window',
visualEffectState: 'followWindow',
center: true,
frame: false,
alwaysOnTop: true,
resizable: true,
useContentSize: true,
...(isMac ? { type: 'panel' } : {}),
skipTaskbar: true,
resizable: true,
minimizable: false,
maximizable: false,
fullscreenable: false,
@ -451,8 +458,7 @@ export class WindowService {
preload: join(__dirname, '../preload/index.js'),
sandbox: false,
webSecurity: false,
webviewTag: true,
backgroundThrottling: false
webviewTag: true
}
})
@ -486,6 +492,13 @@ export class WindowService {
this.miniWindow?.webContents.send(IpcChannel.HideMiniWindow)
})
this.miniWindow.on('resized', () => {
this.miniWindowSize = this.miniWindow?.getBounds() || {
width: DEFAULT_MINIWINDOW_WIDTH,
height: DEFAULT_MINIWINDOW_HEIGHT
}
})
this.miniWindow.on('show', () => {
this.miniWindow?.webContents.send(IpcChannel.ShowMiniWindow)
})
@ -509,10 +522,48 @@ export class WindowService {
if (this.miniWindow && !this.miniWindow.isDestroyed()) {
this.wasMainWindowFocused = this.mainWindow?.isFocused() || false
if (this.miniWindow.isMinimized()) {
this.miniWindow.restore()
// [Windows] hacky fix
// the window is minimized only on the Windows platform
// because it's a workaround for Windows, see `hideMiniWindow()`
if (this.miniWindow?.isMinimized()) {
// don't let the window be seen before we finish adjusting its position across screens
this.miniWindow?.setOpacity(0)
// DO NOT use `restore()` here, Electron has a bug with screens of different scale factors
// We have to use `show()` here, then set the position and bounds
this.miniWindow?.show()
}
this.miniWindow.show()
const miniWindowBounds = this.miniWindow.getBounds()
// Check if miniWindow is on the same screen as mouse cursor
const cursorDisplay = screen.getDisplayNearestPoint(screen.getCursorScreenPoint())
const miniWindowDisplay = screen.getDisplayNearestPoint(miniWindowBounds)
// Show the miniWindow on the cursor's screen center
// If miniWindow is not on the same screen as cursor, move it to cursor's screen center
if (cursorDisplay.id !== miniWindowDisplay.id) {
const workArea = cursorDisplay.bounds
// use the remembered size to avoid the Electron bug with screens of different scale factors
const miniWindowWidth = this.miniWindowSize.width
const miniWindowHeight = this.miniWindowSize.height
// move to the center of the cursor's screen
const miniWindowX = Math.round(workArea.x + (workArea.width - miniWindowWidth) / 2)
const miniWindowY = Math.round(workArea.y + (workArea.height - miniWindowHeight) / 2)
this.miniWindow.setPosition(miniWindowX, miniWindowY, false)
this.miniWindow.setBounds({
x: miniWindowX,
y: miniWindowY,
width: miniWindowWidth,
height: miniWindowHeight
})
}
this.miniWindow?.setOpacity(1)
this.miniWindow?.show()
return
}
@ -520,20 +571,26 @@ export class WindowService {
}
public hideMiniWindow() {
//hacky-fix:[mac/win] previous window(not self-app) should be focused again after miniWindow hide
if (!this.miniWindow || this.miniWindow.isDestroyed()) {
return
}
//[macOS/Windows] hacky fix
// the previously focused window (not our own app) should regain focus after the miniWindow hides
// this workaround makes the previous window focused again after the miniWindow hides
if (isWin) {
this.miniWindow?.minimize()
this.miniWindow?.hide()
this.miniWindow.setOpacity(0) // don't show the minimizing animation
this.miniWindow.minimize()
return
} else if (isMac) {
this.miniWindow?.hide()
this.miniWindow.hide()
if (!this.wasMainWindowFocused) {
app.hide()
}
return
}
this.miniWindow?.hide()
this.miniWindow.hide()
}
public closeMiniWindow() {

View File

@ -0,0 +1,829 @@
import { Client, createClient } from '@libsql/client'
import Embeddings from '@main/knowledge/embeddings/Embeddings'
import type {
AddMemoryOptions,
AssistantMessage,
MemoryConfig,
MemoryHistoryItem,
MemoryItem,
MemoryListOptions,
MemorySearchOptions
} from '@types'
import crypto from 'crypto'
import { app } from 'electron'
import Logger from 'electron-log'
import path from 'path'
import { MemoryQueries } from './queries'
export interface EmbeddingOptions {
model: string
provider: string
apiKey: string
apiVersion?: string
baseURL: string
dimensions?: number
batchSize?: number
}
export interface VectorSearchOptions {
limit?: number
threshold?: number
userId?: string
agentId?: string
filters?: Record<string, any>
}
export interface SearchResult {
memories: MemoryItem[]
count: number
error?: string
}
export class MemoryService {
private static instance: MemoryService | null = null
private db: Client | null = null
private isInitialized = false
private embeddings: Embeddings | null = null
private config: MemoryConfig | null = null
private static readonly UNIFIED_DIMENSION = 1536
private static readonly SIMILARITY_THRESHOLD = 0.85
private constructor() {
// Private constructor to enforce singleton pattern
}
public static getInstance(): MemoryService {
if (!MemoryService.instance) {
MemoryService.instance = new MemoryService()
}
return MemoryService.instance
}
public static reload(): MemoryService {
if (MemoryService.instance) {
MemoryService.instance.close()
}
MemoryService.instance = new MemoryService()
return MemoryService.instance
}
/**
* Initialize the database connection and create tables
*/
private async init(): Promise<void> {
if (this.isInitialized && this.db) {
return
}
try {
const userDataPath = app.getPath('userData')
const dbPath = path.join(userDataPath, 'memories.db')
this.db = createClient({
url: `file:${dbPath}`,
intMode: 'number'
})
// Create tables
await this.createTables()
this.isInitialized = true
Logger.info('Memory database initialized successfully')
} catch (error) {
Logger.error('Failed to initialize memory database:', error)
throw new Error(
`Memory database initialization failed: ${error instanceof Error ? error.message : 'Unknown error'}`
)
}
}
private async createTables(): Promise<void> {
if (!this.db) throw new Error('Database not initialized')
// Create memories table with native vector support
await this.db.execute(MemoryQueries.createTables.memories)
// Create memory history table
await this.db.execute(MemoryQueries.createTables.memoryHistory)
// Create indexes
await this.db.execute(MemoryQueries.createIndexes.userId)
await this.db.execute(MemoryQueries.createIndexes.agentId)
await this.db.execute(MemoryQueries.createIndexes.createdAt)
await this.db.execute(MemoryQueries.createIndexes.hash)
await this.db.execute(MemoryQueries.createIndexes.memoryHistory)
// Create vector index for similarity search
try {
await this.db.execute(MemoryQueries.createIndexes.vector)
} catch (error) {
// Vector index might not be supported in all versions
Logger.warn('Failed to create vector index, falling back to non-indexed search:', error)
}
}
/**
* Add new memories from messages
*/
public async add(messages: string | AssistantMessage[], options: AddMemoryOptions): Promise<SearchResult> {
await this.init()
if (!this.db) throw new Error('Database not initialized')
const { userId, agentId, runId, metadata } = options
try {
// Convert messages to memory strings
const memoryStrings = Array.isArray(messages)
? messages.map((m) => (typeof m === 'string' ? m : m.content))
: [messages]
const addedMemories: MemoryItem[] = []
for (const memory of memoryStrings) {
const trimmedMemory = memory.trim()
if (!trimmedMemory) continue
// Generate hash for deduplication
const hash = crypto.createHash('sha256').update(trimmedMemory).digest('hex')
// Check if memory already exists
const existing = await this.db.execute({
sql: MemoryQueries.memory.checkExistsIncludeDeleted,
args: [hash]
})
if (existing.rows.length > 0) {
const existingRecord = existing.rows[0] as any
const isDeleted = existingRecord.is_deleted === 1
if (!isDeleted) {
// Active record exists, skip insertion
Logger.info(`Memory already exists with hash: ${hash}`)
continue
} else {
// Deleted record exists, restore it instead of inserting new one
Logger.info(`Restoring deleted memory with hash: ${hash}`)
// Generate embedding if model is configured
let embedding: number[] | null = null
const embedderApiClient = this.config?.embedderApiClient
if (embedderApiClient) {
try {
embedding = await this.generateEmbedding(trimmedMemory)
Logger.info(
`Generated embedding for restored memory with dimension: ${embedding.length} (target: ${this.config?.embedderDimensions || MemoryService.UNIFIED_DIMENSION})`
)
} catch (error) {
Logger.error('Failed to generate embedding for restored memory:', error)
}
}
const now = new Date().toISOString()
// Restore the deleted record
await this.db.execute({
sql: MemoryQueries.memory.restoreDeleted,
args: [
trimmedMemory,
embedding ? this.embeddingToVector(embedding) : null,
metadata ? JSON.stringify(metadata) : null,
now,
existingRecord.id
]
})
// Add to history
await this.addHistory(existingRecord.id, null, trimmedMemory, 'ADD')
addedMemories.push({
id: existingRecord.id,
memory: trimmedMemory,
hash,
createdAt: now,
updatedAt: now,
metadata
})
continue
}
}
// Generate embedding if model is configured
let embedding: number[] | null = null
if (this.config?.embedderApiClient) {
try {
embedding = await this.generateEmbedding(trimmedMemory)
Logger.info(
`Generated embedding with dimension: ${embedding.length} (target: ${this.config?.embedderDimensions || MemoryService.UNIFIED_DIMENSION})`
)
// Check for similar memories using vector similarity
const similarMemories = await this.hybridSearch(trimmedMemory, embedding, {
limit: 5,
threshold: 0.1, // Lower threshold to get more candidates
userId,
agentId
})
// Check if any similar memory exceeds the similarity threshold
if (similarMemories.memories.length > 0) {
const highestSimilarity = Math.max(...similarMemories.memories.map((m) => m.score || 0))
if (highestSimilarity >= MemoryService.SIMILARITY_THRESHOLD) {
Logger.info(
`Skipping memory addition due to high similarity: ${highestSimilarity.toFixed(3)} >= ${MemoryService.SIMILARITY_THRESHOLD}`
)
Logger.info(`Similar memory found: "${similarMemories.memories[0].memory}"`)
continue
}
}
} catch (error) {
Logger.error('Failed to generate embedding:', error)
}
}
// Insert new memory
const id = crypto.randomUUID()
const now = new Date().toISOString()
await this.db.execute({
sql: MemoryQueries.memory.insert,
args: [
id,
trimmedMemory,
hash,
embedding ? this.embeddingToVector(embedding) : null,
metadata ? JSON.stringify(metadata) : null,
userId || null,
agentId || null,
runId || null,
now,
now
]
})
// Add to history
await this.addHistory(id, null, trimmedMemory, 'ADD')
addedMemories.push({
id,
memory: trimmedMemory,
hash,
createdAt: now,
updatedAt: now,
metadata
})
}
return {
memories: addedMemories,
count: addedMemories.length
}
} catch (error) {
Logger.error('Failed to add memories:', error)
return {
memories: [],
count: 0,
error: error instanceof Error ? error.message : 'Unknown error'
}
}
}
/**
* Search memories using text or vector similarity
*/
public async search(query: string, options: MemorySearchOptions = {}): Promise<SearchResult> {
await this.init()
if (!this.db) throw new Error('Database not initialized')
const { limit = 10, userId, agentId, filters = {} } = options
try {
// If we have an embedder model configured, use vector search
if (this.config?.embedderApiClient) {
try {
const queryEmbedding = await this.generateEmbedding(query)
return await this.hybridSearch(query, queryEmbedding, { limit, userId, agentId, filters })
} catch (error) {
Logger.error('Vector search failed, falling back to text search:', error)
}
}
// Fallback to text search
const conditions: string[] = ['m.is_deleted = 0']
const params: any[] = []
// Add search conditions
conditions.push('(m.memory LIKE ? OR m.memory LIKE ?)')
params.push(`%${query}%`, `%${query.split(' ').join('%')}%`)
if (userId) {
conditions.push('m.user_id = ?')
params.push(userId)
}
if (agentId) {
conditions.push('m.agent_id = ?')
params.push(agentId)
}
// Add custom filters
for (const [key, value] of Object.entries(filters)) {
if (value !== undefined && value !== null) {
conditions.push(`json_extract(m.metadata, '$.${key}') = ?`)
params.push(value)
}
}
const whereClause = conditions.join(' AND ')
params.push(limit)
const result = await this.db.execute({
sql: `${MemoryQueries.memory.list} ${whereClause}
ORDER BY m.created_at DESC
LIMIT ?
`,
args: params
})
const memories: MemoryItem[] = result.rows.map((row: any) => ({
id: row.id as string,
memory: row.memory as string,
hash: (row.hash as string) || undefined,
metadata: row.metadata ? JSON.parse(row.metadata as string) : undefined,
createdAt: row.created_at as string,
updatedAt: row.updated_at as string
}))
return {
memories,
count: memories.length
}
} catch (error) {
Logger.error('Search failed:', error)
return {
memories: [],
count: 0,
error: error instanceof Error ? error.message : 'Unknown error'
}
}
}
/**
* List all memories with optional filters
*/
public async list(options: MemoryListOptions = {}): Promise<SearchResult> {
await this.init()
if (!this.db) throw new Error('Database not initialized')
const { userId, agentId, limit = 100, offset = 0 } = options
try {
const conditions: string[] = ['m.is_deleted = 0']
const params: any[] = []
if (userId) {
conditions.push('m.user_id = ?')
params.push(userId)
}
if (agentId) {
conditions.push('m.agent_id = ?')
params.push(agentId)
}
const whereClause = conditions.join(' AND ')
// Get total count
const countResult = await this.db.execute({
sql: `${MemoryQueries.memory.count} ${whereClause}`,
args: params
})
const totalCount = (countResult.rows[0] as any).total as number
// Get paginated results
params.push(limit, offset)
const result = await this.db.execute({
sql: `${MemoryQueries.memory.list} ${whereClause}
ORDER BY m.created_at DESC
LIMIT ? OFFSET ?
`,
args: params
})
const memories: MemoryItem[] = result.rows.map((row: any) => ({
id: row.id as string,
memory: row.memory as string,
hash: (row.hash as string) || undefined,
metadata: row.metadata ? JSON.parse(row.metadata as string) : undefined,
createdAt: row.created_at as string,
updatedAt: row.updated_at as string
}))
return {
memories,
count: totalCount
}
} catch (error) {
Logger.error('List failed:', error)
return {
memories: [],
count: 0,
error: error instanceof Error ? error.message : 'Unknown error'
}
}
}
/**
* Delete a memory (soft delete)
*/
public async delete(id: string): Promise<void> {
await this.init()
if (!this.db) throw new Error('Database not initialized')
try {
// Get current memory value for history
const current = await this.db.execute({
sql: MemoryQueries.memory.getForDelete,
args: [id]
})
if (current.rows.length === 0) {
throw new Error('Memory not found')
}
const currentMemory = (current.rows[0] as any).memory as string
// Soft delete
await this.db.execute({
sql: MemoryQueries.memory.softDelete,
args: [new Date().toISOString(), id]
})
// Add to history
await this.addHistory(id, currentMemory, null, 'DELETE')
Logger.info(`Memory deleted: ${id}`)
} catch (error) {
Logger.error('Delete failed:', error)
throw new Error(`Failed to delete memory: ${error instanceof Error ? error.message : 'Unknown error'}`)
}
}
/**
* Update a memory
*/
public async update(id: string, memory: string, metadata?: Record<string, any>): Promise<void> {
await this.init()
if (!this.db) throw new Error('Database not initialized')
try {
// Get current memory
const current = await this.db.execute({
sql: MemoryQueries.memory.getForUpdate,
args: [id]
})
if (current.rows.length === 0) {
throw new Error('Memory not found')
}
const row = current.rows[0] as any
const previousMemory = row.memory as string
const previousMetadata = row.metadata ? JSON.parse(row.metadata as string) : {}
// Generate new hash
const hash = crypto.createHash('sha256').update(memory.trim()).digest('hex')
// Generate new embedding if model is configured
let embedding: number[] | null = null
if (this.config?.embedderApiClient) {
try {
embedding = await this.generateEmbedding(memory)
Logger.info(
`Updated embedding with dimension: ${embedding.length} (target: ${this.config?.embedderDimensions || MemoryService.UNIFIED_DIMENSION})`
)
} catch (error) {
Logger.error('Failed to generate embedding for update:', error)
}
}
// Merge metadata
const mergedMetadata = { ...previousMetadata, ...metadata }
// Update memory
await this.db.execute({
sql: MemoryQueries.memory.update,
args: [
memory.trim(),
hash,
embedding ? this.embeddingToVector(embedding) : null,
JSON.stringify(mergedMetadata),
new Date().toISOString(),
id
]
})
// Add to history
await this.addHistory(id, previousMemory, memory, 'UPDATE')
Logger.info(`Memory updated: ${id}`)
} catch (error) {
Logger.error('Update failed:', error)
throw new Error(`Failed to update memory: ${error instanceof Error ? error.message : 'Unknown error'}`)
}
}
/**
* Get memory history
*/
public async get(memoryId: string): Promise<MemoryHistoryItem[]> {
await this.init()
if (!this.db) throw new Error('Database not initialized')
try {
const result = await this.db.execute({
sql: MemoryQueries.history.getByMemoryId,
args: [memoryId]
})
return result.rows.map((row: any) => ({
id: row.id as number,
memoryId: row.memory_id as string,
previousValue: row.previous_value as string | undefined,
newValue: row.new_value as string,
action: row.action as 'ADD' | 'UPDATE' | 'DELETE',
createdAt: row.created_at as string,
updatedAt: row.updated_at as string,
isDeleted: row.is_deleted === 1
}))
} catch (error) {
Logger.error('Get history failed:', error)
throw new Error(`Failed to get memory history: ${error instanceof Error ? error.message : 'Unknown error'}`)
}
}
/**
* Delete all memories for a user without deleting the user (hard delete)
*/
public async deleteAllMemoriesForUser(userId: string): Promise<void> {
await this.init()
if (!this.db) throw new Error('Database not initialized')
if (!userId) {
throw new Error('User ID is required')
}
try {
// Get count of memories to be deleted
const countResult = await this.db.execute({
sql: MemoryQueries.users.countMemoriesForUser,
args: [userId]
})
const totalCount = (countResult.rows[0] as any).total as number
// Delete history entries for this user's memories
await this.db.execute({
sql: MemoryQueries.users.deleteHistoryForUser,
args: [userId]
})
// Hard delete all memories for this user
await this.db.execute({
sql: MemoryQueries.users.deleteAllMemoriesForUser,
args: [userId]
})
Logger.info(`Reset all memories for user ${userId} (${totalCount} memories deleted)`)
} catch (error) {
Logger.error('Reset user memories failed:', error)
throw new Error(`Failed to reset user memories: ${error instanceof Error ? error.message : 'Unknown error'}`)
}
}
/**
* Delete a user and all their memories (hard delete)
*/
public async deleteUser(userId: string): Promise<void> {
await this.init()
if (!this.db) throw new Error('Database not initialized')
if (!userId) {
throw new Error('User ID is required')
}
if (userId === 'default-user') {
throw new Error('Cannot delete the default user')
}
try {
// Get count of memories to be deleted
const countResult = await this.db.execute({
sql: MemoryQueries.users.countMemoriesForUser,
args: [userId]
})
const totalCount = (countResult.rows[0] as any).total as number
// Delete history entries for this user's memories
await this.db.execute({
sql: MemoryQueries.users.deleteHistoryForUser,
args: [userId]
})
// Delete all memories for this user (hard delete)
await this.db.execute({
sql: MemoryQueries.users.deleteAllMemoriesForUser,
args: [userId]
})
Logger.info(`Deleted user ${userId} and ${totalCount} memories`)
} catch (error) {
Logger.error('Delete user failed:', error)
throw new Error(`Failed to delete user: ${error instanceof Error ? error.message : 'Unknown error'}`)
}
}
/**
* Get list of unique user IDs with their memory counts
*/
public async getUsersList(): Promise<{ userId: string; memoryCount: number; lastMemoryDate: string }[]> {
await this.init()
if (!this.db) throw new Error('Database not initialized')
try {
const result = await this.db.execute({
sql: MemoryQueries.users.getUniqueUsers,
args: []
})
return result.rows.map((row: any) => ({
userId: row.user_id as string,
memoryCount: row.memory_count as number,
lastMemoryDate: row.last_memory_date as string
}))
} catch (error) {
Logger.error('Get users list failed:', error)
throw new Error(`Failed to get users list: ${error instanceof Error ? error.message : 'Unknown error'}`)
}
}
/**
* Update configuration
*/
public setConfig(config: MemoryConfig): void {
this.config = config
// Reset embeddings instance when config changes
this.embeddings = null
}
/**
* Close database connection
*/
public async close(): Promise<void> {
if (this.db) {
await this.db.close()
this.db = null
this.isInitialized = false
}
}
// ========== EMBEDDING OPERATIONS (Previously EmbeddingService) ==========
/**
* Normalize embedding dimensions to unified size
*/
private normalizeEmbedding(embedding: number[]): number[] {
if (embedding.length === MemoryService.UNIFIED_DIMENSION) {
return embedding
}
if (embedding.length < MemoryService.UNIFIED_DIMENSION) {
// Pad with zeros
return [...embedding, ...new Array(MemoryService.UNIFIED_DIMENSION - embedding.length).fill(0)]
} else {
// Truncate
return embedding.slice(0, MemoryService.UNIFIED_DIMENSION)
}
}
/**
* Generate embedding for text
*/
private async generateEmbedding(text: string): Promise<number[]> {
if (!this.config?.embedderApiClient) {
throw new Error('Embedder model not configured')
}
try {
// Initialize embeddings instance if needed
if (!this.embeddings) {
if (!this.config.embedderApiClient) {
throw new Error('Embedder provider not configured')
}
this.embeddings = new Embeddings({
embedApiClient: this.config.embedderApiClient,
dimensions: this.config.embedderDimensions
})
await this.embeddings.init()
}
const embedding = await this.embeddings.embedQuery(text)
// Normalize to unified dimension
return this.normalizeEmbedding(embedding)
} catch (error) {
Logger.error('Embedding generation failed:', error)
throw new Error(`Failed to generate embedding: ${error instanceof Error ? error.message : 'Unknown error'}`)
}
}
// ========== VECTOR SEARCH OPERATIONS (Previously VectorSearch) ==========
/**
* Convert embedding array to libsql vector format
*/
private embeddingToVector(embedding: number[]): string {
return `[${embedding.join(',')}]`
}
/**
* Hybrid search combining text and vector similarity (currently vector-only)
*/
private async hybridSearch(
_: string,
queryEmbedding: number[],
options: VectorSearchOptions = {}
): Promise<SearchResult> {
if (!this.db) throw new Error('Database not initialized')
const { limit = 10, threshold = 0.5, userId } = options
try {
const queryVector = this.embeddingToVector(queryEmbedding)
const conditions: string[] = ['m.is_deleted = 0']
const params: any[] = []
// Vector search only - three vector parameters for distance, vector_similarity, and combined_score
params.push(queryVector, queryVector, queryVector)
if (userId) {
conditions.push('m.user_id = ?')
params.push(userId)
}
const whereClause = conditions.join(' AND ')
const hybridQuery = `${MemoryQueries.search.hybridSearch} ${whereClause}
) AS results
WHERE vector_similarity >= ?
ORDER BY vector_similarity DESC
LIMIT ?`
params.push(threshold, limit)
const result = await this.db.execute({
sql: hybridQuery,
args: params
})
const memories: MemoryItem[] = result.rows.map((row: any) => ({
id: row.id as string,
memory: row.memory as string,
hash: (row.hash as string) || undefined,
metadata: row.metadata ? JSON.parse(row.metadata as string) : undefined,
createdAt: row.created_at as string,
updatedAt: row.updated_at as string,
score: row.vector_similarity as number
}))
return {
memories,
count: memories.length
}
} catch (error) {
Logger.error('Hybrid search failed:', error)
throw new Error(`Hybrid search failed: ${error instanceof Error ? error.message : 'Unknown error'}`)
}
}
// ========== HELPER METHODS ==========
/**
* Add entry to memory history
*/
private async addHistory(
memoryId: string,
previousValue: string | null,
newValue: string | null,
action: 'ADD' | 'UPDATE' | 'DELETE'
): Promise<void> {
if (!this.db) throw new Error('Database not initialized')
const now = new Date().toISOString()
await this.db.execute({
sql: MemoryQueries.history.insert,
args: [memoryId, previousValue, newValue, action, now, now]
})
}
}
export default MemoryService
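
For orientation, a minimal usage sketch of the public API above. It assumes a MemoryService instance is already available (construction and initialization happen outside this excerpt), the id and metadata values are placeholders, and Logger is electron-log as used throughout the file.

async function reviseAndInspect(memoryService: MemoryService, id: string): Promise<void> {
  // Rewrite the memory text: the service re-hashes it, re-embeds it when an
  // embedder is configured, merges metadata, and appends an UPDATE history row.
  await memoryService.update(id, 'Prefers dark mode in the editor', { source: 'manual-edit' })
  // Read the audit trail for this memory (ADD/UPDATE/DELETE rows, newest first).
  const history = await memoryService.get(id)
  Logger.info(`history entries for ${id}: ${history.length}`)
  // Soft delete: the row stays in the memories table with is_deleted = 1.
  await memoryService.delete(id)
}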

View File

@ -0,0 +1,164 @@
/**
* SQL queries for MemoryService
* All SQL queries are centralized here for better maintainability
*/
export const MemoryQueries = {
// Table creation queries
createTables: {
memories: `
CREATE TABLE IF NOT EXISTS memories (
id TEXT PRIMARY KEY,
memory TEXT NOT NULL,
hash TEXT UNIQUE,
embedding F32_BLOB(1536), -- Native vector column (1536 dimensions for OpenAI embeddings)
metadata TEXT, -- JSON string
user_id TEXT,
agent_id TEXT,
run_id TEXT,
created_at DATETIME DEFAULT CURRENT_TIMESTAMP,
updated_at DATETIME DEFAULT CURRENT_TIMESTAMP,
is_deleted INTEGER DEFAULT 0
)
`,
memoryHistory: `
CREATE TABLE IF NOT EXISTS memory_history (
id INTEGER PRIMARY KEY AUTOINCREMENT,
memory_id TEXT NOT NULL,
previous_value TEXT,
new_value TEXT,
action TEXT NOT NULL, -- ADD, UPDATE, DELETE
created_at DATETIME DEFAULT CURRENT_TIMESTAMP,
updated_at DATETIME DEFAULT CURRENT_TIMESTAMP,
is_deleted INTEGER DEFAULT 0,
FOREIGN KEY (memory_id) REFERENCES memories (id)
)
`
},
// Index creation queries
createIndexes: {
userId: 'CREATE INDEX IF NOT EXISTS idx_memories_user_id ON memories(user_id)',
agentId: 'CREATE INDEX IF NOT EXISTS idx_memories_agent_id ON memories(agent_id)',
createdAt: 'CREATE INDEX IF NOT EXISTS idx_memories_created_at ON memories(created_at)',
hash: 'CREATE INDEX IF NOT EXISTS idx_memories_hash ON memories(hash)',
memoryHistory: 'CREATE INDEX IF NOT EXISTS idx_memory_history_memory_id ON memory_history(memory_id)',
vector: 'CREATE INDEX IF NOT EXISTS idx_memories_vector ON memories (libsql_vector_idx(embedding))'
},
// Memory operations
memory: {
checkExists: 'SELECT id FROM memories WHERE hash = ? AND is_deleted = 0',
checkExistsIncludeDeleted: 'SELECT id, is_deleted FROM memories WHERE hash = ?',
restoreDeleted: `
UPDATE memories
SET is_deleted = 0, memory = ?, embedding = ?, metadata = ?, updated_at = ?
WHERE id = ?
`,
insert: `
INSERT INTO memories (id, memory, hash, embedding, metadata, user_id, agent_id, run_id, created_at, updated_at)
VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
`,
getForDelete: 'SELECT memory FROM memories WHERE id = ? AND is_deleted = 0',
softDelete: 'UPDATE memories SET is_deleted = 1, updated_at = ? WHERE id = ?',
getForUpdate: 'SELECT memory, metadata FROM memories WHERE id = ? AND is_deleted = 0',
update: `
UPDATE memories
SET memory = ?, hash = ?, embedding = ?, metadata = ?, updated_at = ?
WHERE id = ?
`,
count: 'SELECT COUNT(*) as total FROM memories m WHERE',
list: `
SELECT
m.id,
m.memory,
m.hash,
m.metadata,
m.user_id,
m.agent_id,
m.run_id,
m.created_at,
m.updated_at
FROM memories m
WHERE
`
},
// History operations
history: {
insert: `
INSERT INTO memory_history (memory_id, previous_value, new_value, action, created_at, updated_at)
VALUES (?, ?, ?, ?, ?, ?)
`,
getByMemoryId: `
SELECT * FROM memory_history
WHERE memory_id = ? AND is_deleted = 0
ORDER BY created_at DESC
`
},
// Search operations
search: {
hybridSearch: `
SELECT * FROM (
SELECT
m.id,
m.memory,
m.hash,
m.metadata,
m.user_id,
m.agent_id,
m.run_id,
m.created_at,
m.updated_at,
CASE
WHEN m.embedding IS NULL THEN 2.0
ELSE vector_distance_cos(m.embedding, vector32(?))
END as distance,
CASE
WHEN m.embedding IS NULL THEN 0.0
ELSE (1 - vector_distance_cos(m.embedding, vector32(?)))
END as vector_similarity,
0.0 as text_similarity,
(
CASE
WHEN m.embedding IS NULL THEN 0.0
ELSE (1 - vector_distance_cos(m.embedding, vector32(?)))
END
) as combined_score
FROM memories m
WHERE
`
},
// User operations
users: {
getUniqueUsers: `
SELECT DISTINCT
user_id,
COUNT(*) as memory_count,
MAX(created_at) as last_memory_date
FROM memories
WHERE user_id IS NOT NULL AND is_deleted = 0
GROUP BY user_id
ORDER BY last_memory_date DESC
`,
countMemoriesForUser: 'SELECT COUNT(*) as total FROM memories WHERE user_id = ?',
deleteAllMemoriesForUser: 'DELETE FROM memories WHERE user_id = ?',
deleteHistoryForUser: 'DELETE FROM memory_history WHERE memory_id IN (SELECT id FROM memories WHERE user_id = ?)'
}
} as const
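
The queries that end in a bare WHERE (memory.count, memory.list, search.hybridSearch) are meant to be completed by the caller, as MemoryService does above. A small sketch of that pattern, assuming MemoryQueries is imported from this module and a @libsql/client connection with a placeholder database URL:

import { createClient } from '@libsql/client'

async function listMemoriesForUser(userId: string) {
  const db = createClient({ url: 'file:memories.db' }) // placeholder path
  // Append conditions after the trailing WHERE of the centralized query.
  const conditions = ['m.is_deleted = 0', 'm.user_id = ?']
  const sql = `${MemoryQueries.memory.list} ${conditions.join(' AND ')} ORDER BY m.created_at DESC`
  const result = await db.execute({ sql, args: [userId] })
  return result.rows
}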

View File

@ -0,0 +1,13 @@
import { FileListResponse, FileMetadata, FileUploadResponse, Provider } from '@types'
export abstract class BaseFileService {
protected readonly provider: Provider
protected constructor(provider: Provider) {
this.provider = provider
}
abstract uploadFile(file: FileMetadata): Promise<FileUploadResponse>
abstract deleteFile(fileId: string): Promise<void>
abstract listFiles(): Promise<FileListResponse>
abstract retrieveFile(fileId: string): Promise<FileUploadResponse>
}

View File

@ -0,0 +1,41 @@
import { Provider } from '@types'
import { BaseFileService } from './BaseFileService'
import { GeminiService } from './GeminiService'
import { MistralService } from './MistralService'
export class FileServiceManager {
private static instance: FileServiceManager
private services: Map<string, BaseFileService> = new Map()
// eslint-disable-next-line @typescript-eslint/no-empty-function
private constructor() {}
static getInstance(): FileServiceManager {
if (!this.instance) {
this.instance = new FileServiceManager()
}
return this.instance
}
getService(provider: Provider): BaseFileService {
const type = provider.type
let service = this.services.get(type)
if (!service) {
switch (type) {
case 'gemini':
service = new GeminiService(provider)
break
case 'mistral':
service = new MistralService(provider)
break
default:
throw new Error(`Unsupported service type: ${type}`)
}
this.services.set(type, service)
}
return service
}
}
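
A minimal usage sketch of the manager, assuming the provider and file objects are populated elsewhere and that FileMetadata is also imported from '@types' in the calling module:

async function uploadViaManager(provider: Provider, file: FileMetadata): Promise<string> {
  // Resolve the per-type service ('gemini' or 'mistral'); unknown types throw.
  const service = FileServiceManager.getInstance().getService(provider)
  const uploaded = await service.uploadFile(file)
  // Simplified: treat anything other than 'success' (e.g. 'processing') as a failure.
  if (uploaded.status !== 'success') {
    throw new Error(`Upload failed for ${file.origin_name}`)
  }
  return uploaded.fileId
}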

View File

@ -0,0 +1,190 @@
import { File, Files, FileState, GoogleGenAI } from '@google/genai'
import { FileListResponse, FileMetadata, FileUploadResponse, Provider } from '@types'
import Logger from 'electron-log'
import { v4 as uuidv4 } from 'uuid'
import { CacheService } from '../CacheService'
import { BaseFileService } from './BaseFileService'
export class GeminiService extends BaseFileService {
private static readonly FILE_LIST_CACHE_KEY = 'gemini_file_list'
private static readonly FILE_CACHE_DURATION = 48 * 60 * 60 * 1000
private static readonly LIST_CACHE_DURATION = 3000
protected readonly fileManager: Files
constructor(provider: Provider) {
super(provider)
this.fileManager = new GoogleGenAI({
vertexai: false,
apiKey: provider.apiKey,
httpOptions: {
baseUrl: provider.apiHost
}
}).files
}
async uploadFile(file: FileMetadata): Promise<FileUploadResponse> {
try {
const uploadResult = await this.fileManager.upload({
file: file.path,
config: {
mimeType: 'application/pdf',
name: file.id,
displayName: file.origin_name
}
})
// Set the response status based on the uploaded file's state
let status: 'success' | 'processing' | 'failed' | 'unknown'
switch (uploadResult.state) {
case FileState.ACTIVE:
status = 'success'
break
case FileState.PROCESSING:
status = 'processing'
break
case FileState.FAILED:
status = 'failed'
break
default:
status = 'unknown'
}
const response: FileUploadResponse = {
fileId: uploadResult.name || '',
displayName: file.origin_name,
status,
originalFile: {
type: 'gemini',
file: uploadResult
}
}
// Only cache files that uploaded successfully
if (status === 'success') {
const cacheKey = `${GeminiService.FILE_LIST_CACHE_KEY}_${response.fileId}`
CacheService.set<FileUploadResponse>(cacheKey, response, GeminiService.FILE_CACHE_DURATION)
}
return response
} catch (error) {
Logger.error('Error uploading file to Gemini:', error)
return {
fileId: '',
displayName: file.origin_name,
status: 'failed',
originalFile: undefined
}
}
}
async retrieveFile(fileId: string): Promise<FileUploadResponse> {
try {
const cachedResponse = CacheService.get<FileUploadResponse>(`${GeminiService.FILE_LIST_CACHE_KEY}_${fileId}`)
Logger.info('[GeminiService] cachedResponse', cachedResponse)
if (cachedResponse) {
return cachedResponse
}
const files: File[] = []
for await (const f of await this.fileManager.list()) {
files.push(f)
}
Logger.info('[GeminiService] files', files)
const file = files
.filter((file) => file.state === FileState.ACTIVE)
.find((file) => file.name?.substring(6) === fileId) // strip the 'files/' prefix
Logger.info('[GeminiService] file', file)
if (file) {
return {
fileId: fileId,
displayName: file.displayName || '',
status: 'success',
originalFile: {
type: 'gemini',
file
}
}
}
return {
fileId: fileId,
displayName: '',
status: 'failed',
originalFile: undefined
}
} catch (error) {
Logger.error('Error retrieving file from Gemini:', error)
return {
fileId: fileId,
displayName: '',
status: 'failed',
originalFile: undefined
}
}
}
async listFiles(): Promise<FileListResponse> {
try {
const cachedList = CacheService.get<FileListResponse>(GeminiService.FILE_LIST_CACHE_KEY)
if (cachedList) {
return cachedList
}
const geminiFiles: File[] = []
for await (const f of await this.fileManager.list()) {
geminiFiles.push(f)
}
const fileList: FileListResponse = {
files: geminiFiles
.filter((file) => file.state === FileState.ACTIVE)
.map((file) => {
// Refresh the cache entry for this individual file
const fileResponse: FileUploadResponse = {
fileId: file.name || uuidv4(),
displayName: file.displayName || '',
status: 'success',
originalFile: {
type: 'gemini',
file
}
}
CacheService.set(
`${GeminiService.FILE_LIST_CACHE_KEY}_${file.name}`,
fileResponse,
GeminiService.FILE_CACHE_DURATION
)
return {
id: file.name || uuidv4(),
displayName: file.displayName || '',
size: Number(file.sizeBytes),
status: 'success',
originalFile: {
type: 'gemini',
file
}
}
})
}
// Refresh the file list cache
CacheService.set(GeminiService.FILE_LIST_CACHE_KEY, fileList, GeminiService.LIST_CACHE_DURATION)
return fileList
} catch (error) {
Logger.error('Error listing files from Gemini:', error)
return { files: [] }
}
}
async deleteFile(fileId: string): Promise<void> {
try {
await this.fileManager.delete({ name: fileId })
Logger.info(`File ${fileId} deleted from Gemini`)
} catch (error) {
Logger.error('Error deleting file from Gemini:', error)
throw error
}
}
}

View File

@ -0,0 +1,104 @@
import fs from 'node:fs/promises'
import { Mistral } from '@mistralai/mistralai'
import { FileListResponse, FileMetadata, FileUploadResponse, Provider } from '@types'
import Logger from 'electron-log'
import { MistralClientManager } from '../MistralClientManager'
import { BaseFileService } from './BaseFileService'
export class MistralService extends BaseFileService {
private readonly client: Mistral
constructor(provider: Provider) {
super(provider)
const clientManager = MistralClientManager.getInstance()
clientManager.initializeClient(provider)
this.client = clientManager.getClient()
}
async uploadFile(file: FileMetadata): Promise<FileUploadResponse> {
try {
const fileBuffer = await fs.readFile(file.path)
const response = await this.client.files.upload({
file: {
fileName: file.origin_name,
content: new Uint8Array(fileBuffer)
},
purpose: 'ocr'
})
return {
fileId: response.id,
displayName: file.origin_name,
status: 'success',
originalFile: {
type: 'mistral',
file: response
}
}
} catch (error) {
Logger.error('Error uploading file:', error)
return {
fileId: '',
displayName: file.origin_name,
status: 'failed'
}
}
}
async listFiles(): Promise<FileListResponse> {
try {
const response = await this.client.files.list({})
return {
files: response.data.map((file) => ({
id: file.id,
displayName: file.filename || '',
size: file.sizeBytes,
status: 'success', // all listed files have already been processed
originalFile: {
type: 'mistral',
file
}
}))
}
} catch (error) {
Logger.error('Error listing files:', error)
return { files: [] }
}
}
async deleteFile(fileId: string): Promise<void> {
try {
await this.client.files.delete({
fileId
})
Logger.info(`File ${fileId} deleted`)
} catch (error) {
Logger.error('Error deleting file:', error)
throw error
}
}
async retrieveFile(fileId: string): Promise<FileUploadResponse> {
try {
const response = await this.client.files.retrieve({
fileId
})
return {
fileId: response.id,
displayName: response.filename || '',
status: 'success' // Retrieved files are always processed
}
} catch (error) {
Logger.error('Error retrieving file:', error)
return {
fileId: fileId,
displayName: '',
status: 'failed',
originalFile: undefined
}
}
}
}

View File

@ -1,37 +1,72 @@
import { IpcChannel } from '@shared/IpcChannel'
import { isMac } from '@main/constant'
import Logger from 'electron-log'
import { windowService } from '../WindowService'
export function handleProvidersProtocolUrl(url: URL) {
const params = new URLSearchParams(url.search)
function ParseData(data: string) {
try {
const result = JSON.parse(Buffer.from(data, 'base64').toString('utf-8'))
return JSON.stringify(result)
} catch (error) {
Logger.error('ParseData error:', { error })
return null
}
}
export async function handleProvidersProtocolUrl(url: URL) {
switch (url.pathname) {
case '/api-keys': {
// jsonConfig example:
// {
// "id": "tokenflux",
// "baseUrl": "https://tokenflux.ai/v1",
// "apiKey": "sk-xxxx"
// "apiKey": "sk-xxxx",
// "name": "TokenFlux", // optional
// "type": "openai" // optional
// }
// cherrystudio://providers/api-keys?data={base64Encode(JSON.stringify(jsonConfig))}
const data = params.get('data')
if (data) {
const stringify = Buffer.from(data, 'base64').toString('utf8')
Logger.info('get api keys from urlschema: ', stringify)
const jsonConfig = JSON.parse(stringify)
Logger.info('get api keys from urlschema: ', jsonConfig)
const mainWindow = windowService.getMainWindow()
if (mainWindow && !mainWindow.isDestroyed()) {
mainWindow.webContents.send(IpcChannel.Provider_AddKey, jsonConfig)
mainWindow.webContents.executeJavaScript(`window.navigate('/settings/provider?id=${jsonConfig.id}')`)
// cherrystudio://providers/api-keys?v=1&data={base64Encode(JSON.stringify(jsonConfig))}
// Replace '+' and '/' with '_' and '-' so URLSearchParams does not mangle the raw base64 payload
const processedSearch = url.search.replaceAll('+', '_').replaceAll('/', '-')
const params = new URLSearchParams(processedSearch)
const data = ParseData(params.get('data')?.replaceAll('_', '+').replaceAll('-', '/') || '')
if (!data) {
Logger.error('handleProvidersProtocolUrl data is null or invalid')
return
}
const mainWindow = windowService.getMainWindow()
const version = params.get('v')
if (version === '1') {
// TODO: handle other versions of the payload format
Logger.info('handleProvidersProtocolUrl', { data, version })
}
// Only navigate if window.navigate is actually available in the main window
if (
mainWindow &&
!mainWindow.isDestroyed() &&
(await mainWindow.webContents.executeJavaScript(`typeof window.navigate === 'function'`))
) {
mainWindow.webContents.executeJavaScript(
`window.navigate('/settings/provider?addProviderData=${encodeURIComponent(data)}')`
)
if (isMac) {
windowService.showMainWindow()
}
} else {
Logger.error('No data found in URL')
setTimeout(() => {
Logger.info('handleProvidersProtocolUrl timeout', { data, version })
handleProvidersProtocolUrl(url)
}, 1000)
}
break
}
default:
console.error(`Unknown MCP protocol URL: ${url}`)
Logger.error(`Unknown providers protocol URL: ${url}`)
break
}
}
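
For reference, a sketch of how the sending side might produce the deep link handled above. The builder is hypothetical and not part of this change; it simply base64-encodes the config JSON, which the handler decodes after shielding '+' and '/' from URLSearchParams.

// Hypothetical sender-side helper (Node-style, using Buffer like the handler above).
function buildApiKeyDeepLink(config: { id: string; baseUrl: string; apiKey: string; name?: string; type?: string }): string {
  const data = Buffer.from(JSON.stringify(config), 'utf-8').toString('base64')
  return `cherrystudio://providers/api-keys?v=1&data=${data}`
}
// Example with placeholder values:
// buildApiKeyDeepLink({ id: 'tokenflux', baseUrl: 'https://tokenflux.ai/v1', apiKey: 'sk-xxxx' })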

View File

@ -44,7 +44,9 @@ export function handleMcpProtocolUrl(url: URL) {
// }
// }
// cherrystudio://mcp/install?servers={base64Encode(JSON.stringify(jsonConfig))}
const data = params.get('servers')
if (data) {
const stringify = Buffer.from(data, 'base64').toString('utf8')
Logger.info('install MCP servers from urlschema: ', stringify)
@ -63,10 +65,8 @@ export function handleMcpProtocolUrl(url: URL) {
}
}
const mainWindow = windowService.getMainWindow()
if (mainWindow && !mainWindow.isDestroyed()) {
mainWindow.webContents.executeJavaScript("window.navigate('/settings/mcp')")
}
windowService.getMainWindow()?.show()
break
}
default:

Some files were not shown because too many files have changed in this diff