Merge branch 'main' into feat/print-to-pdf

Pleasure1234 authored 2025-09-28 20:28:28 +01:00 (committed by GitHub)
commit 2b14546462
285 changed files with 7080 additions and 6263 deletions

4
.github/CODEOWNERS vendored Normal file
View File

@ -0,0 +1,4 @@
/src/renderer/src/store/ @0xfullex
/src/main/services/ConfigManager.ts @0xfullex
/packages/shared/IpcChannel.ts @0xfullex
/src/main/ipc.ts @0xfullex

View File

@ -2,8 +2,8 @@ name: Auto I18N
env:
API_KEY: ${{ secrets.TRANSLATE_API_KEY }}
MODEL: ${{ vars.MODEL || 'deepseek/deepseek-v3.1'}}
BASE_URL: ${{ vars.BASE_URL || 'https://api.ppinfra.com/openai'}}
MODEL: ${{ vars.AUTO_I18N_MODEL || 'deepseek/deepseek-v3.1'}}
BASE_URL: ${{ vars.AUTO_I18N_BASE_URL || 'https://api.ppinfra.com/openai'}}
on:
pull_request:
@ -35,7 +35,7 @@ jobs:
# Install dependencies in a temporary directory
mkdir -p /tmp/translation-deps
cd /tmp/translation-deps
echo '{"dependencies": {"openai": "^5.12.2", "cli-progress": "^3.12.0", "tsx": "^4.20.3", "prettier": "^3.5.3", "prettier-plugin-sort-json": "^4.1.1", "prettier-plugin-tailwindcss": "^0.6.14"}}' > package.json
echo '{"dependencies": {"openai": "^5.12.2", "cli-progress": "^3.12.0", "tsx": "^4.20.3", "@biomejs/biome": "2.2.4"}}' > package.json
npm install --no-package-lock
# Set NODE_PATH so the project can find these dependencies
@ -45,7 +45,7 @@ jobs:
run: npx tsx scripts/auto-translate-i18n.ts
- name: 🔍 Format
run: cd /tmp/translation-deps && npx prettier --write --config /home/runner/work/cherry-studio/cherry-studio/.prettierrc /home/runner/work/cherry-studio/cherry-studio/src/renderer/src/i18n/
run: cd /tmp/translation-deps && npx biome format --config-path /home/runner/work/cherry-studio/cherry-studio/biome.jsonc --write /home/runner/work/cherry-studio/cherry-studio/src/renderer/src/i18n/
- name: 🔄 Commit changes
run: |

View File

@ -1,6 +1,6 @@
name: Claude Translator
concurrency:
group: translator-${{ github.event.comment.id || github.event.issue.number }}
group: translator-${{ github.event.comment.id || github.event.issue.number || github.event.review.id }}
cancel-in-progress: false
on:
@ -8,14 +8,18 @@ on:
types: [opened]
issue_comment:
types: [created, edited]
pull_request_review:
types: [submitted, edited]
pull_request_review_comment:
types: [created, edited]
jobs:
translate:
if: |
(github.event_name == 'issues') ||
(github.event_name == 'issue_comment' && github.event.sender.type != 'Bot') &&
((github.event_name == 'issue_comment' && github.event.action == 'created' && !contains(github.event.comment.body, 'This issue was translated by Claude')) ||
(github.event_name == 'issue_comment' && github.event.action == 'edited'))
(github.event_name == 'issue_comment' && github.event.sender.type != 'Bot') ||
(github.event_name == 'pull_request_review' && github.event.sender.type != 'Bot') ||
(github.event_name == 'pull_request_review_comment' && github.event.sender.type != 'Bot')
runs-on: ubuntu-latest
permissions:
contents: read
@ -37,23 +41,44 @@ jobs:
# Now `contents: read` is safe for files, but we could make a fine-grained token to control it.
# See: https://github.com/anthropics/claude-code-action/blob/main/docs/security.md
github_token: ${{ secrets.TOKEN_GITHUB_WRITE }}
allowed_non_write_users: '*'
allowed_non_write_users: "*"
claude_code_oauth_token: ${{ secrets.CLAUDE_CODE_OAUTH_TOKEN }}
claude_args: '--allowed-tools Bash(gh issue:*),Bash(gh api:repos/*/issues:*)'
claude_args: "--allowed-tools Bash(gh issue:*),Bash(gh api:repos/*/issues:*),Bash(gh api:repos/*/pulls/*/reviews/*),Bash(gh api:repos/*/pulls/comments/*)"
prompt: |
You are a multilingual translation assistant. Please complete the following tasks:
You are a multilingual translation assistant. You need to respond to the following four GitHub webhook events:
- issues
- issue_comment
- pull_request_review
- pull_request_review_comment
Please complete the following tasks:
1. Fetch the full information for the current event.
- If the current event is issues, fetch that issue's information.
- If the current event is issue_comment, fetch that comment's information.
- If the current event is pull_request_review, fetch that review's information.
- If the current event is pull_request_review_comment, fetch that comment's information.
1. Fetch the full information for the current issue/comment
2. Intelligently inspect the content.
1. If the issue/comment has already been translated following the format requirements, check whether the translation matches the original content. If not, re-translate so that it matches and follows the format requirements; if it matches, skip the task.
2. If the issue/comment has not been translated yet, check its language. If it is not English, translate it into English; if it is already English, skip the task.
- If the fetched content has already been translated following the format requirements, check whether the translation matches the original content. If not, re-translate so that it matches and follows the format requirements;
- If the fetched content has not been translated yet, check its language. If it is not English, translate it into English;
- If the fetched content is only partially translated into English, translate all of it into English;
- If the fetched content quotes already-translated content, clean the quoted part so that it contains English only. The quoted content must not include text such as "This xxx was translated by Claude" or "Original Content".
- If the fetched content contains other kinds of quotes, i.e. quotes of content not translated by Claude, keep them verbatim and do not translate them.
- If the fetched content is a reply sent via email, move the quoted email content to the end when translating. Both the original content and the translated content should contain only the reply itself, without the quoted email.
- If the fetched content needs no processing at all, skip the task.
3. Format requirements:
- Title: the English translation (if not already in English)
- Body format:
> [!NOTE]
> This issue/comment was translated by Claude.
> This issue/comment/review was translated by Claude.
[English translation]
[Translated content]
---
<details>
@ -62,15 +87,21 @@ jobs:
</details>
4. Use the gh CLI to update:
- Choose the correct command based on the Event type in the environment information
- If the Event is 'issues': gh issue edit [ISSUE_NUMBER] --title "[English title]" --body "[translation + original content]"
- If the Event is 'issue_comment': gh api -X PATCH /repos/[REPO]/issues/comments/[COMMENT_ID] -f body="[translation + original content]"
- If the Event is 'issues': gh issue edit [ISSUE_NUMBER] --title "[English title]" --body "[translation + original content]"
- If the Event is 'issue_comment': gh api -X PATCH /repos/${{ github.repository }}/issues/comments/${{ github.event.comment.id }} -f body="[translation + original content]"
- If the Event is 'pull_request_review': gh api -X PUT /repos/${{ github.repository }}/pulls/${{ github.event.pull_request.number }}/reviews/${{ github.event.review.id }} -f body="[translation]"
- If the Event is 'pull_request_review_comment': gh api -X PATCH /repos/${{ github.repository }}/pulls/comments/${{ github.event.comment.id }} -f body="[translation + original content]"
Environment information:
- Event: ${{ github.event_name }}
- Issue Number: ${{ github.event.issue.number }}
- Repository: ${{ github.repository }}
- Comment ID: ${{ github.event.comment.id || 'N/A' }} (only available for comment events)
- (Review) Comment ID: ${{ github.event.comment.id || 'N/A' }}
- Pull Request Number: ${{ github.event.pull_request.number || 'N/A' }}
- Review ID: ${{ github.event.review.id || 'N/A' }}
Use the following command to fetch the full information:
gh issue view ${{ github.event.issue.number }} --json title,body,comments

22
.github/workflows/delete-branch.yml vendored Normal file
View File

@ -0,0 +1,22 @@
name: Delete merged branch
on:
pull_request:
types:
- closed
jobs:
delete-branch:
runs-on: ubuntu-latest
permissions:
contents: write
if: github.event.pull_request.merged == true && github.event.pull_request.head.repo.full_name == github.repository
steps:
- name: Delete merged branch
uses: actions/github-script@v7
with:
script: |
github.rest.git.deleteRef({
owner: context.repo.owner,
repo: context.repo.repo,
ref: `heads/${context.payload.pull_request.head.ref}`,
})

View File

@ -98,10 +98,10 @@ jobs:
env:
GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
NODE_OPTIONS: --max-old-space-size=8192
MAIN_VITE_CHERRYIN_CLIENT_SECRET: ${{ secrets.MAIN_VITE_CHERRYIN_CLIENT_SECRET }}
MAIN_VITE_MINERU_API_KEY: ${{ vars.MAIN_VITE_MINERU_API_KEY }}
RENDERER_VITE_AIHUBMIX_SECRET: ${{ vars.RENDERER_VITE_AIHUBMIX_SECRET }}
RENDERER_VITE_PPIO_APP_SECRET: ${{ vars.RENDERER_VITE_PPIO_APP_SECRET }}
MAIN_VITE_CHERRYAI_CLIENT_SECRET: ${{ secrets.MAIN_VITE_CHERRYAI_CLIENT_SECRET }}
MAIN_VITE_MINERU_API_KEY: ${{ secrets.MAIN_VITE_MINERU_API_KEY }}
RENDERER_VITE_AIHUBMIX_SECRET: ${{ secrets.RENDERER_VITE_AIHUBMIX_SECRET }}
RENDERER_VITE_PPIO_APP_SECRET: ${{ secrets.RENDERER_VITE_PPIO_APP_SECRET }}
- name: Build Mac
if: matrix.os == 'macos-latest'
@ -110,15 +110,15 @@ jobs:
env:
CSC_LINK: ${{ secrets.CSC_LINK }}
CSC_KEY_PASSWORD: ${{ secrets.CSC_KEY_PASSWORD }}
APPLE_ID: ${{ vars.APPLE_ID }}
APPLE_APP_SPECIFIC_PASSWORD: ${{ vars.APPLE_APP_SPECIFIC_PASSWORD }}
APPLE_TEAM_ID: ${{ vars.APPLE_TEAM_ID }}
APPLE_ID: ${{ secrets.APPLE_ID }}
APPLE_APP_SPECIFIC_PASSWORD: ${{ secrets.APPLE_APP_SPECIFIC_PASSWORD }}
APPLE_TEAM_ID: ${{ secrets.APPLE_TEAM_ID }}
GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
NODE_OPTIONS: --max-old-space-size=8192
MAIN_VITE_CHERRYIN_CLIENT_SECRET: ${{ secrets.MAIN_VITE_CHERRYIN_CLIENT_SECRET }}
MAIN_VITE_MINERU_API_KEY: ${{ vars.MAIN_VITE_MINERU_API_KEY }}
RENDERER_VITE_AIHUBMIX_SECRET: ${{ vars.RENDERER_VITE_AIHUBMIX_SECRET }}
RENDERER_VITE_PPIO_APP_SECRET: ${{ vars.RENDERER_VITE_PPIO_APP_SECRET }}
MAIN_VITE_CHERRYAI_CLIENT_SECRET: ${{ secrets.MAIN_VITE_CHERRYAI_CLIENT_SECRET }}
MAIN_VITE_MINERU_API_KEY: ${{ secrets.MAIN_VITE_MINERU_API_KEY }}
RENDERER_VITE_AIHUBMIX_SECRET: ${{ secrets.RENDERER_VITE_AIHUBMIX_SECRET }}
RENDERER_VITE_PPIO_APP_SECRET: ${{ secrets.RENDERER_VITE_PPIO_APP_SECRET }}
- name: Build Windows
if: matrix.os == 'windows-latest'
@ -127,10 +127,10 @@ jobs:
env:
GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
NODE_OPTIONS: --max-old-space-size=8192
MAIN_VITE_CHERRYIN_CLIENT_SECRET: ${{ secrets.MAIN_VITE_CHERRYIN_CLIENT_SECRET }}
MAIN_VITE_MINERU_API_KEY: ${{ vars.MAIN_VITE_MINERU_API_KEY }}
RENDERER_VITE_AIHUBMIX_SECRET: ${{ vars.RENDERER_VITE_AIHUBMIX_SECRET }}
RENDERER_VITE_PPIO_APP_SECRET: ${{ vars.RENDERER_VITE_PPIO_APP_SECRET }}
MAIN_VITE_CHERRYAI_CLIENT_SECRET: ${{ secrets.MAIN_VITE_CHERRYAI_CLIENT_SECRET }}
MAIN_VITE_MINERU_API_KEY: ${{ secrets.MAIN_VITE_MINERU_API_KEY }}
RENDERER_VITE_AIHUBMIX_SECRET: ${{ secrets.RENDERER_VITE_AIHUBMIX_SECRET }}
RENDERER_VITE_PPIO_APP_SECRET: ${{ secrets.RENDERER_VITE_PPIO_APP_SECRET }}
- name: Rename artifacts with nightly format
shell: bash

View File

@ -9,12 +9,15 @@ on:
branches:
- main
- develop
- v2
types: [ready_for_review, synchronize, opened]
jobs:
build:
runs-on: ubuntu-latest
env:
PRCI: true
if: github.event.pull_request.draft == false
steps:
- name: Check out Git repository
@ -45,12 +48,12 @@ jobs:
- name: Install Dependencies
run: yarn install
- name: Format Check
run: yarn format:check
- name: Lint Check
run: yarn test:lint
- name: Format Check
run: yarn format:check
- name: Type Check
run: yarn typecheck

View File

@ -85,10 +85,10 @@ jobs:
env:
GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
NODE_OPTIONS: --max-old-space-size=8192
MAIN_VITE_CHERRYIN_CLIENT_SECRET: ${{ secrets.MAIN_VITE_CHERRYIN_CLIENT_SECRET }}
MAIN_VITE_MINERU_API_KEY: ${{ vars.MAIN_VITE_MINERU_API_KEY }}
RENDERER_VITE_AIHUBMIX_SECRET: ${{ vars.RENDERER_VITE_AIHUBMIX_SECRET }}
RENDERER_VITE_PPIO_APP_SECRET: ${{ vars.RENDERER_VITE_PPIO_APP_SECRET }}
MAIN_VITE_CHERRYAI_CLIENT_SECRET: ${{ secrets.MAIN_VITE_CHERRYAI_CLIENT_SECRET }}
MAIN_VITE_MINERU_API_KEY: ${{ secrets.MAIN_VITE_MINERU_API_KEY }}
RENDERER_VITE_AIHUBMIX_SECRET: ${{ secrets.RENDERER_VITE_AIHUBMIX_SECRET }}
RENDERER_VITE_PPIO_APP_SECRET: ${{ secrets.RENDERER_VITE_PPIO_APP_SECRET }}
- name: Build Mac
if: matrix.os == 'macos-latest'
@ -98,15 +98,15 @@ jobs:
env:
CSC_LINK: ${{ secrets.CSC_LINK }}
CSC_KEY_PASSWORD: ${{ secrets.CSC_KEY_PASSWORD }}
APPLE_ID: ${{ vars.APPLE_ID }}
APPLE_APP_SPECIFIC_PASSWORD: ${{ vars.APPLE_APP_SPECIFIC_PASSWORD }}
APPLE_TEAM_ID: ${{ vars.APPLE_TEAM_ID }}
APPLE_ID: ${{ secrets.APPLE_ID }}
APPLE_APP_SPECIFIC_PASSWORD: ${{ secrets.APPLE_APP_SPECIFIC_PASSWORD }}
APPLE_TEAM_ID: ${{ secrets.APPLE_TEAM_ID }}
GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
NODE_OPTIONS: --max-old-space-size=8192
MAIN_VITE_CHERRYIN_CLIENT_SECRET: ${{ secrets.MAIN_VITE_CHERRYIN_CLIENT_SECRET }}
MAIN_VITE_MINERU_API_KEY: ${{ vars.MAIN_VITE_MINERU_API_KEY }}
RENDERER_VITE_AIHUBMIX_SECRET: ${{ vars.RENDERER_VITE_AIHUBMIX_SECRET }}
RENDERER_VITE_PPIO_APP_SECRET: ${{ vars.RENDERER_VITE_PPIO_APP_SECRET }}
MAIN_VITE_CHERRYAI_CLIENT_SECRET: ${{ secrets.MAIN_VITE_CHERRYAI_CLIENT_SECRET }}
MAIN_VITE_MINERU_API_KEY: ${{ secrets.MAIN_VITE_MINERU_API_KEY }}
RENDERER_VITE_AIHUBMIX_SECRET: ${{ secrets.RENDERER_VITE_AIHUBMIX_SECRET }}
RENDERER_VITE_PPIO_APP_SECRET: ${{ secrets.RENDERER_VITE_PPIO_APP_SECRET }}
- name: Build Windows
if: matrix.os == 'windows-latest'
@ -115,10 +115,10 @@ jobs:
env:
GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
NODE_OPTIONS: --max-old-space-size=8192
MAIN_VITE_CHERRYIN_CLIENT_SECRET: ${{ secrets.MAIN_VITE_CHERRYIN_CLIENT_SECRET }}
MAIN_VITE_MINERU_API_KEY: ${{ vars.MAIN_VITE_MINERU_API_KEY }}
RENDERER_VITE_AIHUBMIX_SECRET: ${{ vars.RENDERER_VITE_AIHUBMIX_SECRET }}
RENDERER_VITE_PPIO_APP_SECRET: ${{ vars.RENDERER_VITE_PPIO_APP_SECRET }}
MAIN_VITE_CHERRYAI_CLIENT_SECRET: ${{ secrets.MAIN_VITE_CHERRYAI_CLIENT_SECRET }}
MAIN_VITE_MINERU_API_KEY: ${{ secrets.MAIN_VITE_MINERU_API_KEY }}
RENDERER_VITE_AIHUBMIX_SECRET: ${{ secrets.RENDERER_VITE_AIHUBMIX_SECRET }}
RENDERER_VITE_PPIO_APP_SECRET: ${{ secrets.RENDERER_VITE_PPIO_APP_SECRET }}
- name: Release
uses: ncipollo/release-action@v1

3
.gitignore vendored
View File

@ -37,6 +37,7 @@ dist
out
mcp_server
stats.html
.eslintcache
# ENV
.env
@ -53,6 +54,8 @@ local
.qwen/*
.trae/*
.claude-code-router/*
.codebuddy/*
.zed/*
CLAUDE.local.md
# vitest

215
.oxlintrc.json Normal file
View File

@ -0,0 +1,215 @@
{
"$schema": "./node_modules/oxlint/configuration_schema.json",
"categories": {},
"env": {
"es2022": true
},
"globals": {},
"ignorePatterns": [
"node_modules/**",
"build/**",
"dist/**",
"out/**",
"local/**",
".yarn/**",
".gitignore",
"scripts/cloudflare-worker.js",
"src/main/integration/nutstore/sso/lib/**",
"src/main/integration/cherryai/index.js",
"src/main/integration/nutstore/sso/lib/**",
"src/renderer/src/ui/**",
"packages/**/dist",
"eslint.config.mjs"
],
"overrides": [
// set different env
{
"env": {
"node": true
},
"files": ["src/main/**", "resources/scripts/**", "scripts/**", "playwright.config.ts", "electron.vite.config.ts"]
},
{
"env": {
"browser": true
},
"files": [
"src/renderer/**/*.{ts,tsx}",
"packages/aiCore/**",
"packages/extension-table-plus/**",
"resources/js/**"
]
},
{
"env": {
"node": true,
"vitest": true
},
"files": ["**/__tests__/*.test.{ts,tsx}", "tests/**"]
},
{
"env": {
"browser": true,
"node": true
},
"files": ["src/preload/**"]
}
],
// We don't use the React plugin here because its behavior differs slightly from that of ESLint's React plugin.
"plugins": ["unicorn", "typescript", "oxc", "import"],
"rules": {
"constructor-super": "error",
"for-direction": "error",
"getter-return": "error",
"no-array-constructor": "off",
// "import/no-cycle": "error", // tons of error, bro
"no-async-promise-executor": "error",
"no-caller": "warn",
"no-case-declarations": "error",
"no-class-assign": "error",
"no-compare-neg-zero": "error",
"no-cond-assign": "error",
"no-const-assign": "error",
"no-constant-binary-expression": "error",
"no-constant-condition": "error",
"no-control-regex": "error",
"no-debugger": "error",
"no-delete-var": "error",
"no-dupe-args": "error",
"no-dupe-class-members": "error",
"no-dupe-else-if": "error",
"no-dupe-keys": "error",
"no-duplicate-case": "error",
"no-empty": "error",
"no-empty-character-class": "error",
"no-empty-pattern": "error",
"no-empty-static-block": "error",
"no-eval": "warn",
"no-ex-assign": "error",
"no-extra-boolean-cast": "error",
"no-fallthrough": "warn",
"no-func-assign": "error",
"no-global-assign": "error",
"no-import-assign": "error",
"no-invalid-regexp": "error",
"no-irregular-whitespace": "error",
"no-loss-of-precision": "error",
"no-misleading-character-class": "error",
"no-new-native-nonconstructor": "error",
"no-nonoctal-decimal-escape": "error",
"no-obj-calls": "error",
"no-octal": "error",
"no-prototype-builtins": "error",
"no-redeclare": "error",
"no-regex-spaces": "error",
"no-self-assign": "error",
"no-setter-return": "error",
"no-shadow-restricted-names": "error",
"no-sparse-arrays": "error",
"no-this-before-super": "error",
"no-unassigned-vars": "warn",
"no-undef": "error",
"no-unexpected-multiline": "error",
"no-unreachable": "error",
"no-unsafe-finally": "error",
"no-unsafe-negation": "error",
"no-unsafe-optional-chaining": "error",
"no-unused-expressions": "off", // this rule disallow us to use expression to call function, like `condition && fn()`
"no-unused-labels": "error",
"no-unused-private-class-members": "error",
"no-unused-vars": ["error", { "caughtErrors": "none" }],
"no-useless-backreference": "error",
"no-useless-catch": "error",
"no-useless-escape": "error",
"no-useless-rename": "warn",
"no-with": "error",
"oxc/bad-array-method-on-arguments": "warn",
"oxc/bad-char-at-comparison": "warn",
"oxc/bad-comparison-sequence": "warn",
"oxc/bad-min-max-func": "warn",
"oxc/bad-object-literal-comparison": "warn",
"oxc/bad-replace-all-arg": "warn",
"oxc/const-comparisons": "warn",
"oxc/double-comparisons": "warn",
"oxc/erasing-op": "warn",
"oxc/missing-throw": "warn",
"oxc/number-arg-out-of-range": "warn",
"oxc/only-used-in-recursion": "off", // manually off bacause of existing warning. may turn it on in the future
"oxc/uninvoked-array-callback": "warn",
"require-yield": "error",
"typescript/await-thenable": "warn",
// "typescript/ban-ts-comment": "error",
"typescript/no-array-constructor": "error",
// "typescript/consistent-type-imports": "error",
"typescript/no-array-delete": "warn",
"typescript/no-base-to-string": "warn",
"typescript/no-duplicate-enum-values": "error",
"typescript/no-duplicate-type-constituents": "warn",
"typescript/no-empty-object-type": "off",
"typescript/no-explicit-any": "off", // not safe but too many errors
"typescript/no-extra-non-null-assertion": "error",
"typescript/no-floating-promises": "warn",
"typescript/no-for-in-array": "warn",
"typescript/no-implied-eval": "warn",
"typescript/no-meaningless-void-operator": "warn",
"typescript/no-misused-new": "error",
"typescript/no-misused-spread": "warn",
"typescript/no-namespace": "error",
"typescript/no-non-null-asserted-optional-chain": "off", // it's off now. but may turn it on.
"typescript/no-redundant-type-constituents": "warn",
"typescript/no-require-imports": "off",
"typescript/no-this-alias": "error",
"typescript/no-unnecessary-parameter-property-assignment": "warn",
"typescript/no-unnecessary-type-constraint": "error",
"typescript/no-unsafe-declaration-merging": "error",
"typescript/no-unsafe-function-type": "error",
"typescript/no-unsafe-unary-minus": "warn",
"typescript/no-useless-empty-export": "warn",
"typescript/no-wrapper-object-types": "error",
"typescript/prefer-as-const": "error",
"typescript/prefer-namespace-keyword": "error",
"typescript/require-array-sort-compare": "warn",
"typescript/restrict-template-expressions": "warn",
"typescript/triple-slash-reference": "error",
"typescript/unbound-method": "warn",
"unicorn/no-await-in-promise-methods": "warn",
"unicorn/no-empty-file": "off", // manually off bacause of existing warning. may turn it on in the future
"unicorn/no-invalid-fetch-options": "warn",
"unicorn/no-invalid-remove-event-listener": "warn",
"unicorn/no-new-array": "off", // manually off bacause of existing warning. may turn it on in the future
"unicorn/no-single-promise-in-promise-methods": "warn",
"unicorn/no-thenable": "off", // manually off bacause of existing warning. may turn it on in the future
"unicorn/no-unnecessary-await": "warn",
"unicorn/no-useless-fallback-in-spread": "warn",
"unicorn/no-useless-length-check": "warn",
"unicorn/no-useless-spread": "off", // manually off bacause of existing warning. may turn it on in the future
"unicorn/prefer-set-size": "warn",
"unicorn/prefer-string-starts-ends-with": "warn",
"use-isnan": "error",
"valid-typeof": "error"
},
"settings": {
"jsdoc": {
"augmentsExtendsReplacesDocs": false,
"exemptDestructuredRootsFromChecks": false,
"ignoreInternal": false,
"ignorePrivate": false,
"ignoreReplacesDocs": true,
"implementsReplacesDocs": false,
"overrideReplacesDocs": true,
"tagNamePreference": {}
},
"jsx-a11y": {
"attributes": {},
"components": {},
"polymorphicPropName": null
},
"next": {
"rootDir": []
},
"react": {
"formComponents": [],
"linkComponents": []
}
}
}

View File

@ -1,12 +0,0 @@
out
dist
pnpm-lock.yaml
LICENSE.md
tsconfig.json
tsconfig.*.json
CHANGELOG*.md
agents.json
src/renderer/src/integration/nutstore/sso/lib
AGENT.md
src/main/integration/
.yarn/releases/

View File

@ -1,13 +0,0 @@
{
"bracketSameLine": true,
"endOfLine": "lf",
"jsonRecursiveSort": true,
"jsonSortOrder": "{\"*\": \"lexical\"}",
"plugins": ["prettier-plugin-sort-json", "prettier-plugin-tailwindcss"],
"printWidth": 120,
"semi": false,
"singleQuote": true,
"tailwindFunctions": ["clsx"],
"tailwindStylesheet": "./src/renderer/src/assets/styles/tailwind.css",
"trailingComma": "none"
}

View File

@ -1,8 +1,11 @@
{
"recommendations": [
"dbaeumer.vscode-eslint",
"esbenp.prettier-vscode",
"editorconfig.editorconfig",
"lokalise.i18n-ally"
"lokalise.i18n-ally",
"bradlc.vscode-tailwindcss",
"vitest.explorer",
"oxc.oxc-vscode",
"biomejs.biome"
]
}

16
.vscode/settings.json vendored
View File

@ -1,30 +1,32 @@
{
"[css]": {
"editor.defaultFormatter": "esbenp.prettier-vscode"
"editor.defaultFormatter": "biomejs.biome"
},
"[javascript]": {
"editor.defaultFormatter": "esbenp.prettier-vscode"
"editor.defaultFormatter": "biomejs.biome"
},
"[json]": {
"editor.defaultFormatter": "esbenp.prettier-vscode"
"editor.defaultFormatter": "biomejs.biome"
},
"[jsonc]": {
"editor.defaultFormatter": "esbenp.prettier-vscode"
"editor.defaultFormatter": "biomejs.biome"
},
"[markdown]": {
"files.trimTrailingWhitespace": false
},
"[scss]": {
"editor.defaultFormatter": "esbenp.prettier-vscode"
"editor.defaultFormatter": "biomejs.biome"
},
"[typescript]": {
"editor.defaultFormatter": "esbenp.prettier-vscode"
"editor.defaultFormatter": "biomejs.biome"
},
"[typescriptreact]": {
"editor.defaultFormatter": "esbenp.prettier-vscode"
"editor.defaultFormatter": "biomejs.biome"
},
"editor.codeActionsOnSave": {
"source.fixAll.biome": "explicit",
"source.fixAll.eslint": "explicit",
"source.fixAll.oxc": "explicit",
"source.organizeImports": "never"
},
"editor.formatOnSave": true,

View File

@ -0,0 +1,13 @@
diff --git a/dist/index.mjs b/dist/index.mjs
index 110f37ec18c98b1d55ae2b73cc716194e6f9094d..17e109b7778cbebb904f1919e768d21a2833d965 100644
--- a/dist/index.mjs
+++ b/dist/index.mjs
@@ -448,7 +448,7 @@ function convertToGoogleGenerativeAIMessages(prompt, options) {
// src/get-model-path.ts
function getModelPath(modelId) {
- return modelId.includes("/") ? modelId : `models/${modelId}`;
+ return modelId?.includes("models/") ? modelId : `models/${modelId}`;
}
// src/google-generative-ai-options.ts

View File

@ -5,3 +5,5 @@ httpTimeout: 300000
nodeLinker: node-modules
yarnPath: .yarn/releases/yarn-4.9.1.cjs
npmRegistryServer: https://registry.npmjs.org
npmPublishRegistry: https://registry.npmjs.org

View File

@ -9,6 +9,7 @@ This file provides guidance to Claude Code (claude.ai/code) when working with co
- **Prerequisites**: Node.js v22.x.x or higher, Yarn 4.9.1
- **Setup Yarn**: `corepack enable && corepack prepare yarn@4.9.1 --activate`
- **Install Dependencies**: `yarn install`
- **Add New Dependencies**: `yarn add -D` for renderer-specific dependencies, `yarn add` for others.
### Development
@ -21,7 +22,7 @@ This file provides guidance to Claude Code (claude.ai/code) when working with co
- **Run E2E Tests**: `yarn test:e2e` - Playwright end-to-end tests
- **Type Check**: `yarn typecheck` - Checks TypeScript for both node and web
- **Lint**: `yarn lint` - ESLint with auto-fix
- **Format**: `yarn format` - Prettier formatting
- **Format**: `yarn format` - Biome formatting
### Build & Release

97
biome.jsonc Normal file
View File

@ -0,0 +1,97 @@
{
"$schema": "https://biomejs.dev/schemas/2.2.4/schema.json",
"assist": {
// to sort json
"actions": {
"source": {
"organizeImports": "on",
"useSortedKeys": {
"level": "on",
"options": {
"sortOrder": "lexicographic"
}
}
}
},
"enabled": true,
"includes": ["**/*.json", "!*.json", "!**/package.json"]
},
"css": {
"formatter": {
"quoteStyle": "single"
}
},
"files": { "ignoreUnknown": false },
"formatter": {
"attributePosition": "auto",
"bracketSameLine": false,
"bracketSpacing": true,
"enabled": true,
"expand": "auto",
"formatWithErrors": true,
"includes": [
"**",
"!out/**",
"!**/dist/**",
"!build/**",
"!.yarn/**",
"!.github/**",
"!.husky/**",
"!.vscode/**",
"!*.yaml",
"!*.yml",
"!*.mjs",
"!*.cjs",
"!*.md",
"!*.json",
"!src/main/integration/**",
"!**/tailwind.css",
"!**/package.json"
],
"indentStyle": "space",
"indentWidth": 2,
"lineEnding": "lf",
"lineWidth": 120,
"useEditorconfig": true
},
"html": { "formatter": { "selfCloseVoidElements": "always" } },
"javascript": {
"formatter": {
"arrowParentheses": "always",
"attributePosition": "auto",
// To minimize changes in this PR as much as possible, it's set to true. However, setting it to false would make it more convenient to add attributes at the end.
"bracketSameLine": true,
"bracketSpacing": true,
"jsxQuoteStyle": "double",
"quoteProperties": "asNeeded",
"quoteStyle": "single",
"semicolons": "asNeeded",
"trailingCommas": "none"
}
},
"json": {
"parser": {
"allowComments": true
}
},
"linter": {
"enabled": true,
"includes": ["!**/tailwind.css", "src/renderer/**/*.{tsx,ts}"],
// only enable sorted tailwind css rule. used as formatter instead of linter
"rules": {
"nursery": {
// to sort tailwind css classes
"useSortedClasses": {
"fix": "safe",
"level": "warn",
"options": {
"functions": ["cn"]
}
}
},
"recommended": false,
"suspicious": "off"
}
},
"vcs": { "clientKind": "git", "enabled": false, "useIgnoreFile": false }
}

View File

@ -2,7 +2,9 @@
## IDE Setup
[Cursor](https://www.cursor.com/) + [ESLint](https://marketplace.visualstudio.com/items?itemName=dbaeumer.vscode-eslint) + [Prettier](https://marketplace.visualstudio.com/items?itemName=esbenp.prettier-vscode)
- Editor: [Cursor](https://www.cursor.com/), etc. Any VS Code compatible editor.
- Linter: [ESLint](https://marketplace.visualstudio.com/items?itemName=dbaeumer.vscode-eslint)
- Formatter: [Biome](https://marketplace.visualstudio.com/items?itemName=biomejs.biome)
## Project Setup

View File

@ -24,52 +24,52 @@ protocols:
schemes:
- cherrystudio
files:
- '**/*'
- '!**/{.vscode,.yarn,.yarn-lock,.github,.cursorrules,.prettierrc}'
- '!electron.vite.config.{js,ts,mjs,cjs}}'
- '!**/{.eslintignore,.eslintrc.js,.eslintrc.json,.eslintcache,root.eslint.config.js,eslint.config.js,.eslintrc.cjs,.prettierignore,.prettierrc.yaml,eslint.config.mjs,dev-app-update.yml,CHANGELOG.md,README.md}'
- '!**/{.env,.env.*,.npmrc,pnpm-lock.yaml}'
- '!**/{tsconfig.json,tsconfig.tsbuildinfo,tsconfig.node.json,tsconfig.web.json}'
- '!**/{.editorconfig,.jekyll-metadata}'
- '!src'
- '!scripts'
- '!local'
- '!docs'
- '!packages'
- '!.swc'
- '!.bin'
- '!._*'
- '!*.log'
- '!stats.html'
- '!*.md'
- '!**/*.{iml,o,hprof,orig,pyc,pyo,rbc,swp,csproj,sln,xproj}'
- '!**/*.{map,ts,tsx,jsx,less,scss,sass,css.d.ts,d.cts,d.mts,md,markdown,yaml,yml}'
- '!**/{test,tests,__tests__,powered-test,coverage}/**'
- '!**/{example,examples}/**'
- '!**/*.{spec,test}.{js,jsx,ts,tsx}'
- '!**/*.min.*.map'
- '!**/*.d.ts'
- '!**/dist/es6/**'
- '!**/dist/demo/**'
- '!**/amd/**'
- '!**/{.DS_Store,Thumbs.db,thumbs.db,__pycache__}'
- '!**/{LICENSE,license,LICENSE.*,*.LICENSE.txt,NOTICE.txt,README.md,readme.md,CHANGELOG.md}'
- '!node_modules/rollup-plugin-visualizer'
- '!node_modules/js-tiktoken'
- '!node_modules/@tavily/core/node_modules/js-tiktoken'
- '!node_modules/pdf-parse/lib/pdf.js/{v1.9.426,v1.10.88,v2.0.550}'
- '!node_modules/mammoth/{mammoth.browser.js,mammoth.browser.min.js}'
- '!node_modules/selection-hook/prebuilds/**/*' # we rebuild .node, don't use prebuilds
- '!node_modules/selection-hook/node_modules' # we don't need what's in the node_modules dir
- '!node_modules/selection-hook/src' # we don't need source files
- '!node_modules/tesseract.js-core/{tesseract-core.js,tesseract-core.wasm,tesseract-core.wasm.js}' # we don't need source files
- '!node_modules/tesseract.js-core/{tesseract-core-lstm.js,tesseract-core-lstm.wasm,tesseract-core-lstm.wasm.js}' # we don't need source files
- '!node_modules/tesseract.js-core/{tesseract-core-simd-lstm.js,tesseract-core-simd-lstm.wasm,tesseract-core-simd-lstm.wasm.js}' # we don't need source files
- '!**/*.{h,iobj,ipdb,tlog,recipe,vcxproj,vcxproj.filters,Makefile,*.Makefile}' # filter .node build files
- "**/*"
- "!**/{.vscode,.yarn,.yarn-lock,.github,.cursorrules,.prettierrc}"
- "!electron.vite.config.{js,ts,mjs,cjs}}"
- "!**/{.eslintignore,.eslintrc.js,.eslintrc.json,.eslintcache,root.eslint.config.js,eslint.config.js,.eslintrc.cjs,.prettierignore,.prettierrc.yaml,eslint.config.mjs,dev-app-update.yml,CHANGELOG.md,README.md,biome.jsonc}"
- "!**/{.env,.env.*,.npmrc,pnpm-lock.yaml}"
- "!**/{tsconfig.json,tsconfig.tsbuildinfo,tsconfig.node.json,tsconfig.web.json}"
- "!**/{.editorconfig,.jekyll-metadata}"
- "!src"
- "!scripts"
- "!local"
- "!docs"
- "!packages"
- "!.swc"
- "!.bin"
- "!._*"
- "!*.log"
- "!stats.html"
- "!*.md"
- "!**/*.{iml,o,hprof,orig,pyc,pyo,rbc,swp,csproj,sln,xproj}"
- "!**/*.{map,ts,tsx,jsx,less,scss,sass,css.d.ts,d.cts,d.mts,md,markdown,yaml,yml}"
- "!**/{test,tests,__tests__,powered-test,coverage}/**"
- "!**/{example,examples}/**"
- "!**/*.{spec,test}.{js,jsx,ts,tsx}"
- "!**/*.min.*.map"
- "!**/*.d.ts"
- "!**/dist/es6/**"
- "!**/dist/demo/**"
- "!**/amd/**"
- "!**/{.DS_Store,Thumbs.db,thumbs.db,__pycache__}"
- "!**/{LICENSE,license,LICENSE.*,*.LICENSE.txt,NOTICE.txt,README.md,readme.md,CHANGELOG.md}"
- "!node_modules/rollup-plugin-visualizer"
- "!node_modules/js-tiktoken"
- "!node_modules/@tavily/core/node_modules/js-tiktoken"
- "!node_modules/pdf-parse/lib/pdf.js/{v1.9.426,v1.10.88,v2.0.550}"
- "!node_modules/mammoth/{mammoth.browser.js,mammoth.browser.min.js}"
- "!node_modules/selection-hook/prebuilds/**/*" # we rebuild .node, don't use prebuilds
- "!node_modules/selection-hook/node_modules" # we don't need what in the node_modules dir
- "!node_modules/selection-hook/src" # we don't need source files
- "!node_modules/tesseract.js-core/{tesseract-core.js,tesseract-core.wasm,tesseract-core.wasm.js}" # we don't need source files
- "!node_modules/tesseract.js-core/{tesseract-core-lstm.js,tesseract-core-lstm.wasm,tesseract-core-lstm.wasm.js}" # we don't need source files
- "!node_modules/tesseract.js-core/{tesseract-core-simd-lstm.js,tesseract-core-simd-lstm.wasm,tesseract-core-simd-lstm.wasm.js}" # we don't need source files
- "!**/*.{h,iobj,ipdb,tlog,recipe,vcxproj,vcxproj.filters,Makefile,*.Makefile}" # filter .node build files
asarUnpack:
- resources/**
- '**/*.{metal,exp,lib}'
- 'node_modules/@img/sharp-libvips-*/**'
- "**/*.{metal,exp,lib}"
- "node_modules/@img/sharp-libvips-*/**"
win:
executableName: Cherry Studio
artifactName: ${productName}-${version}-${arch}-setup.${ext}
@ -95,7 +95,7 @@ mac:
entitlementsInherit: build/entitlements.mac.plist
notarize: false
artifactName: ${productName}-${version}-${arch}.${ext}
minimumSystemVersion: '20.1.0' # minimum supported macOS version: 11.0
minimumSystemVersion: "20.1.0" # minimum supported macOS version: 11.0
extendInfo:
- NSCameraUsageDescription: Application requests access to the device's camera.
- NSMicrophoneUsageDescription: Application requests access to the device's microphone.
@ -120,7 +120,7 @@ linux:
rpm:
# Workaround for electron build issue on rpm package:
# https://github.com/electron/forge/issues/3594
fpm: ['--rpm-rpmbuild-define=_build_id_links none']
fpm: ["--rpm-rpmbuild-define=_build_id_links none"]
publish:
provider: generic
url: https://releases.cherry-ai.com
@ -132,25 +132,7 @@ afterSign: scripts/notarize.js
artifactBuildCompleted: scripts/artifact-build-completed.js
releaseInfo:
releaseNotes: |
✨ New features:
- Refactored the knowledge base module, improving document processing and search performance
- Added PaddleOCR support to enhance document recognition
- Support customizing window control button styles
- Added the AI SDK package to extend AI capability integration
- Support drag-and-drop reordering of tabs
- Enhanced sync and logging in the notes editor
🔧 Performance improvements:
- Improved logging and error handling in the MCP service
- Improved User-Agent handling in the WebView service
- Improved mini-app title bar styling and status bar adaptation
- Refactored dependency management, cleaning up and optimizing package.json
🐛 Bug fixes:
- Fixed an infinite state-update loop in the input bar
- Fixed the hover delay of the window control tooltip
- Fixed handling of pasting from multiple content sources into the translation input
- Fixed a navigation service initialization timing issue
- Fixed argument conversion when adding MCP servers via JSON
- Fixed URL formatting when syncing model-scoped servers
- Standardized tooltip icon styles
Optimized note-taking feature, now able to quickly rename by modifying the title
Fixed issue where CherryAI free model could not be used
Fixed issue where VertexAI proxy address could not be called normally
Fixed issue where built-in tools from service providers could not be called normally

View File

@ -4,7 +4,9 @@ import { defineConfig, externalizeDepsPlugin } from 'electron-vite'
import { resolve } from 'path'
import { visualizer } from 'rollup-plugin-visualizer'
import pkg from './package.json' assert { type: 'json' }
// assert not supported by biome
// import pkg from './package.json' assert { type: 'json' }
import pkg from './package.json'
const visualizerPlugin = (type: 'renderer' | 'main') => {
return process.env[`VISUALIZER_${type.toUpperCase()}`] ? [visualizer({ open: true })] : []
@ -32,6 +34,10 @@ export default defineConfig({
output: {
manualChunks: undefined, // completely disable code splitting - force single-file bundling
inlineDynamicImports: true // inline all dynamic imports; this is the key setting
},
onwarn(warning, warn) {
if (warning.code === 'COMMONJS_VARIABLE_IN_ESM') return
warn(warning)
}
},
sourcemap: isDev
@ -109,6 +115,10 @@ export default defineConfig({
selectionToolbar: resolve(__dirname, 'src/renderer/selectionToolbar.html'),
selectionAction: resolve(__dirname, 'src/renderer/selectionAction.html'),
traceWindow: resolve(__dirname, 'src/renderer/traceWindow.html')
},
onwarn(warning, warn) {
if (warning.code === 'COMMONJS_VARIABLE_IN_ESM') return
warn(warning)
}
}
},

View File

@ -1,8 +1,8 @@
import electronConfigPrettier from '@electron-toolkit/eslint-config-prettier'
import tseslint from '@electron-toolkit/eslint-config-ts'
import eslint from '@eslint/js'
import eslintReact from '@eslint-react/eslint-plugin'
import { defineConfig } from 'eslint/config'
import oxlint from 'eslint-plugin-oxlint'
import reactHooks from 'eslint-plugin-react-hooks'
import simpleImportSort from 'eslint-plugin-simple-import-sort'
import unusedImports from 'eslint-plugin-unused-imports'
@ -10,7 +10,6 @@ import unusedImports from 'eslint-plugin-unused-imports'
export default defineConfig([
eslint.configs.recommended,
tseslint.configs.recommended,
electronConfigPrettier,
eslintReact.configs['recommended-typescript'],
reactHooks.configs['recommended-latest'],
{
@ -26,7 +25,6 @@ export default defineConfig([
'simple-import-sort/exports': 'error',
'unused-imports/no-unused-imports': 'error',
'@eslint-react/no-prop-types': 'error',
'prettier/prettier': ['error']
}
},
// Configuration for ensuring compatibility with the original ESLint(8.x) rules
@ -50,10 +48,31 @@ export default defineConfig([
'@eslint-react/no-children-to-array': 'off'
}
},
{
ignores: [
'node_modules/**',
'build/**',
'dist/**',
'out/**',
'local/**',
'.yarn/**',
'.gitignore',
'scripts/cloudflare-worker.js',
'src/main/integration/nutstore/sso/lib/**',
'src/main/integration/cherryai/index.js',
'src/main/integration/nutstore/sso/lib/**',
'src/renderer/src/ui/**',
'packages/**/dist'
]
},
// turn off oxlint supported rules.
...oxlint.configs['flat/eslint'],
...oxlint.configs['flat/typescript'],
...oxlint.configs['flat/unicorn'],
{
// LoggerService Custom Rules - only apply to src directory
files: ['src/**/*.{ts,tsx,js,jsx}'],
ignores: ['src/**/__tests__/**', 'src/**/__mocks__/**', 'src/**/*.test.*'],
ignores: ['src/**/__tests__/**', 'src/**/__mocks__/**', 'src/**/*.test.*', 'src/preload/**'],
rules: {
'no-restricted-syntax': [
process.env.PRCI ? 'error' : 'warn',
@ -112,21 +131,4 @@ export default defineConfig([
'i18n/no-template-in-t': 'warn'
}
},
{
ignores: [
'node_modules/**',
'build/**',
'dist/**',
'out/**',
'local/**',
'.yarn/**',
'.gitignore',
'scripts/cloudflare-worker.js',
'src/main/integration/nutstore/sso/lib/**',
'src/main/integration/cherryin/index.js',
'src/main/integration/nutstore/sso/lib/**',
'src/renderer/src/ui/**',
'packages/**/dist'
]
}
])

View File

@ -1,6 +1,6 @@
{
"name": "CherryStudio",
"version": "1.6.0-beta.7",
"version": "1.6.2",
"private": true,
"description": "A powerful AI assistant for producer.",
"main": "./out/main/index.js",
@ -48,8 +48,8 @@
"analyze:renderer": "VISUALIZER_RENDERER=true yarn build",
"analyze:main": "VISUALIZER_MAIN=true yarn build",
"typecheck": "concurrently -n \"node,web\" -c \"cyan,magenta\" \"npm run typecheck:node\" \"npm run typecheck:web\"",
"typecheck:node": "tsc --noEmit -p tsconfig.node.json --composite false",
"typecheck:web": "tsc --noEmit -p tsconfig.web.json --composite false",
"typecheck:node": "tsgo --noEmit -p tsconfig.node.json --composite false",
"typecheck:web": "tsgo --noEmit -p tsconfig.web.json --composite false",
"check:i18n": "tsx scripts/check-i18n.ts",
"sync:i18n": "tsx scripts/sync-i18n.ts",
"update:i18n": "dotenv -e .env -- tsx scripts/update-i18n.ts",
@ -63,13 +63,16 @@
"test:ui": "vitest --ui",
"test:watch": "vitest",
"test:e2e": "yarn playwright test",
"test:lint": "eslint . --ext .js,.jsx,.cjs,.mjs,.ts,.tsx,.cts,.mts",
"test:lint": "oxlint --deny-warnings && eslint . --ext .js,.jsx,.cjs,.mjs,.ts,.tsx,.cts,.mts --cache",
"test:scripts": "vitest scripts",
"format": "prettier --write .",
"format:check": "prettier --check .",
"lint": "eslint . --ext .js,.jsx,.cjs,.mjs,.ts,.tsx,.cts,.mts --fix && yarn typecheck && yarn check:i18n",
"lint": "oxlint --fix && eslint . --ext .js,.jsx,.cjs,.mjs,.ts,.tsx,.cts,.mts --fix --cache && yarn typecheck && yarn check:i18n",
"format": "biome format --write && biome lint --write",
"format:check": "biome format && biome lint",
"prepare": "git config blame.ignoreRevsFile .git-blame-ignore-revs && husky",
"claude": "dotenv -e .env -- claude"
"claude": "dotenv -e .env -- claude",
"release:aicore:alpha": "yarn workspace @cherrystudio/ai-core version prerelease --immediate && yarn workspace @cherrystudio/ai-core npm publish --tag alpha --access public",
"release:aicore:beta": "yarn workspace @cherrystudio/ai-core version prerelease --immediate && yarn workspace @cherrystudio/ai-core npm publish --tag beta --access public",
"release:aicore": "yarn workspace @cherrystudio/ai-core version patch --immediate && yarn workspace @cherrystudio/ai-core npm publish --access public"
},
"dependencies": {
"@libsql/client": "0.14.0",
@ -77,7 +80,7 @@
"@napi-rs/system-ocr": "patch:@napi-rs/system-ocr@npm%3A1.0.2#~/.yarn/patches/@napi-rs-system-ocr-npm-1.0.2-59e7a78e8b.patch",
"@strongtz/win32-arm64-msvc": "^0.4.7",
"express": "^5.1.0",
"faiss-node": "^0.5.1",
"font-list": "^2.0.0",
"graceful-fs": "^4.2.11",
"jsdom": "26.1.0",
"node-stream-zip": "^1.15.0",
@ -94,16 +97,18 @@
"@agentic/exa": "^7.3.3",
"@agentic/searxng": "^7.3.3",
"@agentic/tavily": "^7.3.3",
"@ai-sdk/amazon-bedrock": "^3.0.0",
"@ai-sdk/google-vertex": "^3.0.25",
"@ai-sdk/mistral": "^2.0.0",
"@ai-sdk/amazon-bedrock": "^3.0.21",
"@ai-sdk/google-vertex": "^3.0.27",
"@ai-sdk/mistral": "^2.0.14",
"@ai-sdk/perplexity": "^2.0.9",
"@ant-design/v5-patch-for-react-19": "^1.0.3",
"@anthropic-ai/sdk": "^0.41.0",
"@anthropic-ai/vertex-sdk": "patch:@anthropic-ai/vertex-sdk@npm%3A0.11.4#~/.yarn/patches/@anthropic-ai-vertex-sdk-npm-0.11.4-c19cb41edb.patch",
"@aws-sdk/client-bedrock": "^3.840.0",
"@aws-sdk/client-bedrock-runtime": "^3.840.0",
"@aws-sdk/client-s3": "^3.840.0",
"@cherrystudio/ai-core": "workspace:*",
"@biomejs/biome": "2.2.4",
"@cherrystudio/ai-core": "workspace:^1.0.0-alpha.18",
"@cherrystudio/embedjs": "^0.1.31",
"@cherrystudio/embedjs-libsql": "^0.1.31",
"@cherrystudio/embedjs-loader-csv": "^0.1.31",
@ -121,7 +126,6 @@
"@dnd-kit/modifiers": "^9.0.0",
"@dnd-kit/sortable": "^10.0.0",
"@dnd-kit/utilities": "^3.2.2",
"@electron-toolkit/eslint-config-prettier": "^3.0.0",
"@electron-toolkit/eslint-config-ts": "^3.0.0",
"@electron-toolkit/preload": "^3.0.0",
"@electron-toolkit/tsconfig": "^1.0.1",
@ -135,9 +139,6 @@
"@heroui/react": "^2.8.3",
"@kangfenmao/keyv-storage": "^0.1.0",
"@langchain/community": "^0.3.50",
"@langchain/core": "^0.3.68",
"@langchain/ollama": "^0.2.1",
"@langchain/openai": "^0.6.7",
"@mistralai/mistralai": "^1.7.5",
"@modelcontextprotocol/sdk": "^1.17.5",
"@mozilla/readability": "^0.6.0",
@ -202,6 +203,7 @@
"@types/tinycolor2": "^1",
"@types/turndown": "^5.0.5",
"@types/word-extractor": "^1",
"@typescript/native-preview": "latest",
"@uiw/codemirror-extensions-langs": "^4.25.1",
"@uiw/codemirror-themes-all": "^4.25.1",
"@uiw/react-codemirror": "^4.25.1",
@ -213,7 +215,7 @@
"@viz-js/lang-dot": "^1.0.5",
"@viz-js/viz": "^3.14.0",
"@xyflow/react": "^12.4.4",
"ai": "^5.0.38",
"ai": "^5.0.44",
"antd": "patch:antd@npm%3A5.27.0#~/.yarn/patches/antd-npm-5.27.0-aa91c36546.patch",
"archiver": "^7.0.1",
"async-mutex": "^0.5.0",
@ -247,6 +249,7 @@
"emoji-picker-element": "^1.22.1",
"epub": "patch:epub@npm%3A1.3.0#~/.yarn/patches/epub-npm-1.3.0-8325494ffe.patch",
"eslint": "^9.22.0",
"eslint-plugin-oxlint": "^1.15.0",
"eslint-plugin-react-hooks": "^5.2.0",
"eslint-plugin-simple-import-sort": "^12.1.1",
"eslint-plugin-unused-imports": "^4.1.4",
@ -282,13 +285,12 @@
"notion-helper": "^1.3.22",
"npx-scope-finder": "^1.2.0",
"openai": "patch:openai@npm%3A5.12.2#~/.yarn/patches/openai-npm-5.12.2-30b075401c.patch",
"oxlint": "^1.15.0",
"oxlint-tsgolint": "^0.2.0",
"p-queue": "^8.1.0",
"pdf-lib": "^1.17.1",
"pdf-parse": "^1.1.1",
"playwright": "^1.52.0",
"prettier": "^3.5.3",
"prettier-plugin-sort-json": "^4.1.1",
"prettier-plugin-tailwindcss": "^0.6.14",
"proxy-agent": "^6.5.0",
"react": "^19.0.0",
"react-dom": "^19.0.0",
@ -330,7 +332,7 @@
"tsx": "^4.20.3",
"turndown-plugin-gfm": "^1.0.2",
"tw-animate-css": "^1.3.8",
"typescript": "^5.6.2",
"typescript": "~5.8.2",
"undici": "6.21.2",
"unified": "^11.0.5",
"uuid": "^10.0.0",
@ -366,16 +368,17 @@
"pkce-challenge@npm:^4.1.0": "patch:pkce-challenge@npm%3A4.1.0#~/.yarn/patches/pkce-challenge-npm-4.1.0-fbc51695a3.patch",
"undici": "6.21.2",
"vite": "npm:rolldown-vite@latest",
"tesseract.js@npm:*": "patch:tesseract.js@npm%3A6.0.1#~/.yarn/patches/tesseract.js-npm-6.0.1-2562a7e46d.patch"
"tesseract.js@npm:*": "patch:tesseract.js@npm%3A6.0.1#~/.yarn/patches/tesseract.js-npm-6.0.1-2562a7e46d.patch",
"@ai-sdk/google@npm:2.0.14": "patch:@ai-sdk/google@npm%3A2.0.14#~/.yarn/patches/@ai-sdk-google-npm-2.0.14-376d8b03cc.patch"
},
"packageManager": "yarn@4.9.1",
"lint-staged": {
"*.{js,jsx,ts,tsx,cjs,mjs,cts,mts}": [
"prettier --write",
"biome format --write --no-errors-on-unmatched",
"eslint --fix"
],
"*.{json,yml,yaml,css,html}": [
"prettier --write"
"biome format --write --no-errors-on-unmatched"
]
}
}

View File

@ -1,6 +1,6 @@
{
"name": "@cherrystudio/ai-core",
"version": "1.0.0-alpha.14",
"version": "1.0.0-alpha.18",
"description": "Cherry Studio AI Core - Unified AI Provider Interface Based on Vercel AI SDK",
"main": "dist/index.js",
"module": "dist/index.mjs",
@ -36,15 +36,14 @@
"ai": "^5.0.26"
},
"dependencies": {
"@ai-sdk/anthropic": "^2.0.5",
"@ai-sdk/azure": "^2.0.16",
"@ai-sdk/deepseek": "^1.0.9",
"@ai-sdk/google": "^2.0.13",
"@ai-sdk/openai": "^2.0.26",
"@ai-sdk/openai-compatible": "^1.0.9",
"@ai-sdk/anthropic": "^2.0.17",
"@ai-sdk/azure": "^2.0.30",
"@ai-sdk/deepseek": "^1.0.17",
"@ai-sdk/openai": "^2.0.30",
"@ai-sdk/openai-compatible": "^1.0.17",
"@ai-sdk/provider": "^2.0.0",
"@ai-sdk/provider-utils": "^3.0.4",
"@ai-sdk/xai": "^2.0.9",
"@ai-sdk/provider-utils": "^3.0.9",
"@ai-sdk/xai": "^2.0.18",
"zod": "^4.1.5"
},
"devDependencies": {

View File

@ -59,7 +59,7 @@ export function createGoogleOptions(options: ExtractProviderOptions<'google'>) {
/**
* Convenience helper for OpenRouter provider options
*/
export function createOpenRouterOptions(options: ExtractProviderOptions<'openrouter'>) {
export function createOpenRouterOptions(options: ExtractProviderOptions<'openrouter'> | Record<string, any>) {
return createProviderOptions('openrouter', options)
}

View File

@ -1,38 +0,0 @@
export type OpenRouterProviderOptions = {
models?: string[]
/**
* https://openrouter.ai/docs/use-cases/reasoning-tokens
* One of `max_tokens` or `effort` is required.
* If `exclude` is true, reasoning will be removed from the response. Default is false.
*/
reasoning?: {
exclude?: boolean
} & (
| {
max_tokens: number
}
| {
effort: 'high' | 'medium' | 'low'
}
)
/**
* A unique identifier representing your end-user, which can
* help OpenRouter to monitor and detect abuse.
*/
user?: string
extraBody?: Record<string, unknown>
/**
* Enable usage accounting to get detailed token usage information.
* https://openrouter.ai/docs/use-cases/usage-accounting
*/
usage?: {
/**
* When true, includes token usage information in the response.
*/
include: boolean
}
}

View File

@ -2,9 +2,8 @@ import { type AnthropicProviderOptions } from '@ai-sdk/anthropic'
import { type GoogleGenerativeAIProviderOptions } from '@ai-sdk/google'
import { type OpenAIResponsesProviderOptions } from '@ai-sdk/openai'
import { type SharedV2ProviderMetadata } from '@ai-sdk/provider'
import { type OpenRouterProviderOptions } from './openrouter'
import { type XaiProviderOptions } from './xai'
import { type XaiProviderOptions } from '@ai-sdk/xai'
import { type OpenRouterProviderOptions } from '@openrouter/ai-sdk-provider'
export type ProviderOptions<T extends keyof SharedV2ProviderMetadata> = SharedV2ProviderMetadata[T]

View File

@ -1,86 +0,0 @@
// copy from @ai-sdk/xai/xai-chat-options.ts
// Delete this file once @ai-sdk/xai exposes xaiProviderOptions
import { z } from 'zod'
const webSourceSchema = z.object({
type: z.literal('web'),
country: z.string().length(2).optional(),
excludedWebsites: z.array(z.string()).max(5).optional(),
allowedWebsites: z.array(z.string()).max(5).optional(),
safeSearch: z.boolean().optional()
})
const xSourceSchema = z.object({
type: z.literal('x'),
xHandles: z.array(z.string()).optional()
})
const newsSourceSchema = z.object({
type: z.literal('news'),
country: z.string().length(2).optional(),
excludedWebsites: z.array(z.string()).max(5).optional(),
safeSearch: z.boolean().optional()
})
const rssSourceSchema = z.object({
type: z.literal('rss'),
links: z.array(z.url()).max(1) // currently only supports one RSS link
})
const searchSourceSchema = z.discriminatedUnion('type', [
webSourceSchema,
xSourceSchema,
newsSourceSchema,
rssSourceSchema
])
export const xaiProviderOptions = z.object({
/**
* reasoning effort for reasoning models
* only supported by grok-3-mini and grok-3-mini-fast models
*/
reasoningEffort: z.enum(['low', 'high']).optional(),
searchParameters: z
.object({
/**
* search mode preference
* - "off": disables search completely
* - "auto": model decides whether to search (default)
* - "on": always enables search
*/
mode: z.enum(['off', 'auto', 'on']),
/**
* whether to return citations in the response
* defaults to true
*/
returnCitations: z.boolean().optional(),
/**
* start date for search data (ISO8601 format: YYYY-MM-DD)
*/
fromDate: z.string().optional(),
/**
* end date for search data (ISO8601 format: YYYY-MM-DD)
*/
toDate: z.string().optional(),
/**
* maximum number of search results to consider
* defaults to 20
*/
maxSearchResults: z.number().min(1).max(50).optional(),
/**
* data sources to search from
* defaults to ["web", "x"] if not specified
*/
sources: z.array(searchSourceSchema).optional()
})
.optional()
})
export type XaiProviderOptions = z.infer<typeof xaiProviderOptions>

View File

@ -24,7 +24,6 @@ export const googleToolsPlugin = (config?: ToolConfig) =>
if (!typedParams.tools) {
typedParams.tools = {}
}
// Iterate over the config in a type-safe way
;(Object.keys(config) as ToolConfigKey[]).forEach((key) => {
if (config[key] && key in toolNameMap && key in google.tools) {

View File

@ -7,5 +7,9 @@ export const BUILT_IN_PLUGIN_PREFIX = 'built-in:'
export { googleToolsPlugin } from './googleToolsPlugin'
export { createLoggingPlugin } from './logging'
export { createPromptToolUsePlugin } from './toolUsePlugin/promptToolUsePlugin'
export type { PromptToolUseConfig, ToolUseRequestContext, ToolUseResult } from './toolUsePlugin/type'
export { webSearchPlugin } from './webSearchPlugin'
export type {
PromptToolUseConfig,
ToolUseRequestContext,
ToolUseResult
} from './toolUsePlugin/type'
export { webSearchPlugin, type WebSearchPluginConfig } from './webSearchPlugin'

View File

@ -261,22 +261,39 @@ export const createPromptToolUsePlugin = (config: PromptToolUseConfig = {}) => {
return params
}
context.mcpTools = params.tools
// Separate provider-defined tools from other tool types
const providerDefinedTools: ToolSet = {}
const promptTools: ToolSet = {}
// Build the system prompt
for (const [toolName, tool] of Object.entries(params.tools as ToolSet)) {
if (tool.type === 'provider-defined') {
// provider-defined tools stay in the tools parameter
providerDefinedTools[toolName] = tool
} else {
// all other tools are converted to prompt mode
promptTools[toolName] = tool
}
}
// Only save to context when there are non-provider-defined tools
if (Object.keys(promptTools).length > 0) {
context.mcpTools = promptTools
}
// Build the system prompt (non-provider-defined tools only)
const userSystemPrompt = typeof params.system === 'string' ? params.system : ''
const systemPrompt = buildSystemPrompt(userSystemPrompt, params.tools)
const systemPrompt = buildSystemPrompt(userSystemPrompt, promptTools)
let systemMessage: string | null = systemPrompt
if (config.createSystemMessage) {
// 🎯 If the user provided a custom handler, use it
systemMessage = config.createSystemMessage(systemPrompt, params, context)
}
// Remove tools and switch to prompt mode
// Keep provider-defined tools, remove the rest
const transformedParams = {
...params,
...(systemMessage ? { system: systemMessage } : {}),
tools: undefined
tools: Object.keys(providerDefinedTools).length > 0 ? providerDefinedTools : undefined
}
context.originalParams = transformedParams
return transformedParams
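
To make the branching above concrete, here is a minimal, framework-free sketch of the same split (simplified stand-in shapes, not the real ToolSet types): provider-defined tools stay native in params.tools, everything else is routed to prompt mode.

// Illustration only - simplified stand-ins for the SDK's tool objects
type AnyTool = { type?: string }

const splitTools = (tools: Record<string, AnyTool>) => {
  const providerDefinedTools: Record<string, AnyTool> = {}
  const promptTools: Record<string, AnyTool> = {}
  for (const [name, tool] of Object.entries(tools)) {
    if (tool.type === 'provider-defined') providerDefinedTools[name] = tool
    else promptTools[name] = tool
  }
  return { providerDefinedTools, promptTools }
}

// e.g. a native web-search tool stays in params.tools, while an MCP tool moves into the system prompt
const { providerDefinedTools, promptTools } = splitTools({
  web_search: { type: 'provider-defined' },
  read_file: { type: 'function' }
})
// providerDefinedTools -> { web_search }, promptTools -> { read_file }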
@ -285,8 +302,9 @@ export const createPromptToolUsePlugin = (config: PromptToolUseConfig = {}) => {
let textBuffer = ''
// let stepId = ''
// If no tools need prompt-mode handling, return the original stream unchanged
if (!context.mcpTools) {
throw new Error('No tools available')
return new TransformStream()
}
// Get or initialize the usage accumulator from context

View File

@ -1,15 +1,19 @@
import { anthropic } from '@ai-sdk/anthropic'
import { google } from '@ai-sdk/google'
import { openai } from '@ai-sdk/openai'
import { InferToolInput, InferToolOutput } from 'ai'
import { ProviderOptionsMap } from '../../../options/types'
import { OpenRouterSearchConfig } from './openrouter'
/**
* AI SDK
*/
type OpenAISearchConfig = Parameters<typeof openai.tools.webSearchPreview>[0]
type AnthropicSearchConfig = Parameters<typeof anthropic.tools.webSearch_20250305>[0]
type GoogleSearchConfig = Parameters<typeof google.tools.googleSearch>[0]
export type OpenAISearchConfig = NonNullable<Parameters<typeof openai.tools.webSearch>[0]>
export type OpenAISearchPreviewConfig = NonNullable<Parameters<typeof openai.tools.webSearchPreview>[0]>
export type AnthropicSearchConfig = NonNullable<Parameters<typeof anthropic.tools.webSearch_20250305>[0]>
export type GoogleSearchConfig = NonNullable<Parameters<typeof google.tools.googleSearch>[0]>
export type XAISearchConfig = NonNullable<ProviderOptionsMap['xai']['searchParameters']>
/**
*
@ -18,10 +22,12 @@ type GoogleSearchConfig = Parameters<typeof google.tools.googleSearch>[0]
*/
export interface WebSearchPluginConfig {
openai?: OpenAISearchConfig
'openai-chat'?: OpenAISearchPreviewConfig
anthropic?: AnthropicSearchConfig
xai?: ProviderOptionsMap['xai']['searchParameters']
google?: GoogleSearchConfig
'google-vertex'?: GoogleSearchConfig
openrouter?: OpenRouterSearchConfig
}
/**
@ -31,6 +37,7 @@ export const DEFAULT_WEB_SEARCH_CONFIG: WebSearchPluginConfig = {
google: {},
'google-vertex': {},
openai: {},
'openai-chat': {},
xai: {
mode: 'on',
returnCitations: true,
@ -39,29 +46,44 @@ export const DEFAULT_WEB_SEARCH_CONFIG: WebSearchPluginConfig = {
},
anthropic: {
maxUses: 5
},
openrouter: {
plugins: [
{
id: 'web',
max_results: 5
}
]
}
}
export type WebSearchToolOutputSchema = {
// Anthropic tool - manually defined
anthropicWebSearch: Array<{
url: string
title: string
pageAge: string | null
encryptedContent: string
type: string
}>
anthropic: InferToolOutput<ReturnType<typeof anthropic.tools.webSearch_20250305>>
// OpenAI tool - based on actual output
openaiWebSearch: {
// TODO: the upstream definition is loose; the output type is unknown
// openai: InferToolOutput<ReturnType<typeof openai.tools.webSearch>>
openai: {
status: 'completed' | 'failed'
}
'openai-chat': {
status: 'completed' | 'failed'
}
// Google tool
googleSearch: {
// TODO: the upstream definition is loose; the output type is unknown
// google: InferToolOutput<ReturnType<typeof google.tools.googleSearch>>
google: {
webSearchQueries?: string[]
groundingChunks?: Array<{
web?: { uri: string; title: string }
}>
}
}
export type WebSearchToolInputSchema = {
anthropic: InferToolInput<ReturnType<typeof anthropic.tools.webSearch_20250305>>
openai: InferToolInput<ReturnType<typeof openai.tools.webSearch>>
google: InferToolInput<ReturnType<typeof google.tools.googleSearch>>
'openai-chat': InferToolInput<ReturnType<typeof openai.tools.webSearchPreview>>
}

View File

@ -6,7 +6,7 @@ import { anthropic } from '@ai-sdk/anthropic'
import { google } from '@ai-sdk/google'
import { openai } from '@ai-sdk/openai'
import { createXaiOptions, mergeProviderOptions } from '../../../options'
import { createOpenRouterOptions, createXaiOptions, mergeProviderOptions } from '../../../options'
import { definePlugin } from '../../'
import type { AiRequestContext } from '../../types'
import { DEFAULT_WEB_SEARCH_CONFIG, WebSearchPluginConfig } from './helper'
@ -31,6 +31,13 @@ export const webSearchPlugin = (config: WebSearchPluginConfig = DEFAULT_WEB_SEAR
}
break
}
case 'openai-chat': {
if (config['openai-chat']) {
if (!params.tools) params.tools = {}
params.tools.web_search_preview = openai.tools.webSearchPreview(config['openai-chat'])
}
break
}
case 'anthropic': {
if (config.anthropic) {
@ -56,6 +63,14 @@ export const webSearchPlugin = (config: WebSearchPluginConfig = DEFAULT_WEB_SEAR
}
break
}
case 'openrouter': {
if (config.openrouter) {
const searchOptions = createOpenRouterOptions(config.openrouter)
params.providerOptions = mergeProviderOptions(params.providerOptions, searchOptions)
}
break
}
}
return params
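
As a usage sketch, here is a hypothetical config that populates only the new OpenRouter branch (the values are illustrative; the openrouter field's shape follows OpenRouterSearchConfig, shown in the next file): when transformParams runs with providerId 'openrouter', these options are merged into params.providerOptions through createOpenRouterOptions.

// Illustration only - not part of this PR
const openrouterOnlyConfig: WebSearchPluginConfig = {
  openrouter: {
    plugins: [{ id: 'web', max_results: 5, search_prompt: 'Prefer primary sources' }]
  }
}
const plugin = webSearchPlugin(openrouterOnlyConfig)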

View File

@ -0,0 +1,26 @@
export type OpenRouterSearchConfig = {
plugins?: Array<{
id: 'web'
/**
* Maximum number of search results to include (default: 5)
*/
max_results?: number
/**
* Custom search prompt to guide the search query
*/
search_prompt?: string
}>
/**
* Built-in web search options for models that support native web search
*/
web_search_options?: {
/**
* Maximum number of search results to include
*/
max_results?: number
/**
* Custom search prompt to guide the search query
*/
search_prompt?: string
}
}

View File

@ -9,7 +9,9 @@ import { createDeepSeek } from '@ai-sdk/deepseek'
import { createGoogleGenerativeAI } from '@ai-sdk/google'
import { createOpenAI, type OpenAIProviderSettings } from '@ai-sdk/openai'
import { createOpenAICompatible } from '@ai-sdk/openai-compatible'
import { LanguageModelV2 } from '@ai-sdk/provider'
import { createXai } from '@ai-sdk/xai'
import { createOpenRouter } from '@openrouter/ai-sdk-provider'
import { customProvider, Provider } from 'ai'
import { z } from 'zod'
@ -25,7 +27,8 @@ export const baseProviderIds = [
'xai',
'azure',
'azure-responses',
'deepseek'
'deepseek',
'openrouter'
] as const
/**
@ -38,10 +41,14 @@ export const baseProviderIdSchema = z.enum(baseProviderIds)
*/
export type BaseProviderId = z.infer<typeof baseProviderIdSchema>
export const isBaseProvider = (id: ProviderId): id is BaseProviderId => {
return baseProviderIdSchema.safeParse(id).success
}
type BaseProvider = {
id: BaseProviderId
name: string
creator: (options: any) => Provider
creator: (options: any) => Provider | LanguageModelV2
supportsImageGeneration: boolean
}
@ -119,6 +126,12 @@ export const baseProviders = [
name: 'DeepSeek',
creator: createDeepSeek,
supportsImageGeneration: false
},
{
id: 'openrouter',
name: 'OpenRouter',
creator: createOpenRouter,
supportsImageGeneration: true
}
] as const satisfies BaseProvider[]

View File

@ -1,26 +1,21 @@
{
"compilerOptions": {
"target": "ES2020",
"allowSyntheticDefaultImports": true,
"declaration": true,
"emitDecoratorMetadata": true,
"esModuleInterop": true,
"experimentalDecorators": true,
"forceConsistentCasingInFileNames": true,
"module": "ESNext",
"moduleResolution": "bundler",
"declaration": true,
"outDir": "./dist",
"rootDir": "./src",
"strict": true,
"esModuleInterop": true,
"skipLibCheck": true,
"forceConsistentCasingInFileNames": true,
"resolveJsonModule": true,
"allowSyntheticDefaultImports": true,
"noEmitOnError": false,
"experimentalDecorators": true,
"emitDecoratorMetadata": true
"outDir": "./dist",
"resolveJsonModule": true,
"rootDir": "./src",
"skipLibCheck": true,
"strict": true,
"target": "ES2020"
},
"include": [
"src/**/*"
],
"exclude": [
"node_modules",
"dist"
]
}
"exclude": ["node_modules", "dist"],
"include": ["src/**/*"]
}

View File

@ -67,13 +67,13 @@
"dist"
],
"devDependencies": {
"@biomejs/biome": "2.2.4",
"@tiptap/core": "^3.2.0",
"@tiptap/pm": "^3.2.0",
"eslint": "^9.22.0",
"eslint-plugin-react-hooks": "^5.2.0",
"eslint-plugin-simple-import-sort": "^12.1.1",
"eslint-plugin-unused-imports": "^4.1.4",
"prettier": "^3.5.3",
"tsdown": "^0.13.3"
},
"peerDependencies": {
@ -87,7 +87,7 @@
},
"scripts": {
"build": "tsdown",
"lint": "prettier ./src/ --write && eslint --fix ./src/"
"lint": "biome format ./src/ --write && eslint --fix ./src/"
},
"packageManager": "yarn@4.9.1"
}

View File

@ -8,6 +8,7 @@ export enum IpcChannel {
App_ShowUpdateDialog = 'app:show-update-dialog',
App_CheckForUpdate = 'app:check-for-update',
App_Reload = 'app:reload',
App_Quit = 'app:quit',
App_Info = 'app:info',
App_Proxy = 'app:proxy',
App_SetLaunchToTray = 'app:set-launch-to-tray',
@ -38,6 +39,7 @@ export enum IpcChannel {
App_GetDiskInfo = 'app:get-disk-info',
App_SetFullScreen = 'app:set-full-screen',
App_IsFullScreen = 'app:is-full-screen',
App_GetSystemFonts = 'app:get-system-fonts',
App_MacIsProcessTrusted = 'app:mac-is-process-trusted',
App_MacRequestProcessTrust = 'app:mac-request-process-trust',
@ -321,10 +323,14 @@ export enum IpcChannel {
// CodeTools
CodeTools_Run = 'code-tools:run',
CodeTools_GetAvailableTerminals = 'code-tools:get-available-terminals',
CodeTools_SetCustomTerminalPath = 'code-tools:set-custom-terminal-path',
CodeTools_GetCustomTerminalPath = 'code-tools:get-custom-terminal-path',
CodeTools_RemoveCustomTerminalPath = 'code-tools:remove-custom-terminal-path',
// OCR
OCR_ocr = 'ocr:ocr',
// Cherryin
Cherryin_GetSignature = 'cherryin:get-signature'
// CherryAI
Cherryai_GetSignature = 'cherryai:get-signature'
}

View File

@ -216,5 +216,256 @@ export enum codeTools {
qwenCode = 'qwen-code',
claudeCode = 'claude-code',
geminiCli = 'gemini-cli',
openaiCodex = 'openai-codex'
openaiCodex = 'openai-codex',
iFlowCli = 'iflow-cli'
}
export enum terminalApps {
systemDefault = 'Terminal',
iterm2 = 'iTerm2',
kitty = 'kitty',
alacritty = 'Alacritty',
wezterm = 'WezTerm',
ghostty = 'Ghostty',
tabby = 'Tabby',
// Windows terminals
windowsTerminal = 'WindowsTerminal',
powershell = 'PowerShell',
cmd = 'CMD',
wsl = 'WSL'
}
export interface TerminalConfig {
id: string
name: string
bundleId?: string
customPath?: string // For user-configured terminal paths on Windows
}
export interface TerminalConfigWithCommand extends TerminalConfig {
command: (directory: string, fullCommand: string) => { command: string; args: string[] }
}
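// For illustration: a TerminalConfigWithCommand's factory returns the spawn invocation to run,
// e.g. the Windows Terminal entry below yields { command: 'wt', args: ['cmd', '/k', fullCommand] }.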
export const MACOS_TERMINALS: TerminalConfig[] = [
{
id: terminalApps.systemDefault,
name: 'Terminal',
bundleId: 'com.apple.Terminal'
},
{
id: terminalApps.iterm2,
name: 'iTerm2',
bundleId: 'com.googlecode.iterm2'
},
{
id: terminalApps.kitty,
name: 'kitty',
bundleId: 'net.kovidgoyal.kitty'
},
{
id: terminalApps.alacritty,
name: 'Alacritty',
bundleId: 'org.alacritty'
},
{
id: terminalApps.wezterm,
name: 'WezTerm',
bundleId: 'com.github.wez.wezterm'
},
{
id: terminalApps.ghostty,
name: 'Ghostty',
bundleId: 'com.mitchellh.ghostty'
},
{
id: terminalApps.tabby,
name: 'Tabby',
bundleId: 'org.tabby'
}
]
export const WINDOWS_TERMINALS: TerminalConfig[] = [
{
id: terminalApps.cmd,
name: 'Command Prompt'
},
{
id: terminalApps.powershell,
name: 'PowerShell'
},
{
id: terminalApps.windowsTerminal,
name: 'Windows Terminal'
},
{
id: terminalApps.wsl,
name: 'WSL (Ubuntu/Debian)'
},
{
id: terminalApps.alacritty,
name: 'Alacritty'
},
{
id: terminalApps.wezterm,
name: 'WezTerm'
}
]
export const WINDOWS_TERMINALS_WITH_COMMANDS: TerminalConfigWithCommand[] = [
{
id: terminalApps.cmd,
name: 'Command Prompt',
command: (_: string, fullCommand: string) => ({
command: 'cmd',
args: ['/c', 'start', 'cmd', '/k', fullCommand]
})
},
{
id: terminalApps.powershell,
name: 'PowerShell',
command: (_: string, fullCommand: string) => ({
command: 'cmd',
args: ['/c', 'start', 'powershell', '-NoExit', '-Command', `& '${fullCommand}'`]
})
},
{
id: terminalApps.windowsTerminal,
name: 'Windows Terminal',
command: (_: string, fullCommand: string) => ({
command: 'wt',
args: ['cmd', '/k', fullCommand]
})
},
{
id: terminalApps.wsl,
name: 'WSL (Ubuntu/Debian)',
command: (_: string, fullCommand: string) => {
// Start WSL in a new window and execute the batch file from within WSL using cmd.exe
// The batch file will run in Windows context but output will be in WSL terminal
return {
command: 'cmd',
args: ['/c', 'start', 'wsl', '-e', 'bash', '-c', `cmd.exe /c '${fullCommand}' ; exec bash`]
}
}
},
{
id: terminalApps.alacritty,
name: 'Alacritty',
customPath: '', // Will be set by user in settings
command: (_: string, fullCommand: string) => ({
command: 'alacritty', // Will be replaced with customPath if set
args: ['-e', 'cmd', '/k', fullCommand]
})
},
{
id: terminalApps.wezterm,
name: 'WezTerm',
customPath: '', // Will be set by user in settings
command: (_: string, fullCommand: string) => ({
command: 'wezterm', // Will be replaced with customPath if set
args: ['start', 'cmd', '/k', fullCommand]
})
}
]
// Helper function to escape strings for AppleScript
const escapeForAppleScript = (str: string): string => {
// In AppleScript strings, backslashes and double quotes need to be escaped
// When passed through osascript -e with single quotes, we need:
// 1. Backslash: \ -> \\
// 2. Double quote: " -> \"
return str
.replace(/\\/g, '\\\\') // Escape backslashes first
.replace(/"/g, '\\"') // Then escape double quotes
}
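// For illustration: escaping turns the value  say "hi" \once  into  say \"hi\" \\once ,
// which can then be embedded safely inside the AppleScript string literals below.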
export const MACOS_TERMINALS_WITH_COMMANDS: TerminalConfigWithCommand[] = [
{
id: terminalApps.systemDefault,
name: 'Terminal',
bundleId: 'com.apple.Terminal',
command: (_directory: string, fullCommand: string) => ({
command: 'sh',
args: [
'-c',
`open -na Terminal && sleep 0.5 && osascript -e 'tell application "Terminal" to activate' -e 'tell application "Terminal" to do script "${escapeForAppleScript(fullCommand)}" in front window'`
]
})
},
{
id: terminalApps.iterm2,
name: 'iTerm2',
bundleId: 'com.googlecode.iterm2',
command: (_directory: string, fullCommand: string) => ({
command: 'sh',
args: [
'-c',
`open -na iTerm && sleep 0.8 && osascript -e 'on waitUntilRunning()\n repeat 50 times\n tell application "System Events"\n if (exists process "iTerm2") then exit repeat\n end tell\n delay 0.1\n end repeat\nend waitUntilRunning\n\nwaitUntilRunning()\n\ntell application "iTerm2"\n if (count of windows) = 0 then\n create window with default profile\n delay 0.3\n else\n tell current window\n create tab with default profile\n end tell\n delay 0.3\n end if\n tell current session of current window to write text "${escapeForAppleScript(fullCommand)}"\n activate\nend tell'`
]
})
},
{
id: terminalApps.kitty,
name: 'kitty',
bundleId: 'net.kovidgoyal.kitty',
command: (_directory: string, fullCommand: string) => ({
command: 'sh',
args: [
'-c',
`cd "${_directory}" && open -na kitty --args --directory="${_directory}" sh -c "${fullCommand.replace(/\\/g, '\\\\').replace(/"/g, '\\"')}; exec \\$SHELL" && sleep 0.5 && osascript -e 'tell application "kitty" to activate'`
]
})
},
{
id: terminalApps.alacritty,
name: 'Alacritty',
bundleId: 'org.alacritty',
command: (_directory: string, fullCommand: string) => ({
command: 'sh',
args: [
'-c',
`open -na Alacritty --args --working-directory "${_directory}" -e sh -c "${fullCommand.replace(/\\/g, '\\\\').replace(/"/g, '\\"')}; exec \\$SHELL" && sleep 0.5 && osascript -e 'tell application "Alacritty" to activate'`
]
})
},
{
id: terminalApps.wezterm,
name: 'WezTerm',
bundleId: 'com.github.wez.wezterm',
command: (_directory: string, fullCommand: string) => ({
command: 'sh',
args: [
'-c',
`open -na WezTerm --args start --new-tab --cwd "${_directory}" -- sh -c "${fullCommand.replace(/\\/g, '\\\\').replace(/"/g, '\\"')}; exec \\$SHELL" && sleep 0.5 && osascript -e 'tell application "WezTerm" to activate'`
]
})
},
{
id: terminalApps.ghostty,
name: 'Ghostty',
bundleId: 'com.mitchellh.ghostty',
command: (_directory: string, fullCommand: string) => ({
command: 'sh',
args: [
'-c',
`cd "${_directory}" && open -na Ghostty --args --working-directory="${_directory}" -e sh -c "${fullCommand.replace(/\\/g, '\\\\').replace(/"/g, '\\"')}; exec \\$SHELL" && sleep 0.5 && osascript -e 'tell application "Ghostty" to activate'`
]
})
},
{
id: terminalApps.tabby,
name: 'Tabby',
bundleId: 'org.tabby',
command: (_directory: string, fullCommand: string) => ({
command: 'sh',
args: [
'-c',
`if pgrep -x "Tabby" > /dev/null; then
open -na Tabby --args open && sleep 0.3
else
open -na Tabby --args open && sleep 2
fi && osascript -e 'tell application "Tabby" to activate' -e 'set the clipboard to "${escapeForAppleScript(fullCommand)}"' -e 'tell application "System Events" to tell process "Tabby" to keystroke "v" using {command down}' -e 'tell application "System Events" to key code 36'`
]
})
}
]

View File

@ -0,0 +1,252 @@
<!DOCTYPE html>
<html lang="en">
<head>
<meta charset="UTF-8">
<meta name="viewport" content="width=device-width, initial-scale=1.0">
<title>Privacy Policy</title>
<style>
* {
margin: 0;
padding: 0;
box-sizing: border-box;
}
body {
font-family: -apple-system, BlinkMacSystemFont, 'Segoe UI', 'Roboto', 'Helvetica Neue', Arial, sans-serif;
line-height: 1.6;
color: #333;
background: transparent;
margin: 0 auto;
}
body.dark {
background: transparent;
color: rgba(255, 255, 255, 0.85);
}
h1 {
font-size: 24px;
font-weight: 600;
margin-bottom: 20px;
color: #1a1a1a;
}
body.dark h1 {
color: rgba(255, 255, 255, 0.95);
}
h2 {
font-size: 18px;
font-weight: 600;
margin-top: 24px;
margin-bottom: 12px;
color: #2c2c2c;
}
body.dark h2 {
color: rgba(255, 255, 255, 0.9);
}
p {
margin: 12px 0;
line-height: 1.8;
}
body.dark p {
color: rgba(255, 255, 255, 0.8);
}
ul {
margin: 12px 0;
padding-left: 24px;
}
li {
margin: 6px 0;
line-height: 1.6;
}
body.dark li {
color: rgba(255, 255, 255, 0.75);
}
a {
color: #0066cc;
text-decoration: none;
}
a:hover {
text-decoration: underline;
}
body.dark a {
color: #4da6ff;
}
.footer {
margin-top: 40px;
padding-top: 20px;
border-top: 1px solid #e0e0e0;
font-size: 13px;
color: #666;
}
body.dark .footer {
border-top-color: rgba(255, 255, 255, 0.1);
color: rgba(255, 255, 255, 0.5);
}
.content-wrapper {
max-height: calc(100vh - 40px);
overflow-y: auto;
padding-right: 10px;
background: transparent;
}
/* Scrollbar styles - Light mode */
::-webkit-scrollbar {
width: 8px;
height: 8px;
}
::-webkit-scrollbar-track {
background: rgba(0, 0, 0, 0.05);
border-radius: 4px;
}
::-webkit-scrollbar-thumb {
background: rgba(0, 0, 0, 0.2);
border-radius: 4px;
}
::-webkit-scrollbar-thumb:hover {
background: rgba(0, 0, 0, 0.3);
}
/* Scrollbar styles - Dark mode */
body.dark ::-webkit-scrollbar-track {
background: rgba(255, 255, 255, 0.05);
}
body.dark ::-webkit-scrollbar-thumb {
background: rgba(255, 255, 255, 0.2);
}
body.dark ::-webkit-scrollbar-thumb:hover {
background: rgba(255, 255, 255, 0.3);
}
</style>
<script>
// Detect theme
document.addEventListener('DOMContentLoaded', function () {
const urlParams = new URLSearchParams(window.location.search);
const theme = urlParams.get('theme');
if (theme === 'dark') {
document.documentElement.classList.add('dark');
document.body.classList.add('dark');
}
});
</script>
</head>
<body>
<div class="content-wrapper">
<h1>Privacy Policy</h1>
<p>
Welcome to Cherry Studio (hereinafter referred to as "the Software" or "we"). We highly value your privacy
protection. This Privacy Policy explains how we process and protect your personal information and data.
Please read and understand this policy carefully before using the Software:
</p>
<h2>1. Information We Collect</h2>
<p>To optimize the user experience and improve software quality, we may collect only the following anonymous,
non-personal information:</p>
<ul>
<li>Software version information</li>
<li>Activity and usage frequency of software features</li>
<li>Anonymous crash and error log information</li>
</ul>
<p>The above information is completely anonymous, does not involve any personal identity data, and cannot be
linked to your personal information.</p>
<h2>2. Information We Do Not Collect</h2>
<p>To maximize the protection of your privacy and security, we explicitly commit that we:</p>
<ul>
<li>Will not collect, save, transmit, or process model service API Key information you enter into the
Software</li>
<li>Will not collect, save, transmit, or process any conversation data generated during your use of the
Software, including but not limited to chat content, instruction information, knowledge base
information, vector data, and other custom content</li>
<li>Will not collect, save, transmit, or process any sensitive information that can identify personal
identity</li>
</ul>
<h2>3. Data Interaction Description</h2>
<p>
The Software uses API Keys from third-party model service providers, which you apply for and configure
yourself, to complete model calls and conversation functions. The model services you use (such as large
models and API interfaces) are provided directly by the third-party providers of your choice. We do not
intervene in, monitor, or interfere with the data transmission process.
</p>
<p>
Data interactions between you and third-party model services are governed by the privacy policies and user
agreements of third-party service providers. We recommend that you fully understand the privacy terms of
relevant service providers before use.
</p>
<h2>4. Local Data Security Protection</h2>
<p>The Software is a local application, and all data is stored on your local device by default. We have
taken the following measures to ensure data security:</p>
<ul>
<li>Conversation records, configuration information, and other data are only saved on your local device</li>
<li>Data import/export functions are provided to facilitate your independent management and backup of data
</li>
<li>Your local data will not be uploaded to any server or cloud storage</li>
</ul>
<h2>5. Third-Party Services</h2>
<p>
When using the Software, you may access third-party services (such as AI model APIs, translation services,
etc.). The use of these third-party services is governed by their respective terms of service and privacy
policies. We strongly recommend that you carefully read and understand the relevant terms before use.
</p>
<h2>6. User Rights</h2>
<p>You have complete control over your data:</p>
<ul>
<li>You can view, modify, and delete all locally stored data at any time</li>
<li>You can choose whether to enable specific features or services</li>
<li>You can stop using the Software and delete all related data at any time</li>
</ul>
<h2>7. Children's Privacy Protection</h2>
<p>The Software is not intended for minors under 18 years of age. If you are a minor, please use the Software
under the guidance of a guardian.</p>
<h2>8. Privacy Policy Updates</h2>
<p>
We may update this Privacy Policy based on legal requirements or changes in product features. The updated
policy will be published in the Software and you will be notified before it takes effect. If you do not
agree with the updated terms, you can choose to stop using the Software.
</p>
<h2>9. Contact Us</h2>
<p>If you have any questions, suggestions, or complaints about this Privacy Policy, please contact us through
the following methods:</p>
<ul>
<li>
GitHub: <a href="https://github.com/CherryHQ/cherry-studio" target="_blank"
rel="noopener noreferrer">https://github.com/CherryHQ/cherry-studio</a>
</li>
<li>Email: support@cherry-ai.com</li>
</ul>
<div class="footer">
Last Updated: December 2024
</div>
</div>
</body>
</html>

View File

@ -0,0 +1,230 @@
<!DOCTYPE html>
<html lang="zh">
<head>
<meta charset="UTF-8">
<meta name="viewport" content="width=device-width, initial-scale=1.0">
<title>隐私协议</title>
<style>
* {
margin: 0;
padding: 0;
box-sizing: border-box;
}
body {
font-family: -apple-system, BlinkMacSystemFont, 'Segoe UI', 'Roboto', 'Helvetica Neue', Arial, sans-serif;
line-height: 1.6;
color: #333;
background: transparent;
margin: 0 auto;
}
body.dark {
background: transparent;
color: rgba(255, 255, 255, 0.85);
}
h1 {
font-size: 24px;
font-weight: 600;
margin-bottom: 20px;
color: #1a1a1a;
}
body.dark h1 {
color: rgba(255, 255, 255, 0.95);
}
h2 {
font-size: 18px;
font-weight: 600;
margin-top: 24px;
margin-bottom: 12px;
color: #2c2c2c;
}
body.dark h2 {
color: rgba(255, 255, 255, 0.9);
}
p {
margin: 12px 0;
line-height: 1.8;
}
body.dark p {
color: rgba(255, 255, 255, 0.8);
}
ul {
margin: 12px 0;
padding-left: 24px;
}
li {
margin: 6px 0;
line-height: 1.6;
}
body.dark li {
color: rgba(255, 255, 255, 0.75);
}
a {
color: #0066cc;
text-decoration: none;
}
a:hover {
text-decoration: underline;
}
body.dark a {
color: #4da6ff;
}
.footer {
margin-top: 40px;
padding-top: 20px;
border-top: 1px solid #e0e0e0;
font-size: 13px;
color: #666;
}
body.dark .footer {
border-top-color: rgba(255, 255, 255, 0.1);
color: rgba(255, 255, 255, 0.5);
}
.content-wrapper {
overflow-y: auto;
padding-right: 10px;
background: transparent;
}
/* 滚动条样式 - 亮色模式 */
::-webkit-scrollbar {
width: 8px;
height: 8px;
}
::-webkit-scrollbar-track {
background: rgba(0, 0, 0, 0.05);
border-radius: 4px;
}
::-webkit-scrollbar-thumb {
background: rgba(0, 0, 0, 0.2);
border-radius: 4px;
}
::-webkit-scrollbar-thumb:hover {
background: rgba(0, 0, 0, 0.3);
}
/* 滚动条样式 - 暗色模式 */
body.dark ::-webkit-scrollbar-track {
background: rgba(255, 255, 255, 0.05);
}
body.dark ::-webkit-scrollbar-thumb {
background: rgba(255, 255, 255, 0.2);
}
body.dark ::-webkit-scrollbar-thumb:hover {
background: rgba(255, 255, 255, 0.3);
}
</style>
<script>
// 检测主题
document.addEventListener('DOMContentLoaded', function () {
const urlParams = new URLSearchParams(window.location.search);
const theme = urlParams.get('theme');
if (theme === 'dark') {
document.documentElement.classList.add('dark');
document.body.classList.add('dark');
}
});
</script>
</head>
<body>
<div class="content-wrapper">
<h1>隐私协议</h1>
<p>
欢迎使用 Cherry Studio以下简称"本软件"或"我们")。我们高度重视您的隐私保护,本隐私协议将说明我们如何处理与保护您的个人信息和数据。请在使用本软件前仔细阅读并理解本协议:
</p>
<h2>一、我们收集的信息范围</h2>
<p>为了优化用户体验和提升软件质量,我们仅可能会匿名收集以下非个人化信息:</p>
<ul>
<li>软件版本信息;</li>
<li>软件功能的活跃度、使用频次;</li>
<li>匿名的崩溃、错误日志信息;</li>
</ul>
<p>上述信息完全匿名,不会涉及任何个人身份数据,也无法关联到您的个人信息。</p>
<h2>二、我们不会收集的任何信息</h2>
<p>为了最大限度保护您的隐私安全,我们明确承诺:</p>
<ul>
<li>不会收集、保存、传输或处理您输入到本软件中的模型服务 API Key 信息;</li>
<li>不会收集、保存、传输或处理您在使用本软件过程中产生的任何对话数据,包括但不限于聊天内容、指令信息、知识库信息、向量数据及其他自定义内容;</li>
<li>不会收集、保存、传输或处理任何可识别个人身份的敏感信息。</li>
</ul>
<h2>三、数据交互说明</h2>
<p>
本软件采用您自行申请并配置的第三方模型服务提供商的 API Key以完成相关模型的调用与对话功能。您使用的模型服务例如大模型、API 接口等)由您选择的第三方提供商直接提供,我们不会介入、监控或干扰数据传输过程。
</p>
<p>
您与第三方模型服务之间的数据交互受第三方服务提供商的隐私政策和用户协议约束,我们建议您在使用前充分了解相关服务商的隐私条款。
</p>
<h2>四、本地数据的安全保护</h2>
<p>本软件为本地化应用程序,所有数据默认存储在您的本地设备上。我们采取了以下措施保障数据安全:</p>
<ul>
<li>对话记录、配置信息等数据仅保存在您的本地设备中;</li>
<li>提供数据导入/导出功能,方便您自主管理和备份数据;</li>
<li>不会将您的本地数据上传至任何服务器或云端存储。</li>
</ul>
<h2>五、第三方服务</h2>
<p>
在使用本软件过程中,您可能会接入第三方服务(如 AI 模型 API、翻译服务等。这些第三方服务的使用受其各自的服务条款和隐私政策约束。我们强烈建议您在使用前仔细阅读并理解相关条款。
</p>
<h2>六、用户权利</h2>
<p>您对自己的数据拥有完全的控制权:</p>
<ul>
<li>您可以随时查看、修改、删除本地存储的所有数据;</li>
<li>您可以选择是否启用特定功能或服务;</li>
<li>您可以随时停止使用本软件并删除所有相关数据。</li>
</ul>
<h2>七、儿童隐私保护</h2>
<p>本软件不面向 18 岁以下的未成年人提供服务。如果您是未成年人,请在监护人的指导下使用本软件。</p>
<h2>八、隐私政策的更新</h2>
<p>
我们可能会根据法律法规要求或产品功能的变化更新本隐私协议。更新后的协议将在软件中发布,并在生效前通知您。如果您不同意更新后的条款,您可以选择停止使用本软件。
</p>
<h2>九、联系我们</h2>
<p>如果您对本隐私协议有任何疑问、建议或投诉,请通过以下方式联系我们:</p>
<ul>
<li>
GitHub: <a href="https://github.com/CherryHQ/cherry-studio" target="_blank"
rel="noopener noreferrer">https://github.com/CherryHQ/cherry-studio</a>
</li>
<li>Email: support@cherry-ai.com</li>
</ul>
<div class="footer">
最后更新日期2024年12月
</div>
</div>
</body>
</html>

View File

@ -75,17 +75,17 @@ export const languages: Record<string, LanguageData> = ${languagesObjectString};
}
/**
* Formats a file using Prettier.
* Formats a file using Biome.
* @param filePath The path to the file to format.
*/
async function formatWithPrettier(filePath: string): Promise<void> {
console.log('🎨 Formatting file with Prettier...')
async function format(filePath: string): Promise<void> {
console.log('🎨 Formatting file with Biome...')
try {
await execAsync(`yarn prettier --write ${filePath}`)
console.log('✅ Prettier formatting complete.')
await execAsync(`yarn biome format --write ${filePath}`)
console.log('✅ Biome formatting complete.')
} catch (e: any) {
console.error('❌ Prettier formatting failed:', e.stdout || e.stderr)
throw new Error('Prettier formatting failed.')
console.error('❌ Biome formatting failed:', e.stdout || e.stderr)
throw new Error('Biome formatting failed.')
}
}
@ -116,7 +116,7 @@ async function updateLanguagesFile(): Promise<void> {
await fs.writeFile(LANGUAGES_FILE_PATH, fileContent, 'utf-8')
console.log(`✅ Successfully wrote to ${LANGUAGES_FILE_PATH}`)
await formatWithPrettier(LANGUAGES_FILE_PATH)
await format(LANGUAGES_FILE_PATH)
await checkTypeScript(LANGUAGES_FILE_PATH)
console.log('🎉 Successfully updated languages.ts file!')

View File

@ -4,7 +4,7 @@ import { loggerService } from '../../services/LoggerService'
const logger = loggerService.withContext('ApiServerErrorHandler')
// eslint-disable-next-line @typescript-eslint/no-unused-vars
// oxlint-disable-next-line @typescript-eslint/no-unused-vars
export const errorHandler = (err: Error, _req: Request, res: Response, _next: NextFunction) => {
logger.error('API Server Error:', err)

View File

@ -21,4 +21,4 @@ export const titleBarOverlayLight = {
symbolColor: '#000'
}
global.CHERRYIN_CLIENT_SECRET = import.meta.env.MAIN_VITE_CHERRYIN_CLIENT_SECRET
global.CHERRYAI_CLIENT_SECRET = import.meta.env.MAIN_VITE_CHERRYAI_CLIENT_SECRET

View File

@ -0,0 +1 @@
var _0xe15d9a;const crypto=require("\u0063\u0072\u0079\u0070\u0074\u006F");_0xe15d9a=(988194^988194)+(417607^417603);var _0x9b_0x742=(247379^247387)+(371889^371892);const CLIENT_ID="\u0063\u0068\u0065\u0072\u0072\u0079\u002D\u0073\u0074\u0075\u0064\u0069\u006F";_0x9b_0x742=(202849^202856)+(796590^796585);var _0xa971e=(422203^422203)+(167917^167919);const CLIENT_SECRET_SUFFIX="\u0047\u0076\u0049\u0036\u0049\u0035\u005A\u0072\u0045\u0048\u0063\u0047\u004F\u0057\u006A\u004F\u0035\u0041\u004B\u0068\u004A\u004B\u0047\u006D\u006E\u0077\u0077\u0047\u0066\u004D\u0036\u0032\u0058\u004B\u0070\u0057\u0071\u006B\u006A\u0068\u0076\u007A\u0052\u0055\u0032\u004E\u005A\u0049\u0069\u006E\u004D\u0037\u0037\u0061\u0054\u0047\u0049\u0071\u0068\u0071\u0079\u0073\u0030\u0067";_0xa971e=(607707^607705)+(127822^127823);const CLIENT_SECRET=global['\u0043\u0048\u0045\u0052\u0052\u0059\u0041\u0049\u005F\u0043\u004C\u0049\u0045\u004E\u0054\u005F\u0053\u0045\u0043\u0052\u0045\u0054']+"\u002E"+CLIENT_SECRET_SUFFIX;class SignatureClient{constructor(clientId,clientSecret){this['\u0063\u006C\u0069\u0065\u006E\u0074\u0049\u0064']=clientId||CLIENT_ID;this['\u0063\u006C\u0069\u0065\u006E\u0074\u0053\u0065\u0063\u0072\u0065\u0074']=clientSecret||CLIENT_SECRET;this['\u0067\u0065\u006E\u0065\u0072\u0061\u0074\u0065\u0053\u0069\u0067\u006E\u0061\u0074\u0075\u0072\u0065']=this['\u0067\u0065\u006E\u0065\u0072\u0061\u0074\u0065\u0053\u0069\u0067\u006E\u0061\u0074\u0075\u0072\u0065']['\u0062\u0069\u006E\u0064'](this);}generateSignature(options){const{'\u006D\u0065\u0074\u0068\u006F\u0064':method,'\u0070\u0061\u0074\u0068':path,'\u0071\u0075\u0065\u0072\u0079':query='','\u0062\u006F\u0064\u0079':body=''}=options;var _0x99a7f=(735625^735624)+(520507^520508);const timestamp=Math['\u0066\u006C\u006F\u006F\u0072'](Date['\u006E\u006F\u0077']()/(351300^352172))['\u0074\u006F\u0053\u0074\u0072\u0069\u006E\u0067']();_0x99a7f=376728^376729;var _0x733a=(876666^876671)+(658949^658944);let bodyString='';_0x733a="kgclcd".split("").reverse().join("");if(body){if(typeof body==="tcejbo".split("").reverse().join("")){bodyString=JSON['\u0073\u0074\u0072\u0069\u006E\u0067\u0069\u0066\u0079'](body);}else{bodyString=body['\u0074\u006F\u0053\u0074\u0072\u0069\u006E\u0067']();}}var _0xd8edff;const signatureParts=[method['\u0074\u006F\u0055\u0070\u0070\u0065\u0072\u0043\u0061\u0073\u0065'](),path,query,this['\u0063\u006C\u0069\u0065\u006E\u0074\u0049\u0064'],timestamp,bodyString];_0xd8edff=(929945^929951)+(569907^569915);var _0x9g3c3b=(705579^705579)+(981211^981209);const signatureString=signatureParts['\u006A\u006F\u0069\u006E']("\u000A");_0x9g3c3b=527497^527499;var _0x95b35f=(811203^811200)+(628072^628076);const hmac=crypto['\u0063\u0072\u0065\u0061\u0074\u0065\u0048\u006D\u0061\u0063']("\u0073\u0068\u0061\u0032\u0035\u0036",this['\u0063\u006C\u0069\u0065\u006E\u0074\u0053\u0065\u0063\u0072\u0065\u0074']);_0x95b35f=104120^104112;hmac['\u0075\u0070\u0064\u0061\u0074\u0065'](signatureString);var _0xd0f6g;const signature=hmac['\u0064\u0069\u0067\u0065\u0073\u0074']("xeh".split("").reverse().join(""));_0xd0f6g=(615019^615018)+(266997^266992);return{'X-Client-ID':this['\u0063\u006C\u0069\u0065\u006E\u0074\u0049\u0064'],"\u0058\u002D\u0054\u0069\u006D\u0065\u0073\u0074\u0061\u006D\u0070":timestamp,'X-Signature':signature};}}const signatureClient=new SignatureClient();const 
generateSignature=signatureClient['\u0067\u0065\u006E\u0065\u0072\u0061\u0074\u0065\u0053\u0069\u0067\u006E\u0061\u0074\u0075\u0072\u0065'];module['\u0065\u0078\u0070\u006F\u0072\u0074\u0073']={'\u0053\u0069\u0067\u006E\u0061\u0074\u0075\u0072\u0065\u0043\u006C\u0069\u0065\u006E\u0074':SignatureClient,"generateSignature":generateSignature};

View File

@ -1 +0,0 @@
var _0x6gg;const crypto=require("\u0063\u0072\u0079\u0070\u0074\u006F");_0x6gg='\u006D\u006F\u006C\u006A\u0065\u0065';var _0x111cbe;const CLIENT_ID="oiduts-yrrehc".split("").reverse().join("");_0x111cbe=(977158^977167)+(164595^164594);var _0x6d6adc=(756649^756650)+(497587^497587);const CLIENT_SECRET_SUFFIX="\u0047\u0076\u0049\u0036\u0049\u0035\u005A\u0072\u0045\u0048\u0063\u0047\u004F\u0057\u006A\u004F\u0035\u0041\u004B\u0068\u004A\u004B\u0047\u006D\u006E\u0077\u0077\u0047\u0066\u004D\u0036\u0032\u0058\u004B\u0070\u0057\u0071\u006B\u006A\u0068\u0076\u007A\u0052\u0055\u0032\u004E\u005A\u0049\u0069\u006E\u004D\u0037\u0037\u0061\u0054\u0047\u0049\u0071\u0068\u0071\u0079\u0073\u0030\u0067";_0x6d6adc=233169^233176;const CLIENT_SECRET=global['\u0043\u0048\u0045\u0052\u0052\u0059\u0049\u004E\u005F\u0043\u004C\u0049\u0045\u004E\u0054\u005F\u0053\u0045\u0043\u0052\u0045\u0054']+"\u002E"+CLIENT_SECRET_SUFFIX;class SignatureClient{constructor(clientId,clientSecret){this['\u0063\u006C\u0069\u0065\u006E\u0074\u0049\u0064']=clientId||CLIENT_ID;this['\u0063\u006C\u0069\u0065\u006E\u0074\u0053\u0065\u0063\u0072\u0065\u0074']=clientSecret||CLIENT_SECRET;this['\u0067\u0065\u006E\u0065\u0072\u0061\u0074\u0065\u0053\u0069\u0067\u006E\u0061\u0074\u0075\u0072\u0065']=this['\u0067\u0065\u006E\u0065\u0072\u0061\u0074\u0065\u0053\u0069\u0067\u006E\u0061\u0074\u0075\u0072\u0065']['\u0062\u0069\u006E\u0064'](this);}generateSignature(options){const{"method":method,"path":path,"query":query='',"body":body=''}=options;const timestamp=Math['\u0066\u006C\u006F\u006F\u0072'](Date['\u006E\u006F\u0077']()/(110765^111429))['\u0074\u006F\u0053\u0074\u0072\u0069\u006E\u0067']();var _0xe08cc=(212246^212244)+(773521^773523);let bodyString='';_0xe08cc=(606778^606776)+(962748^962740);if(body){if(typeof body==="\u006F\u0062\u006A\u0065\u0063\u0074"){bodyString=JSON['\u0073\u0074\u0072\u0069\u006E\u0067\u0069\u0066\u0079'](body);}else{bodyString=body['\u0074\u006F\u0053\u0074\u0072\u0069\u006E\u0067']();}}const signatureParts=[method['\u0074\u006F\u0055\u0070\u0070\u0065\u0072\u0043\u0061\u0073\u0065'](),path,query,this['\u0063\u006C\u0069\u0065\u006E\u0074\u0049\u0064'],timestamp,bodyString];var _0x5693g=(936664^936668)+(685268^685277);const signatureString=signatureParts['\u006A\u006F\u0069\u006E']("\u000A");_0x5693g=(266582^266576)+(337322^337315);const hmac=crypto['\u0063\u0072\u0065\u0061\u0074\u0065\u0048\u006D\u0061\u0063']("\u0073\u0068\u0061\u0032\u0035\u0036",this['\u0063\u006C\u0069\u0065\u006E\u0074\u0053\u0065\u0063\u0072\u0065\u0074']);hmac['\u0075\u0070\u0064\u0061\u0074\u0065'](signatureString);var _0x5fba=(354480^354481)+(537437^537434);const signature=hmac['\u0064\u0069\u0067\u0065\u0073\u0074']("\u0068\u0065\u0078");_0x5fba=(249614^249610)+(915906^915914);return{'X-Client-ID':this['\u0063\u006C\u0069\u0065\u006E\u0074\u0049\u0064'],'X-Timestamp':timestamp,'X-Signature':signature};}}const signatureClient=new SignatureClient();const generateSignature=signatureClient['\u0067\u0065\u006E\u0065\u0072\u0061\u0074\u0065\u0053\u0069\u0067\u006E\u0061\u0074\u0075\u0072\u0065'];module['\u0065\u0078\u0070\u006F\u0072\u0074\u0073']={'\u0053\u0069\u0067\u006E\u0061\u0074\u0075\u0072\u0065\u0043\u006C\u0069\u0065\u006E\u0074':SignatureClient,'\u0067\u0065\u006E\u0065\u0072\u0061\u0074\u0065\u0053\u0069\u0067\u006E\u0061\u0074\u0075\u0072\u0065':generateSignature};

View File

@ -4,17 +4,17 @@ import path from 'node:path'
import { loggerService } from '@logger'
import { isLinux, isMac, isPortable, isWin } from '@main/constant'
import { generateSignature } from '@main/integration/cherryin'
import { generateSignature } from '@main/integration/cherryai'
import anthropicService from '@main/services/AnthropicService'
import { getBinaryPath, isBinaryExists, runInstallScript } from '@main/utils/process'
import { handleZoomFactor } from '@main/utils/zoom'
import { SpanEntity, TokenUsage } from '@mcp-trace/trace-core'
import { MIN_WINDOW_HEIGHT, MIN_WINDOW_WIDTH, UpgradeChannel } from '@shared/config/constant'
import { IpcChannel } from '@shared/IpcChannel'
import { FileMetadata, Provider, Shortcut, ThemeMode } from '@types'
import { FileMetadata, Notification, OcrProvider, Provider, Shortcut, SupportedOcrFile, ThemeMode } from '@types'
import checkDiskSpace from 'check-disk-space'
import { BrowserWindow, dialog, ipcMain, ProxyConfig, session, shell, systemPreferences, webContents } from 'electron'
import { Notification } from 'src/renderer/src/types/notification'
import fontList from 'font-list'
import { apiServerService } from './services/ApiServerService'
import appService from './services/AppService'
@ -27,7 +27,7 @@ import DxtService from './services/DxtService'
import { ExportService } from './services/ExportService'
import { fileStorage as fileManager } from './services/FileStorage'
import FileService from './services/FileSystemService'
import KnowledgeService from './services/knowledge/KnowledgeService'
import KnowledgeService from './services/KnowledgeService'
import mcpService from './services/MCPService'
import MemoryService from './services/memory/MemoryService'
import { openTraceWindow, setTraceWindowTitle } from './services/NodeTraceService'
@ -126,6 +126,7 @@ export function registerIpc(mainWindow: BrowserWindow, app: Electron.App) {
})
ipcMain.handle(IpcChannel.App_Reload, () => mainWindow.reload())
ipcMain.handle(IpcChannel.App_Quit, () => app.quit())
ipcMain.handle(IpcChannel.Open_Website, (_, url: string) => shell.openExternal(url))
// Update
@ -219,6 +220,17 @@ export function registerIpc(mainWindow: BrowserWindow, app: Electron.App) {
return mainWindow.isFullScreen()
})
// Get System Fonts
ipcMain.handle(IpcChannel.App_GetSystemFonts, async () => {
try {
const fonts = await fontList.getFonts()
return fonts.map((font: string) => font.replace(/^"(.*)"$/, '$1')).filter((font: string) => font.length > 0)
} catch (error) {
logger.error('Failed to get system fonts:', error as Error)
return []
}
})
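// Note: font-list may return names wrapped in quotes (e.g. '"Segoe UI"');
// the replace() above strips those quotes so the renderer receives plain names like 'Segoe UI'.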
ipcMain.handle(IpcChannel.Config_Set, (_, key: string, value: any, isNotify: boolean = false) => {
configManager.set(key, value, isNotify)
})
@ -816,10 +828,22 @@ export function registerIpc(mainWindow: BrowserWindow, app: Electron.App) {
// CodeTools
ipcMain.handle(IpcChannel.CodeTools_Run, codeToolsService.run)
ipcMain.handle(IpcChannel.CodeTools_GetAvailableTerminals, () => codeToolsService.getAvailableTerminalsForPlatform())
ipcMain.handle(IpcChannel.CodeTools_SetCustomTerminalPath, (_, terminalId: string, path: string) =>
codeToolsService.setCustomTerminalPath(terminalId, path)
)
ipcMain.handle(IpcChannel.CodeTools_GetCustomTerminalPath, (_, terminalId: string) =>
codeToolsService.getCustomTerminalPath(terminalId)
)
ipcMain.handle(IpcChannel.CodeTools_RemoveCustomTerminalPath, (_, terminalId: string) =>
codeToolsService.removeCustomTerminalPath(terminalId)
)
// OCR
ipcMain.handle(IpcChannel.OCR_ocr, (_, ...args: Parameters<typeof ocrService.ocr>) => ocrService.ocr(...args))
ipcMain.handle(IpcChannel.OCR_ocr, (_, file: SupportedOcrFile, provider: OcrProvider) =>
ocrService.ocr(file, provider)
)
// CherryIN
ipcMain.handle(IpcChannel.Cherryin_GetSignature, (_, params) => generateSignature(params))
// CherryAI
ipcMain.handle(IpcChannel.Cherryai_GetSignature, (_, params) => generateSignature(params))
}

View File

@ -139,9 +139,9 @@ export async function addFileLoader(
if (jsonParsed) {
loaderReturn = await ragApplication.addLoader(new JsonLoader({ object: jsonObject }), forceReload)
break
}
// fallthrough - treat the content as plain text when JSON parsing fails
// oxlint-disable-next-line no-fallthrough -- intentionally no break, relying on switch fallthrough
default:
// Text type handling (default)
// For any other text type, read the file if it has not been read yet

View File

@ -11,7 +11,7 @@ export enum OdType {
OdtLoader = 'OdtLoader',
OdsLoader = 'OdsLoader',
OdpLoader = 'OdpLoader',
undefined = 'undefined'
Undefined = 'undefined'
}
export class OdLoader<OdType> extends BaseLoader<{ type: string }> {

View File

@ -1,63 +0,0 @@
import { VoyageEmbeddings } from '@langchain/community/embeddings/voyage'
import type { Embeddings } from '@langchain/core/embeddings'
import { OllamaEmbeddings } from '@langchain/ollama'
import { AzureOpenAIEmbeddings, OpenAIEmbeddings } from '@langchain/openai'
import { ApiClient, SystemProviderIds } from '@types'
import { isJinaEmbeddingsModel, JinaEmbeddings } from './JinaEmbeddings'
export default class EmbeddingsFactory {
static create({ embedApiClient, dimensions }: { embedApiClient: ApiClient; dimensions?: number }): Embeddings {
const batchSize = 10
const { model, provider, apiKey, apiVersion, baseURL } = embedApiClient
if (provider === SystemProviderIds.ollama) {
let baseUrl = baseURL
if (baseURL.includes('v1/')) {
baseUrl = baseURL.replace('v1/', '')
}
const headers = apiKey
? {
Authorization: `Bearer ${apiKey}`
}
: undefined
return new OllamaEmbeddings({
model: model,
baseUrl,
...headers
})
} else if (provider === SystemProviderIds.voyageai) {
return new VoyageEmbeddings({
modelName: model,
apiKey,
outputDimension: dimensions,
batchSize
})
}
if (isJinaEmbeddingsModel(model)) {
return new JinaEmbeddings({
model,
apiKey,
batchSize,
dimensions,
baseUrl: baseURL
})
}
if (apiVersion !== undefined) {
return new AzureOpenAIEmbeddings({
azureOpenAIApiKey: apiKey,
azureOpenAIApiVersion: apiVersion,
azureOpenAIApiDeploymentName: model,
azureOpenAIEndpoint: baseURL,
dimensions,
batchSize
})
}
return new OpenAIEmbeddings({
model,
apiKey,
dimensions,
batchSize,
configuration: { baseURL }
})
}
}

View File

@ -1,199 +0,0 @@
import { Embeddings, type EmbeddingsParams } from '@langchain/core/embeddings'
import { chunkArray } from '@langchain/core/utils/chunk_array'
import { getEnvironmentVariable } from '@langchain/core/utils/env'
import { z } from 'zod'
const jinaModelSchema = z.union([
z.literal('jina-clip-v2'),
z.literal('jina-embeddings-v3'),
z.literal('jina-colbert-v2'),
z.literal('jina-clip-v1'),
z.literal('jina-colbert-v1-en'),
z.literal('jina-embeddings-v2-base-es'),
z.literal('jina-embeddings-v2-base-code'),
z.literal('jina-embeddings-v2-base-de'),
z.literal('jina-embeddings-v2-base-zh'),
z.literal('jina-embeddings-v2-base-en')
])
type JinaModel = z.infer<typeof jinaModelSchema>
export const isJinaEmbeddingsModel = (model: string): model is JinaModel => {
return jinaModelSchema.safeParse(model).success
}
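// For illustration: isJinaEmbeddingsModel('jina-clip-v2') === true,
// while isJinaEmbeddingsModel('text-embedding-3-small') === false.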
interface JinaEmbeddingsParams extends EmbeddingsParams {
/** Model name to use */
model: JinaModel
baseUrl?: string
/**
* Timeout to use when making requests to Jina.
*/
timeout?: number
/**
* The maximum number of documents to embed in a single request.
*/
batchSize?: number
/**
* Whether to strip new lines from the input text.
*/
stripNewLines?: boolean
/**
* The dimensions of the embedding.
*/
dimensions?: number
/**
* Scales the embedding so its Euclidean (L2) norm becomes 1, preserving direction. Useful when downstream tasks involve dot products, classification, or visualization.
*/
normalized?: boolean
}
type JinaMultiModelInput =
| {
text: string
image?: never
}
| {
image: string
text?: never
}
type JinaEmbeddingsInput = string | JinaMultiModelInput
interface EmbeddingCreateParams {
model: JinaEmbeddingsParams['model']
/**
* Input can be strings or JinaMultiModelInputs; to embed an image, use JinaMultiModelInputs.
*/
input: JinaEmbeddingsInput[]
dimensions: number
task?: 'retrieval.query' | 'retrieval.passage'
}
interface EmbeddingResponse {
model: string
object: string
usage: {
total_tokens: number
prompt_tokens: number
}
data: {
object: string
index: number
embedding: number[]
}[]
}
interface EmbeddingErrorResponse {
detail: string
}
export class JinaEmbeddings extends Embeddings implements JinaEmbeddingsParams {
model: JinaEmbeddingsParams['model'] = 'jina-clip-v2'
batchSize = 24
baseUrl = 'https://api.jina.ai/v1/embeddings'
stripNewLines = true
dimensions = 1024
apiKey: string
constructor(
fields?: Partial<JinaEmbeddingsParams> & {
apiKey?: string
}
) {
const fieldsWithDefaults = { maxConcurrency: 2, ...fields }
super(fieldsWithDefaults)
const apiKey =
fieldsWithDefaults?.apiKey || getEnvironmentVariable('JINA_API_KEY') || getEnvironmentVariable('JINA_AUTH_TOKEN')
if (!apiKey) throw new Error('Jina API key not found')
this.apiKey = apiKey
this.baseUrl = fieldsWithDefaults?.baseUrl ? `${fieldsWithDefaults?.baseUrl}embeddings` : this.baseUrl
this.model = fieldsWithDefaults?.model ?? this.model
this.dimensions = fieldsWithDefaults?.dimensions ?? this.dimensions
this.batchSize = fieldsWithDefaults?.batchSize ?? this.batchSize
this.stripNewLines = fieldsWithDefaults?.stripNewLines ?? this.stripNewLines
}
private doStripNewLines(input: JinaEmbeddingsInput[]) {
if (this.stripNewLines) {
return input.map((i) => {
if (typeof i === 'string') {
return i.replace(/\n/g, ' ')
}
if (i.text) {
return { text: i.text.replace(/\n/g, ' ') }
}
return i
})
}
return input
}
async embedDocuments(input: JinaEmbeddingsInput[]): Promise<number[][]> {
const batches = chunkArray(this.doStripNewLines(input), this.batchSize)
const batchRequests = batches.map((batch) => {
const params = this.getParams(batch)
return this.embeddingWithRetry(params)
})
const batchResponses = await Promise.all(batchRequests)
const embeddings: number[][] = []
for (let i = 0; i < batchResponses.length; i += 1) {
const batch = batches[i]
const batchResponse = batchResponses[i] || []
for (let j = 0; j < batch.length; j += 1) {
embeddings.push(batchResponse[j])
}
}
return embeddings
}
async embedQuery(input: JinaEmbeddingsInput): Promise<number[]> {
const params = this.getParams(this.doStripNewLines([input]), true)
const embeddings = (await this.embeddingWithRetry(params)) || [[]]
return embeddings[0]
}
private getParams(input: JinaEmbeddingsInput[], query?: boolean): EmbeddingCreateParams {
return {
model: this.model,
input,
dimensions: this.dimensions,
task: query ? 'retrieval.query' : this.model === 'jina-clip-v2' ? undefined : 'retrieval.passage'
}
}
private async embeddingWithRetry(body: EmbeddingCreateParams) {
const response = await fetch(this.baseUrl, {
method: 'POST',
headers: {
'Content-Type': 'application/json',
Authorization: `Bearer ${this.apiKey}`
},
body: JSON.stringify(body)
})
const embeddingData: EmbeddingResponse | EmbeddingErrorResponse = await response.json()
if ('detail' in embeddingData && embeddingData.detail) {
throw new Error(`${embeddingData.detail}`)
}
return (embeddingData as EmbeddingResponse).data.map(({ embedding }) => embedding)
}
}

View File

@ -1,25 +0,0 @@
import type { Embeddings as BaseEmbeddings } from '@langchain/core/embeddings'
import { TraceMethod } from '@mcp-trace/trace-core'
import { ApiClient } from '@types'
import EmbeddingsFactory from './EmbeddingsFactory'
export default class TextEmbeddings {
private sdk: BaseEmbeddings
constructor({ embedApiClient, dimensions }: { embedApiClient: ApiClient; dimensions?: number }) {
this.sdk = EmbeddingsFactory.create({
embedApiClient,
dimensions
})
}
@TraceMethod({ spanName: 'embedDocuments', tag: 'Embeddings' })
public async embedDocuments(texts: string[]): Promise<number[][]> {
return this.sdk.embedDocuments(texts)
}
@TraceMethod({ spanName: 'embedQuery', tag: 'Embeddings' })
public async embedQuery(text: string): Promise<number[]> {
return this.sdk.embedQuery(text)
}
}

View File

@ -1,97 +0,0 @@
import { BaseDocumentLoader } from '@langchain/core/document_loaders/base'
import { Document } from '@langchain/core/documents'
import { readTextFileWithAutoEncoding } from '@main/utils/file'
import MarkdownIt from 'markdown-it'
export class MarkdownLoader extends BaseDocumentLoader {
private path: string
private md: MarkdownIt
constructor(path: string) {
super()
this.path = path
this.md = new MarkdownIt()
}
public async load(): Promise<Document[]> {
const content = await readTextFileWithAutoEncoding(this.path)
return this.parseMarkdown(content)
}
private parseMarkdown(content: string): Document[] {
const tokens = this.md.parse(content, {})
const documents: Document[] = []
let currentSection: {
heading?: string
level?: number
content: string
startLine?: number
} = { content: '' }
let i = 0
while (i < tokens.length) {
const token = tokens[i]
if (token.type === 'heading_open') {
// Save previous section if it has content
if (currentSection.content.trim()) {
documents.push(
new Document({
pageContent: currentSection.content.trim(),
metadata: {
source: this.path,
heading: currentSection.heading || 'Introduction',
level: currentSection.level || 0,
startLine: currentSection.startLine || 0
}
})
)
}
// Start new section
const level = parseInt(token.tag.slice(1)) // Extract number from h1, h2, etc.
const headingContent = tokens[i + 1]?.content || ''
currentSection = {
heading: headingContent,
level: level,
content: '',
startLine: token.map?.[0] || 0
}
// Skip heading_open, inline, heading_close tokens
i += 3
continue
}
// Add token content to current section
if (token.content) {
currentSection.content += token.content
}
// Add newlines for block tokens
if (token.block && token.type !== 'heading_close') {
currentSection.content += '\n'
}
i++
}
// Add the last section
if (currentSection.content.trim()) {
documents.push(
new Document({
pageContent: currentSection.content.trim(),
metadata: {
source: this.path,
heading: currentSection.heading || 'Introduction',
level: currentSection.level || 0,
startLine: currentSection.startLine || 0
}
})
)
}
return documents
}
}

View File

@ -1,50 +0,0 @@
import { BaseDocumentLoader } from '@langchain/core/document_loaders/base'
import { Document } from '@langchain/core/documents'
export class NoteLoader extends BaseDocumentLoader {
private text: string
private sourceUrl?: string
constructor(
public _text: string,
public _sourceUrl?: string
) {
super()
this.text = _text
this.sourceUrl = _sourceUrl
}
/**
* A protected method that takes a `raw` string as a parameter and returns
* a promise that resolves to an array containing the raw text as a single
* element.
* @param raw The raw text to be parsed.
* @returns A promise that resolves to an array containing the raw text as a single element.
*/
protected async parse(raw: string): Promise<string[]> {
return [raw]
}
public async load(): Promise<Document[]> {
const metadata = { source: this.sourceUrl || 'note' }
const parsed = await this.parse(this.text)
parsed.forEach((pageContent, i) => {
if (typeof pageContent !== 'string') {
throw new Error(`Expected string at position ${i}, got ${typeof pageContent}`)
}
})
return parsed.map(
(pageContent, i) =>
new Document({
pageContent,
metadata:
parsed.length === 1
? metadata
: {
...metadata,
line: i + 1
}
})
)
}
}

View File

@ -1,170 +0,0 @@
import { BaseDocumentLoader } from '@langchain/core/document_loaders/base'
import { Document } from '@langchain/core/documents'
import { Innertube } from 'youtubei.js'
// ... (the YoutubeConfig and VideoMetadata interface definitions are unchanged)
/**
* Configuration options for the YoutubeLoader class. Includes properties
* such as the videoId, language, and addVideoInfo.
*/
interface YoutubeConfig {
videoId: string
language?: string
addVideoInfo?: boolean
// New option that controls the output format
transcriptFormat?: 'text' | 'srt'
}
/**
* Metadata of a YouTube video. Includes properties such as the source
* (videoId), description, title, view_count, author, and category.
*/
interface VideoMetadata {
source: string
description?: string
title?: string
view_count?: number
author?: string
category?: string
}
/**
* A document loader for loading data from YouTube videos. It uses the
* youtubei.js library to fetch the transcript and video metadata.
* @example
* ```typescript
* const loader = new YoutubeLoader({
* videoId: "VIDEO_ID",
* language: "en",
* addVideoInfo: true,
* transcriptFormat: "srt" // request the SRT format
* });
* const docs = await loader.load();
* console.log(docs[0].pageContent);
* ```
*/
export class YoutubeLoader extends BaseDocumentLoader {
private videoId: string
private language?: string
private addVideoInfo: boolean
// Private field for the new format option
private transcriptFormat: 'text' | 'srt'
constructor(config: YoutubeConfig) {
super()
this.videoId = config.videoId
this.language = config?.language
this.addVideoInfo = config?.addVideoInfo ?? false
// Initialize the format option; defaults to 'text' for backward compatibility
this.transcriptFormat = config?.transcriptFormat ?? 'text'
}
/**
* Extracts the videoId from a YouTube video URL.
* @param url The URL of the YouTube video.
* @returns The videoId of the YouTube video.
*/
private static getVideoID(url: string): string {
const match = url.match(/.*(?:youtu.be\/|v\/|u\/\w\/|embed\/|watch\?v=)([^#&?]*).*/)
if (match !== null && match[1].length === 11) {
return match[1]
} else {
throw new Error('Failed to get youtube video id from the url')
}
}
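// For illustration: getVideoID('https://www.youtube.com/watch?v=dQw4w9WgXcQ') returns 'dQw4w9WgXcQ';
// any URL form matched by the pattern with an 11-character id works the same way.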
/**
* Creates a new instance of the YoutubeLoader class from a YouTube video
* URL.
* @param url The URL of the YouTube video.
* @param config Optional configuration options for the YoutubeLoader instance, excluding the videoId.
* @returns A new instance of the YoutubeLoader class.
*/
static createFromUrl(url: string, config?: Omit<YoutubeConfig, 'videoId'>): YoutubeLoader {
const videoId = YoutubeLoader.getVideoID(url)
return new YoutubeLoader({ ...config, videoId })
}
/**
* Converts a millisecond timestamp to SRT format (HH:MM:SS,ms).
* @param ms Time in milliseconds.
* @returns The SRT-formatted timestamp string.
*/
private static formatTimestamp(ms: number): string {
const totalSeconds = Math.floor(ms / 1000)
const hours = Math.floor(totalSeconds / 3600)
.toString()
.padStart(2, '0')
const minutes = Math.floor((totalSeconds % 3600) / 60)
.toString()
.padStart(2, '0')
const seconds = (totalSeconds % 60).toString().padStart(2, '0')
const milliseconds = (ms % 1000).toString().padStart(3, '0')
return `${hours}:${minutes}:${seconds},${milliseconds}`
}
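// For illustration: formatTimestamp(65250) returns '00:01:05,250'.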
/**
* Loads the transcript and video metadata from the specified YouTube
* video. It can return the transcript as plain text or in SRT format.
* @returns An array of Documents representing the retrieved data.
*/
async load(): Promise<Document[]> {
const metadata: VideoMetadata = {
source: this.videoId
}
try {
const youtube = await Innertube.create({
lang: this.language,
retrieve_player: false
})
const info = await youtube.getInfo(this.videoId)
const transcriptData = await info.getTranscript()
if (!transcriptData.transcript.content?.body?.initial_segments) {
throw new Error('Transcript segments not found in the response.')
}
const segments = transcriptData.transcript.content.body.initial_segments
let pageContent: string
// Decide how to format the transcript based on the transcriptFormat option
if (this.transcriptFormat === 'srt') {
// [modified] Format the transcript segments as SRT
pageContent = segments
.map((segment, index) => {
const srtIndex = index + 1
const startTime = YoutubeLoader.formatTimestamp(Number(segment.start_ms))
const endTime = YoutubeLoader.formatTimestamp(Number(segment.end_ms))
const text = segment.snippet?.text || '' // use segment.snippet.text
return `${srtIndex}\n${startTime} --> ${endTime}\n${text}`
})
.join('\n\n') // separate SRT blocks with a blank line
} else {
// [original logic] Join the segments as plain text
pageContent = segments.map((segment) => segment.snippet?.text || '').join(' ')
}
if (this.addVideoInfo) {
const basicInfo = info.basic_info
metadata.description = basicInfo.short_description
metadata.title = basicInfo.title
metadata.view_count = basicInfo.view_count
metadata.author = basicInfo.author
}
const document = new Document({
pageContent,
metadata
})
return [document]
} catch (e: unknown) {
throw new Error(`Failed to get YouTube video transcription: ${(e as Error).message}`)
}
}
}

View File

@ -1,235 +0,0 @@
import { DocxLoader } from '@langchain/community/document_loaders/fs/docx'
import { EPubLoader } from '@langchain/community/document_loaders/fs/epub'
import { PDFLoader } from '@langchain/community/document_loaders/fs/pdf'
import { PPTXLoader } from '@langchain/community/document_loaders/fs/pptx'
import { CheerioWebBaseLoader } from '@langchain/community/document_loaders/web/cheerio'
import { SitemapLoader } from '@langchain/community/document_loaders/web/sitemap'
import { FaissStore } from '@langchain/community/vectorstores/faiss'
import { Document } from '@langchain/core/documents'
import { loggerService } from '@logger'
import { UrlSource } from '@main/utils/knowledge'
import { LoaderReturn } from '@shared/config/types'
import { FileMetadata, FileTypes, KnowledgeBaseParams } from '@types'
import { randomUUID } from 'crypto'
import { JSONLoader } from 'langchain/document_loaders/fs/json'
import { TextLoader } from 'langchain/document_loaders/fs/text'
import { SplitterFactory } from '../splitter'
import { MarkdownLoader } from './MarkdownLoader'
import { NoteLoader } from './NoteLoader'
import { YoutubeLoader } from './YoutubeLoader'
const logger = loggerService.withContext('KnowledgeService File Loader')
type LoaderInstance =
| TextLoader
| PDFLoader
| PPTXLoader
| DocxLoader
| JSONLoader
| EPubLoader
| CheerioWebBaseLoader
| YoutubeLoader
| SitemapLoader
| NoteLoader
| MarkdownLoader
/**
* Adds the loader type to each document's metadata.
*/
function formatDocument(docs: Document[], type: string): Document[] {
return docs.map((doc) => ({
...doc,
metadata: {
...doc.metadata,
type: type
}
}))
}
/**
* Splits the documents, adds them to the vector store, and returns the loader result.
*/
async function processDocuments(
base: KnowledgeBaseParams,
vectorStore: FaissStore,
docs: Document[],
loaderType: string,
splitterType?: string
): Promise<LoaderReturn> {
const formattedDocs = formatDocument(docs, loaderType)
const splitter = SplitterFactory.create({
chunkSize: base.chunkSize,
chunkOverlap: base.chunkOverlap,
...(splitterType && { type: splitterType })
})
const splitterResults = await splitter.splitDocuments(formattedDocs)
const ids = splitterResults.map(() => randomUUID())
await vectorStore.addDocuments(splitterResults, { ids })
return {
entriesAdded: splitterResults.length,
uniqueId: ids[0] || '',
uniqueIds: ids,
loaderType
}
}
/**
* Runs a loader, then splits and stores its documents; returns an empty result on failure.
*/
async function executeLoader(
base: KnowledgeBaseParams,
vectorStore: FaissStore,
loaderInstance: LoaderInstance,
loaderType: string,
identifier: string,
splitterType?: string
): Promise<LoaderReturn> {
const emptyResult: LoaderReturn = {
entriesAdded: 0,
uniqueId: '',
uniqueIds: [],
loaderType
}
try {
const docs = await loaderInstance.load()
return await processDocuments(base, vectorStore, docs, loaderType, splitterType)
} catch (error) {
logger.error(`Error loading or processing ${identifier} with loader ${loaderType}: ${error}`)
return emptyResult
}
}
/**
* Maps file extensions to their document loader classes and type labels.
*/
const FILE_LOADER_MAP: Record<string, { loader: new (path: string) => LoaderInstance; type: string }> = {
'.pdf': { loader: PDFLoader, type: 'pdf' },
'.txt': { loader: TextLoader, type: 'text' },
'.pptx': { loader: PPTXLoader, type: 'pptx' },
'.docx': { loader: DocxLoader, type: 'docx' },
'.doc': { loader: DocxLoader, type: 'doc' },
'.json': { loader: JSONLoader, type: 'json' },
'.epub': { loader: EPubLoader, type: 'epub' },
'.md': { loader: MarkdownLoader, type: 'markdown' }
}
export async function addFileLoader(
base: KnowledgeBaseParams,
vectorStore: FaissStore,
file: FileMetadata
): Promise<LoaderReturn> {
const fileExt = file.ext.toLowerCase()
const loaderConfig = FILE_LOADER_MAP[fileExt]
if (!loaderConfig) {
// Fall back to the text loader by default
const loaderInstance = new TextLoader(file.path)
const type = fileExt.replace('.', '') || 'unknown'
return executeLoader(base, vectorStore, loaderInstance, type, file.path)
}
const loaderInstance = new loaderConfig.loader(file.path)
return executeLoader(base, vectorStore, loaderInstance, loaderConfig.type, file.path)
}
export async function addWebLoader(
base: KnowledgeBaseParams,
vectorStore: FaissStore,
url: string,
source: UrlSource
): Promise<LoaderReturn> {
let loaderInstance: CheerioWebBaseLoader | YoutubeLoader | undefined
let splitterType: string | undefined
switch (source) {
case 'normal':
loaderInstance = new CheerioWebBaseLoader(url)
break
case 'youtube':
loaderInstance = YoutubeLoader.createFromUrl(url, {
addVideoInfo: true,
transcriptFormat: 'srt'
})
splitterType = 'srt'
break
}
if (!loaderInstance) {
return {
entriesAdded: 0,
uniqueId: '',
uniqueIds: [],
loaderType: source
}
}
return executeLoader(base, vectorStore, loaderInstance, source, url, splitterType)
}
export async function addSitemapLoader(
base: KnowledgeBaseParams,
vectorStore: FaissStore,
url: string
): Promise<LoaderReturn> {
const loaderInstance = new SitemapLoader(url)
return executeLoader(base, vectorStore, loaderInstance, 'sitemap', url)
}
export async function addNoteLoader(
base: KnowledgeBaseParams,
vectorStore: FaissStore,
content: string,
sourceUrl: string
): Promise<LoaderReturn> {
const loaderInstance = new NoteLoader(content, sourceUrl)
return executeLoader(base, vectorStore, loaderInstance, 'note', sourceUrl)
}
export async function addVideoLoader(
base: KnowledgeBaseParams,
vectorStore: FaissStore,
files: FileMetadata[]
): Promise<LoaderReturn> {
const srtFile = files.find((f) => f.type === FileTypes.TEXT)
const videoFile = files.find((f) => f.type === FileTypes.VIDEO)
const emptyResult: LoaderReturn = {
entriesAdded: 0,
uniqueId: '',
uniqueIds: [],
loaderType: 'video'
}
if (!srtFile || !videoFile) {
return emptyResult
}
try {
const loaderInstance = new TextLoader(srtFile.path)
const originalDocs = await loaderInstance.load()
const docsWithVideoMeta = originalDocs.map(
(doc) =>
new Document({
...doc,
metadata: {
...doc.metadata,
video: {
path: videoFile.path,
name: videoFile.origin_name
}
}
})
)
return await processDocuments(base, vectorStore, docsWithVideoMeta, 'video', 'srt')
} catch (error) {
logger.error(`Error loading or processing file ${srtFile.path} with loader video: ${error}`)
return emptyResult
}
}

View File

@ -1,55 +0,0 @@
import { BM25Retriever } from '@langchain/community/retrievers/bm25'
import { FaissStore } from '@langchain/community/vectorstores/faiss'
import { BaseRetriever } from '@langchain/core/retrievers'
import { loggerService } from '@main/services/LoggerService'
import { type KnowledgeBaseParams } from '@types'
import { type Document } from 'langchain/document'
import { EnsembleRetriever } from 'langchain/retrievers/ensemble'
const logger = loggerService.withContext('RetrieverFactory')
export class RetrieverFactory {
/**
* Creates a LangChain retriever for the knowledge base.
* @param base Knowledge base parameters (retriever mode, weight, document count).
* @param vectorStore The FAISS vector store backing vector search.
* @param documents Documents used to build the BM25Retriever.
* @returns A BaseRetriever instance.
*/
public createRetriever(base: KnowledgeBaseParams, vectorStore: FaissStore, documents: Document[]): BaseRetriever {
const retrieverType = base.retriever?.mode ?? 'hybrid'
const retrieverWeight = base.retriever?.weight ?? 0.5
const searchK = base.documentCount ?? 5
logger.info(`Creating retriever of type: ${retrieverType} with k=${searchK}`)
switch (retrieverType) {
case 'bm25':
if (documents.length === 0) {
throw new Error('BM25Retriever requires documents, but none were provided or found.')
}
logger.info('Create BM25 Retriever')
return BM25Retriever.fromDocuments(documents, { k: searchK })
case 'hybrid': {
if (documents.length === 0) {
logger.warn('No documents provided for BM25 part of hybrid search. Falling back to vector search only.')
return vectorStore.asRetriever(searchK)
}
const vectorstoreRetriever = vectorStore.asRetriever(searchK)
const bm25Retriever = BM25Retriever.fromDocuments(documents, { k: searchK })
logger.info('Create Hybrid Retriever')
return new EnsembleRetriever({
retrievers: [bm25Retriever, vectorstoreRetriever],
weights: [retrieverWeight, 1 - retrieverWeight]
})
}
case 'vector':
default:
logger.info('Create Vector Retriever')
return vectorStore.asRetriever(searchK)
}
}
}
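// For illustration (hypothetical parameters): with base.retriever = { mode: 'hybrid', weight: 0.5 }
// the factory returns an EnsembleRetriever weighting BM25 and vector results equally, while
// mode 'vector' (or a hybrid request with no documents) falls back to vectorStore.asRetriever(searchK).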

View File

@ -1,133 +0,0 @@
import { Document } from '@langchain/core/documents'
import { TextSplitter, TextSplitterParams } from 'langchain/text_splitter'
// Interface describing a single parsed subtitle segment
interface SrtSegment {
text: string
startTime: number // in seconds
endTime: number // in seconds
}
// Helper: convert an SRT timestamp string (HH:MM:SS,ms) to seconds
function srtTimeToSeconds(time: string): number {
const parts = time.split(':')
const secondsAndMs = parts[2].split(',')
const hours = parseInt(parts[0], 10)
const minutes = parseInt(parts[1], 10)
const seconds = parseInt(secondsAndMs[0], 10)
const milliseconds = parseInt(secondsAndMs[1], 10)
return hours * 3600 + minutes * 60 + seconds + milliseconds / 1000
}
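// For illustration: srtTimeToSeconds('00:01:05,250') === 65.25.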
export class SrtSplitter extends TextSplitter {
constructor(fields?: Partial<TextSplitterParams>) {
// Pass through chunkSize and chunkOverlap
super(fields)
}
splitText(): Promise<string[]> {
throw new Error('Method not implemented.')
}
// Core method: override splitDocuments to implement the custom chunking logic
async splitDocuments(documents: Document[]): Promise<Document[]> {
const allChunks: Document[] = []
for (const doc of documents) {
// 1. Parse the SRT content
const segments = this.parseSrt(doc.pageContent)
if (segments.length === 0) continue
// 2. Merge the subtitle segments into chunks
const chunks = this.mergeSegmentsIntoChunks(segments, doc.metadata)
allChunks.push(...chunks)
}
return allChunks
}
// Helper: parse the entire SRT string
private parseSrt(srt: string): SrtSegment[] {
const segments: SrtSegment[] = []
const blocks = srt.trim().split(/\n\n/)
for (const block of blocks) {
const lines = block.split('\n')
if (lines.length < 3) continue
const timeMatch = lines[1].match(/(\d{2}:\d{2}:\d{2},\d{3}) --> (\d{2}:\d{2}:\d{2},\d{3})/)
if (!timeMatch) continue
const startTime = srtTimeToSeconds(timeMatch[1])
const endTime = srtTimeToSeconds(timeMatch[2])
const text = lines.slice(2).join(' ').trim()
segments.push({ text, startTime, endTime })
}
return segments
}
// Helper: merge parsed segments into chunks of 5 segments each
private mergeSegmentsIntoChunks(segments: SrtSegment[], baseMetadata: Record<string, any>): Document[] {
const chunks: Document[] = []
let currentChunkText = ''
let currentChunkStartTime = 0
let currentChunkEndTime = 0
let segmentCount = 0
for (const segment of segments) {
if (segmentCount === 0) {
currentChunkStartTime = segment.startTime
}
currentChunkText += (currentChunkText ? ' ' : '') + segment.text
currentChunkEndTime = segment.endTime
segmentCount++
// Once 5 segments have accumulated, create a new Document
if (segmentCount === 5) {
const metadata: Record<string, any> = {
...baseMetadata,
startTime: currentChunkStartTime,
endTime: currentChunkEndTime
}
if (baseMetadata.source_url) {
metadata.source_url_with_timestamp = `${baseMetadata.source_url}?t=${Math.floor(currentChunkStartTime)}s`
}
chunks.push(
new Document({
pageContent: currentChunkText,
metadata
})
)
// Reset the counter and temporary variables
currentChunkText = ''
currentChunkStartTime = 0
currentChunkEndTime = 0
segmentCount = 0
}
}
// If any segments remain, create the final Document
if (segmentCount > 0) {
const metadata: Record<string, any> = {
...baseMetadata,
startTime: currentChunkStartTime,
endTime: currentChunkEndTime
}
if (baseMetadata.source_url) {
metadata.source_url_with_timestamp = `${baseMetadata.source_url}?t=${Math.floor(currentChunkStartTime)}s`
}
chunks.push(
new Document({
pageContent: currentChunkText,
metadata
})
)
}
return chunks
}
}
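A rough sketch of the splitter's behavior on a tiny illustrative SRT document (the cue text and URL are made up):

// Illustrative input: two cues, so fewer than 5 segments end up in a single trailing chunk.
const srt = [
  '1',
  '00:00:01,000 --> 00:00:03,500',
  'Hello there.',
  '',
  '2',
  '00:00:04,000 --> 00:00:06,000',
  'Welcome to the demo.'
].join('\n')

// Inside an async context:
const chunks = await new SrtSplitter({ chunkSize: 1000, chunkOverlap: 0 }).splitDocuments([
  new Document({ pageContent: srt, metadata: { source_url: 'https://example.com/video' } })
])
// chunks[0].pageContent === 'Hello there. Welcome to the demo.'
// chunks[0].metadata => { source_url, startTime: 1, endTime: 6, source_url_with_timestamp: 'https://example.com/video?t=1s' }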

View File

@ -1,31 +0,0 @@
import { RecursiveCharacterTextSplitter, TextSplitter } from '@langchain/textsplitters'
import { SrtSplitter } from './SrtSplitter'
export type SplitterConfig = {
chunkSize?: number
chunkOverlap?: number
type?: 'recursive' | 'srt' | string
}
export class SplitterFactory {
/**
* Creates a TextSplitter instance based on the provided configuration.
* @param config - The configuration object specifying the splitter type and its parameters.
* @returns An instance of a TextSplitter.
*/
public static create(config: SplitterConfig): TextSplitter {
switch (config.type) {
case 'srt':
return new SrtSplitter({
chunkSize: config.chunkSize,
chunkOverlap: config.chunkOverlap
})
case 'recursive':
default:
return new RecursiveCharacterTextSplitter({
chunkSize: config.chunkSize,
chunkOverlap: config.chunkOverlap
})
}
}
}
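A short sketch of how the factory might be selected per file type; the extension check is an assumption, not code from this diff:

// Assumption: callers know the source file extension; '.srt' routes to the timestamp-aware splitter.
function splitterForFile(ext: string, chunkSize = 1000, chunkOverlap = 200): TextSplitter {
  return SplitterFactory.create({
    type: ext.toLowerCase() === '.srt' ? 'srt' : 'recursive',
    chunkSize,
    chunkOverlap
  })
}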

View File

@ -17,6 +17,13 @@ import { windowService } from './WindowService'
const logger = loggerService.withContext('AppUpdater')
// Language markers constants for multi-language release notes
const LANG_MARKERS = {
EN_START: '<!--LANG:en-->',
ZH_CN_START: '<!--LANG:zh-CN-->',
END: '<!--LANG:END-->'
} as const
export default class AppUpdater {
autoUpdater: _AppUpdater = autoUpdater
private releaseInfo: UpdateInfo | undefined
@ -30,7 +37,8 @@ export default class AppUpdater {
autoUpdater.autoInstallOnAppQuit = configManager.getAutoUpdate()
autoUpdater.requestHeaders = {
...autoUpdater.requestHeaders,
'User-Agent': generateUserAgent()
'User-Agent': generateUserAgent(),
'X-Client-Id': configManager.getClientId()
}
autoUpdater.on('error', (error) => {
@ -40,7 +48,8 @@ export default class AppUpdater {
autoUpdater.on('update-available', (releaseInfo: UpdateInfo) => {
logger.info('update available', releaseInfo)
windowService.getMainWindow()?.webContents.send(IpcChannel.UpdateAvailable, releaseInfo)
const processedReleaseInfo = this.processReleaseInfo(releaseInfo)
windowService.getMainWindow()?.webContents.send(IpcChannel.UpdateAvailable, processedReleaseInfo)
})
// When no update is needed
@ -55,9 +64,10 @@ export default class AppUpdater {
// When the update content has finished downloading
autoUpdater.on('update-downloaded', (releaseInfo: UpdateInfo) => {
windowService.getMainWindow()?.webContents.send(IpcChannel.UpdateDownloaded, releaseInfo)
this.releaseInfo = releaseInfo
logger.info('update downloaded', releaseInfo)
const processedReleaseInfo = this.processReleaseInfo(releaseInfo)
windowService.getMainWindow()?.webContents.send(IpcChannel.UpdateDownloaded, processedReleaseInfo)
this.releaseInfo = processedReleaseInfo
logger.info('update downloaded', processedReleaseInfo)
})
if (isWin) {
@ -270,16 +280,99 @@ export default class AppUpdater {
})
}
/**
* Check if release notes contain multi-language markers
*/
private hasMultiLanguageMarkers(releaseNotes: string): boolean {
return releaseNotes.includes(LANG_MARKERS.EN_START)
}
/**
* Parse multi-language release notes and return the appropriate language version
* @param releaseNotes - Release notes string with language markers
* @returns Parsed release notes for the user's language
*
* Expected format:
* <!--LANG:en-->English content<!--LANG:zh-CN-->Chinese content<!--LANG:END-->
*/
private parseMultiLangReleaseNotes(releaseNotes: string): string {
try {
const language = configManager.getLanguage()
const isChineseUser = language === 'zh-CN' || language === 'zh-TW'
// Create regex patterns using constants
const enPattern = new RegExp(
`${LANG_MARKERS.EN_START.replace(/[.*+?^${}()|[\]\\]/g, '\\$&')}([\\s\\S]*?)${LANG_MARKERS.ZH_CN_START.replace(/[.*+?^${}()|[\]\\]/g, '\\$&')}`
)
const zhPattern = new RegExp(
`${LANG_MARKERS.ZH_CN_START.replace(/[.*+?^${}()|[\]\\]/g, '\\$&')}([\\s\\S]*?)${LANG_MARKERS.END.replace(/[.*+?^${}()|[\]\\]/g, '\\$&')}`
)
// Extract language sections
const enMatch = releaseNotes.match(enPattern)
const zhMatch = releaseNotes.match(zhPattern)
// Return appropriate language version with proper fallback
if (isChineseUser && zhMatch) {
return zhMatch[1].trim()
} else if (enMatch) {
return enMatch[1].trim()
} else {
// Clean fallback: remove all language markers
logger.warn('Failed to extract language-specific release notes, using cleaned fallback')
return releaseNotes
.replace(new RegExp(`${LANG_MARKERS.EN_START}|${LANG_MARKERS.ZH_CN_START}|${LANG_MARKERS.END}`, 'g'), '')
.trim()
}
} catch (error) {
logger.error('Failed to parse multi-language release notes', error as Error)
// Return original notes as safe fallback
return releaseNotes
}
}
/**
* Process release info to handle multi-language release notes
* @param releaseInfo - Original release info from updater
* @returns Processed release info with localized release notes
*/
private processReleaseInfo(releaseInfo: UpdateInfo): UpdateInfo {
const processedInfo = { ...releaseInfo }
// Handle multi-language release notes in string format
if (releaseInfo.releaseNotes && typeof releaseInfo.releaseNotes === 'string') {
// Check if it contains multi-language markers
if (this.hasMultiLanguageMarkers(releaseInfo.releaseNotes)) {
processedInfo.releaseNotes = this.parseMultiLangReleaseNotes(releaseInfo.releaseNotes)
}
}
return processedInfo
}
/**
* Format release notes for display
* @param releaseNotes - Release notes in various formats
* @returns Formatted string for display
*/
private formatReleaseNotes(releaseNotes: string | ReleaseNoteInfo[] | null | undefined): string {
if (!releaseNotes) {
return ''
}
if (typeof releaseNotes === 'string') {
// Check if it contains multi-language markers
if (this.hasMultiLanguageMarkers(releaseNotes)) {
return this.parseMultiLangReleaseNotes(releaseNotes)
}
return releaseNotes
}
return releaseNotes.map((note) => note.note).join('\n')
if (Array.isArray(releaseNotes)) {
return releaseNotes.map((note) => note.note).join('\n')
}
return ''
}
}
interface GithubReleaseInfo {
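For reference, a release body using the markers above could look like the following (contents are illustrative); parseMultiLangReleaseNotes returns only the section matching the configured language:

// Illustrative release body, not taken from an actual release.
const body = ['<!--LANG:en-->', '- Added PDF export', '<!--LANG:zh-CN-->', '- 新增 PDF 导出', '<!--LANG:END-->'].join('\n')
// With language 'zh-CN' or 'zh-TW' this resolves to '- 新增 PDF 导出'; any other language gets '- Added PDF export'.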

View File

@ -3,11 +3,20 @@ import os from 'node:os'
import path from 'node:path'
import { loggerService } from '@logger'
import { isWin } from '@main/constant'
import { isMac, isWin } from '@main/constant'
import { removeEnvProxy } from '@main/utils'
import { isUserInChina } from '@main/utils/ipService'
import { getBinaryName } from '@main/utils/process'
import { codeTools } from '@shared/config/constant'
import {
codeTools,
MACOS_TERMINALS,
MACOS_TERMINALS_WITH_COMMANDS,
terminalApps,
TerminalConfig,
TerminalConfigWithCommand,
WINDOWS_TERMINALS,
WINDOWS_TERMINALS_WITH_COMMANDS
} from '@shared/config/constant'
import { spawn } from 'child_process'
import { promisify } from 'util'
@ -22,7 +31,10 @@ interface VersionInfo {
class CodeToolsService {
private versionCache: Map<string, { version: string; timestamp: number }> = new Map()
private terminalsCache: { terminals: TerminalConfig[]; timestamp: number } | null = null
private customTerminalPaths: Map<string, string> = new Map() // Store user-configured terminal paths
private readonly CACHE_DURATION = 1000 * 60 * 30 // 30 minutes cache
private readonly TERMINALS_CACHE_DURATION = 1000 * 60 * 5 // 5 minutes cache for terminals
constructor() {
this.getBunPath = this.getBunPath.bind(this)
@ -32,6 +44,23 @@ class CodeToolsService {
this.getVersionInfo = this.getVersionInfo.bind(this)
this.updatePackage = this.updatePackage.bind(this)
this.run = this.run.bind(this)
if (isMac || isWin) {
this.preloadTerminals()
}
}
/**
* Preload available terminals in background
*/
private async preloadTerminals(): Promise<void> {
try {
logger.info('Preloading available terminals...')
await this.getAvailableTerminals()
logger.info('Terminal preloading completed')
} catch (error) {
logger.warn('Terminal preloading failed:', error as Error)
}
}
public async getBunPath() {
@ -51,6 +80,8 @@ class CodeToolsService {
return '@openai/codex'
case codeTools.qwenCode:
return '@qwen-code/qwen-code'
case codeTools.iFlowCli:
return '@iflow-ai/iflow-cli'
default:
throw new Error(`Unsupported CLI tool: ${cliTool}`)
}
@ -66,15 +97,265 @@ class CodeToolsService {
return 'codex'
case codeTools.qwenCode:
return 'qwen'
case codeTools.iFlowCli:
return 'iflow'
default:
throw new Error(`Unsupported CLI tool: ${cliTool}`)
}
}
/**
* Check if a single terminal is available
*/
private async checkTerminalAvailability(terminal: TerminalConfig): Promise<TerminalConfig | null> {
try {
if (isMac && terminal.bundleId) {
// macOS: Check if application is installed via bundle ID with timeout
const { stdout } = await execAsync(`mdfind "kMDItemCFBundleIdentifier == '${terminal.bundleId}'"`, {
timeout: 3000
})
if (stdout.trim()) {
return terminal
}
} else if (isWin) {
// Windows: Check terminal availability
return await this.checkWindowsTerminalAvailability(terminal)
} else {
// TODO: Check if terminal is available in linux
await execAsync(`which ${terminal.id}`, { timeout: 2000 })
return terminal
}
} catch (error) {
logger.debug(`Terminal ${terminal.id} not available:`, error as Error)
}
return null
}
/**
* Check Windows terminal availability (simplified - user configured paths)
*/
private async checkWindowsTerminalAvailability(terminal: TerminalConfig): Promise<TerminalConfig | null> {
try {
switch (terminal.id) {
case terminalApps.cmd:
// CMD is always available on Windows
return terminal
case terminalApps.powershell:
// Check for PowerShell in PATH
try {
await execAsync('powershell -Command "Get-Host"', { timeout: 3000 })
return terminal
} catch {
try {
await execAsync('pwsh -Command "Get-Host"', { timeout: 3000 })
return terminal
} catch {
return null
}
}
case terminalApps.windowsTerminal:
// Check for Windows Terminal via where command (doesn't launch the terminal)
try {
await execAsync('where wt', { timeout: 3000 })
return terminal
} catch {
return null
}
case terminalApps.wsl:
// Check for WSL
try {
await execAsync('wsl --status', { timeout: 3000 })
return terminal
} catch {
return null
}
default:
// For other terminals (Alacritty, WezTerm), check whether the user has configured a custom path
return await this.checkCustomTerminalPath(terminal)
}
} catch (error) {
logger.debug(`Windows terminal ${terminal.id} not available:`, error as Error)
return null
}
}
/**
* Check if user has configured custom path for terminal
*/
private async checkCustomTerminalPath(terminal: TerminalConfig): Promise<TerminalConfig | null> {
// Check if user has configured custom path
const customPath = this.customTerminalPaths.get(terminal.id)
if (customPath && fs.existsSync(customPath)) {
try {
await execAsync(`"${customPath}" --version`, { timeout: 3000 })
return { ...terminal, customPath }
} catch {
return null
}
}
// Fallback to PATH check
try {
const command = terminal.id === terminalApps.alacritty ? 'alacritty' : 'wezterm'
await execAsync(`${command} --version`, { timeout: 3000 })
return terminal
} catch {
return null
}
}
/**
* Set custom path for a terminal (called from settings UI)
*/
public setCustomTerminalPath(terminalId: string, path: string): void {
logger.info(`Setting custom path for terminal ${terminalId}: ${path}`)
this.customTerminalPaths.set(terminalId, path)
// Clear terminals cache to force refresh
this.terminalsCache = null
}
/**
* Get custom path for a terminal
*/
public getCustomTerminalPath(terminalId: string): string | undefined {
return this.customTerminalPaths.get(terminalId)
}
/**
* Remove custom path for a terminal
*/
public removeCustomTerminalPath(terminalId: string): void {
logger.info(`Removing custom path for terminal ${terminalId}`)
this.customTerminalPaths.delete(terminalId)
// Clear terminals cache to force refresh
this.terminalsCache = null
}
/**
* Get available terminals (with caching and parallel checking)
*/
private async getAvailableTerminals(): Promise<TerminalConfig[]> {
const now = Date.now()
// Check cache first
if (this.terminalsCache && now - this.terminalsCache.timestamp < this.TERMINALS_CACHE_DURATION) {
logger.info(`Using cached terminals list (${this.terminalsCache.terminals.length} terminals)`)
return this.terminalsCache.terminals
}
logger.info('Checking available terminals in parallel...')
const startTime = Date.now()
// Get terminal list based on platform
const terminalList = isWin ? WINDOWS_TERMINALS : MACOS_TERMINALS
// Check all terminals in parallel
const terminalPromises = terminalList.map((terminal) => this.checkTerminalAvailability(terminal))
try {
// Wait for all checks to complete with a global timeout
const results = await Promise.allSettled(
terminalPromises.map((p) =>
Promise.race([p, new Promise((_, reject) => setTimeout(() => reject(new Error('timeout')), 5000))])
)
)
const availableTerminals: TerminalConfig[] = []
results.forEach((result, index) => {
if (result.status === 'fulfilled' && result.value) {
availableTerminals.push(result.value as TerminalConfig)
} else if (result.status === 'rejected') {
logger.debug(`Terminal check failed for ${terminalList[index].id}:`, result.reason)
}
})
const endTime = Date.now()
logger.info(
`Terminal availability check completed in ${endTime - startTime}ms, found ${availableTerminals.length} terminals`
)
// Cache the results
this.terminalsCache = {
terminals: availableTerminals,
timestamp: now
}
return availableTerminals
} catch (error) {
logger.error('Error checking terminal availability:', error as Error)
// Return cached result if available, otherwise empty array
return this.terminalsCache?.terminals || []
}
}
/**
* Get terminal config by ID, fallback to system default
*/
private async getTerminalConfig(terminalId?: string): Promise<TerminalConfigWithCommand> {
const availableTerminals = await this.getAvailableTerminals()
const terminalCommands = isWin ? WINDOWS_TERMINALS_WITH_COMMANDS : MACOS_TERMINALS_WITH_COMMANDS
const defaultTerminal = isWin ? terminalApps.cmd : terminalApps.systemDefault
if (terminalId) {
let requestedTerminal = terminalCommands.find(
(t) => t.id === terminalId && availableTerminals.some((at) => at.id === t.id)
)
if (requestedTerminal) {
// Apply custom path if configured
const customPath = this.customTerminalPaths.get(terminalId)
if (customPath && isWin) {
requestedTerminal = this.applyCustomPath(requestedTerminal, customPath)
}
return requestedTerminal
} else {
logger.warn(`Requested terminal ${terminalId} not available, falling back to system default`)
}
}
// Fallback to system default Terminal
const systemTerminal = terminalCommands.find(
(t) => t.id === defaultTerminal && availableTerminals.some((at) => at.id === t.id)
)
if (systemTerminal) {
return systemTerminal
}
// If even system Terminal is not found, return the first available
const firstAvailable = terminalCommands.find((t) => availableTerminals.some((at) => at.id === t.id))
if (firstAvailable) {
return firstAvailable
}
// Last resort fallback
return terminalCommands.find((t) => t.id === defaultTerminal)!
}
/**
* Apply custom path to terminal configuration
*/
private applyCustomPath(terminal: TerminalConfigWithCommand, customPath: string): TerminalConfigWithCommand {
return {
...terminal,
customPath,
command: (directory: string, fullCommand: string) => {
const originalCommand = terminal.command(directory, fullCommand)
return {
...originalCommand,
command: customPath // Replace command with custom path
}
}
}
}
private async isPackageInstalled(cliTool: string): Promise<boolean> {
const executableName = await this.getCliExecutableName(cliTool)
const binDir = path.join(os.homedir(), '.cherrystudio', 'bin')
const executablePath = path.join(binDir, executableName + (process.platform === 'win32' ? '.exe' : ''))
const executablePath = path.join(binDir, executableName + (isWin ? '.exe' : ''))
// Ensure bin directory exists
if (!fs.existsSync(binDir)) {
@ -101,7 +382,7 @@ class CodeToolsService {
try {
const executableName = await this.getCliExecutableName(cliTool)
const binDir = path.join(os.homedir(), '.cherrystudio', 'bin')
const executablePath = path.join(binDir, executableName + (process.platform === 'win32' ? '.exe' : ''))
const executablePath = path.join(binDir, executableName + (isWin ? '.exe' : ''))
const { stdout } = await execAsync(`"${executablePath}" --version`, { timeout: 10000 })
// Extract version number from output (format may vary by tool)
@ -187,6 +468,17 @@ class CodeToolsService {
}
}
/**
* Get available terminals for the current platform
*/
public async getAvailableTerminalsForPlatform(): Promise<TerminalConfig[]> {
if (isMac || isWin) {
return this.getAvailableTerminals()
}
// For other platforms, return empty array for now
return []
}
/**
* Update a CLI tool to the latest version
*/
@ -198,10 +490,9 @@ class CodeToolsService {
const bunInstallPath = path.join(os.homedir(), '.cherrystudio')
const registryUrl = await this.getNpmRegistryUrl()
const installEnvPrefix =
process.platform === 'win32'
? `set "BUN_INSTALL=${bunInstallPath}" && set "NPM_CONFIG_REGISTRY=${registryUrl}" &&`
: `export BUN_INSTALL="${bunInstallPath}" && export NPM_CONFIG_REGISTRY="${registryUrl}" &&`
const installEnvPrefix = isWin
? `set "BUN_INSTALL=${bunInstallPath}" && set "NPM_CONFIG_REGISTRY=${registryUrl}" &&`
: `export BUN_INSTALL="${bunInstallPath}" && export NPM_CONFIG_REGISTRY="${registryUrl}" &&`
const updateCommand = `${installEnvPrefix} "${bunPath}" install -g ${packageName}`
logger.info(`Executing update command: ${updateCommand}`)
@ -237,7 +528,7 @@ class CodeToolsService {
_model: string,
directory: string,
env: Record<string, string>,
options: { autoUpdateToLatest?: boolean } = {}
options: { autoUpdateToLatest?: boolean; terminal?: string } = {}
) {
logger.info(`Starting CLI tool launch: ${cliTool} in directory: ${directory}`)
logger.debug(`Environment variables:`, Object.keys(env))
@ -247,7 +538,7 @@ class CodeToolsService {
const bunPath = await this.getBunPath()
const executableName = await this.getCliExecutableName(cliTool)
const binDir = path.join(os.homedir(), '.cherrystudio', 'bin')
const executablePath = path.join(binDir, executableName + (process.platform === 'win32' ? '.exe' : ''))
const executablePath = path.join(binDir, executableName + (isWin ? '.exe' : ''))
logger.debug(`Package name: ${packageName}`)
logger.debug(`Bun path: ${bunPath}`)
@ -291,7 +582,13 @@ class CodeToolsService {
// Build environment variable prefix (based on platform)
const buildEnvPrefix = (isWindows: boolean) => {
if (Object.keys(env).length === 0) return ''
if (Object.keys(env).length === 0) {
logger.info('No environment variables to set')
return ''
}
logger.info('Setting environment variables:', Object.keys(env))
logger.info('Environment variable values:', env)
if (isWindows) {
// Windows uses set command
@ -300,14 +597,48 @@ class CodeToolsService {
.join(' && ')
} else {
// Unix-like systems use export command
return Object.entries(env)
.map(([key, value]) => `export ${key}="${value.replace(/"/g, '\\"')}"`)
const validEntries = Object.entries(env).filter(([key, value]) => {
if (!key || key.trim() === '') {
return false
}
if (value === undefined || value === null) {
return false
}
return true
})
const envCommands = validEntries
.map(([key, value]) => {
const sanitizedValue = String(value).replace(/\\/g, '\\\\').replace(/"/g, '\\"')
const exportCmd = `export ${key}="${sanitizedValue}"`
logger.info(`Setting env var: ${key}="${sanitizedValue}"`)
logger.info(`Export command: ${exportCmd}`)
return exportCmd
})
.join(' && ')
return envCommands
}
}
// Build command to execute
let baseCommand = isWin ? `"${executablePath}"` : `"${bunPath}" "${executablePath}"`
// Add configuration parameters for OpenAI Codex
if (cliTool === codeTools.openaiCodex && env.OPENAI_MODEL_PROVIDER && env.OPENAI_MODEL_PROVIDER != 'openai') {
const provider = env.OPENAI_MODEL_PROVIDER
const model = env.OPENAI_MODEL
// strip a trailing slash from the base URL
const baseUrl = env.OPENAI_BASE_URL.replace(/\/$/, '')
const configParams = [
`--config model_provider="${provider}"`,
`--config model="${model}"`,
`--config model_providers.${provider}.name="${provider}"`,
`--config model_providers.${provider}.base_url="${baseUrl}"`,
`--config model_providers.${provider}.env_key="OPENAI_API_KEY"`
].join(' ')
baseCommand = `${baseCommand} ${configParams}`
}
const bunInstallPath = path.join(os.homedir(), '.cherrystudio')
if (isInstalled) {
@ -329,20 +660,20 @@ class CodeToolsService {
switch (platform) {
case 'darwin': {
// macOS - Use osascript to launch terminal and execute command directly, without showing startup command
// macOS - Support multiple terminals
const envPrefix = buildEnvPrefix(false)
const command = envPrefix ? `${envPrefix} && ${baseCommand}` : baseCommand
// Combine directory change with the main command to ensure they execute in the same shell session
const fullCommand = `cd '${directory.replace(/'/g, "\\'")}' && clear && ${command}`
terminalCommand = 'osascript'
terminalArgs = [
'-e',
`tell application "Terminal"
do script "${fullCommand.replace(/"/g, '\\"')}"
activate
end tell`
]
const command = envPrefix ? `${envPrefix} && ${baseCommand}` : baseCommand
// Combine directory change with the main command to ensure they execute in the same shell session
const fullCommand = `cd "${directory.replace(/"/g, '\\"')}" && clear && ${command}`
const terminalConfig = await this.getTerminalConfig(options.terminal)
logger.info(`Using terminal: ${terminalConfig.name} (${terminalConfig.id})`)
const { command: cmd, args } = terminalConfig.command(directory, fullCommand)
terminalCommand = cmd
terminalArgs = args
break
}
case 'win32': {
@ -402,9 +733,23 @@ end tell`
throw new Error(`Failed to create launch script: ${error}`)
}
// Launch bat file - Use safest start syntax, no title parameter
terminalCommand = 'cmd'
terminalArgs = ['/c', 'start', batFilePath]
// Use selected terminal configuration
const terminalConfig = await this.getTerminalConfig(options.terminal)
logger.info(`Using terminal: ${terminalConfig.name} (${terminalConfig.id})`)
// Get command and args from terminal configuration
// Pass the bat file path as the command to execute
const fullCommand = batFilePath
const { command: cmd, args } = terminalConfig.command(directory, fullCommand)
// Override if it's a custom terminal with a custom path
if (terminalConfig.customPath) {
terminalCommand = terminalConfig.customPath
terminalArgs = args
} else {
terminalCommand = cmd
terminalArgs = args
}
// Set cleanup task (delete temp file after 5 minutes)
setTimeout(() => {
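The terminal detection above probes candidates in parallel and caps each probe with a timeout via Promise.race and Promise.allSettled; a generic sketch of that pattern (the 5 s cap is an assumption) looks roughly like this:

// Generic sketch of the detection pattern used above: run probes in parallel,
// cap each with a timeout, and keep only the ones that resolved to a value.
async function probeAll<T>(probes: Array<() => Promise<T | null>>, timeoutMs = 5000): Promise<T[]> {
  const withTimeout = (p: Promise<T | null>) =>
    Promise.race([p, new Promise<never>((_, reject) => setTimeout(() => reject(new Error('timeout')), timeoutMs))])
  const results = await Promise.allSettled(probes.map((probe) => withTimeout(probe())))
  return results
    .filter((r): r is PromiseFulfilledResult<T | null> => r.status === 'fulfilled')
    .map((r) => r.value)
    .filter((v): v is T => v !== null)
}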

View File

@ -2,6 +2,7 @@ import { defaultLanguage, UpgradeChannel, ZOOM_SHORTCUTS } from '@shared/config/
import { LanguageVarious, Shortcut, ThemeMode } from '@types'
import { app } from 'electron'
import Store from 'electron-store'
import { v4 as uuidv4 } from 'uuid'
import { locales } from '../utils/locales'
@ -27,7 +28,8 @@ export enum ConfigKeys {
SelectionAssistantFilterList = 'selectionAssistantFilterList',
DisableHardwareAcceleration = 'disableHardwareAcceleration',
Proxy = 'proxy',
EnableDeveloperMode = 'enableDeveloperMode'
EnableDeveloperMode = 'enableDeveloperMode',
ClientId = 'clientId'
}
export class ConfigManager {
@ -241,6 +243,17 @@ export class ConfigManager {
this.set(ConfigKeys.EnableDeveloperMode, value)
}
getClientId(): string {
let clientId = this.get<string>(ConfigKeys.ClientId)
if (!clientId) {
clientId = uuidv4()
this.set(ConfigKeys.ClientId, clientId)
}
return clientId
}
set(key: string, value: unknown, isNotify: boolean = false) {
this.store.set(key, value)
isNotify && this.notifySubscribers(key, value)
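getClientId above lazily generates and persists a per-install identifier; a standalone sketch of the same pattern (the store name and key are assumptions):

// Sketch of the lazy-init pattern with electron-store + uuid; the store name is an assumption.
import Store from 'electron-store'
import { v4 as uuidv4 } from 'uuid'

const store = new Store({ name: 'example-config' })

function getOrCreateClientId(): string {
  let id = store.get('clientId') as string | undefined
  if (!id) {
    id = uuidv4()
    store.set('clientId', id)
  }
  return id
}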

View File

@ -1,4 +1,4 @@
/* eslint-disable no-case-declarations */
/* oxlint-disable no-case-declarations */
// ExportService
import fs from 'node:fs'

View File

@ -1,3 +1,18 @@
/**
* Knowledge Service - Manages knowledge bases using RAG (Retrieval-Augmented Generation)
*
* This service handles creation, management, and querying of knowledge bases from various sources
* including files, directories, URLs, sitemaps, and notes.
*
* Features:
* - Concurrent task processing with workload management
* - Multiple data source support
* - Vector database integration
*
* For detailed documentation, see:
* @see {@link ../../../docs/technical/KnowledgeService.md}
*/
import * as fs from 'node:fs'
import path from 'node:path'
@ -9,32 +24,87 @@ import { loggerService } from '@logger'
import Embeddings from '@main/knowledge/embedjs/embeddings/Embeddings'
import { addFileLoader } from '@main/knowledge/embedjs/loader'
import { NoteLoader } from '@main/knowledge/embedjs/loader/noteLoader'
import { preprocessingService } from '@main/knowledge/preprocess/PreprocessingService'
import PreprocessProvider from '@main/knowledge/preprocess/PreprocessProvider'
import Reranker from '@main/knowledge/reranker/Reranker'
import { fileStorage } from '@main/services/FileStorage'
import { windowService } from '@main/services/WindowService'
import { getDataPath } from '@main/utils'
import { getAllFiles } from '@main/utils/file'
import { TraceMethod } from '@mcp-trace/trace-core'
import { MB } from '@shared/config/constant'
import { LoaderReturn } from '@shared/config/types'
import type { LoaderReturn } from '@shared/config/types'
import { IpcChannel } from '@shared/IpcChannel'
import { FileMetadata, KnowledgeBaseParams, KnowledgeSearchResult } from '@types'
import { FileMetadata, KnowledgeBaseParams, KnowledgeItem, KnowledgeSearchResult } from '@types'
import { v4 as uuidv4 } from 'uuid'
import { windowService } from '../WindowService'
import {
IKnowledgeFramework,
KnowledgeBaseAddItemOptionsNonNullableAttribute,
LoaderDoneReturn,
LoaderTask,
LoaderTaskItem,
LoaderTaskItemState
} from './IKnowledgeFramework'
const logger = loggerService.withContext('MainKnowledgeService')
export class EmbedJsFramework implements IKnowledgeFramework {
private storageDir: string
private ragApplications: Map<string, RAGApplication> = new Map()
private pendingDeleteFile: string
private dbInstances: Map<string, LibSqlDb> = new Map()
export interface KnowledgeBaseAddItemOptions {
base: KnowledgeBaseParams
item: KnowledgeItem
forceReload?: boolean
userId?: string
}
interface KnowledgeBaseAddItemOptionsNonNullableAttribute {
base: KnowledgeBaseParams
item: KnowledgeItem
forceReload: boolean
userId: string
}
interface EvaluateTaskWorkload {
workload: number
}
type LoaderDoneReturn = LoaderReturn | null
enum LoaderTaskItemState {
PENDING,
PROCESSING,
DONE
}
interface LoaderTaskItem {
state: LoaderTaskItemState
task: () => Promise<unknown>
evaluateTaskWorkload: EvaluateTaskWorkload
}
interface LoaderTask {
loaderTasks: LoaderTaskItem[]
loaderDoneReturn: LoaderDoneReturn
}
interface LoaderTaskOfSet {
loaderTasks: Set<LoaderTaskItem>
loaderDoneReturn: LoaderDoneReturn
}
interface QueueTaskItem {
taskPromise: () => Promise<unknown>
resolve: () => void
evaluateTaskWorkload: EvaluateTaskWorkload
}
const loaderTaskIntoOfSet = (loaderTask: LoaderTask): LoaderTaskOfSet => {
return {
loaderTasks: new Set(loaderTask.loaderTasks),
loaderDoneReturn: loaderTask.loaderDoneReturn
}
}
class KnowledgeService {
private storageDir = path.join(getDataPath(), 'KnowledgeBase')
private pendingDeleteFile = path.join(this.storageDir, 'knowledge_pending_delete.json')
// Workload is measured in bytes
private workload = 0
private processingItemCount = 0
private knowledgeItemProcessingQueueMappingPromise: Map<LoaderTaskOfSet, () => void> = new Map()
private ragApplications: Map<string, RAGApplication> = new Map()
private dbInstances: Map<string, LibSqlDb> = new Map()
private static MAXIMUM_WORKLOAD = 80 * MB
private static MAXIMUM_PROCESSING_ITEM_COUNT = 30
private static ERROR_LOADER_RETURN: LoaderReturn = {
entriesAdded: 0,
uniqueId: '',
@ -43,9 +113,7 @@ export class EmbedJsFramework implements IKnowledgeFramework {
status: 'failed'
}
constructor(storageDir: string) {
this.storageDir = storageDir
this.pendingDeleteFile = path.join(this.storageDir, 'knowledge_pending_delete.json')
constructor() {
this.initStorageDir()
this.cleanupOnStartup()
}
@ -160,28 +228,33 @@ export class EmbedJsFramework implements IKnowledgeFramework {
logger.info(`Startup cleanup completed: ${deletedCount}/${pendingDeleteIds.length} knowledge bases deleted`)
}
private async getRagApplication(base: KnowledgeBaseParams): Promise<RAGApplication> {
if (this.ragApplications.has(base.id)) {
return this.ragApplications.get(base.id)!
private getRagApplication = async ({
id,
embedApiClient,
dimensions,
documentCount
}: KnowledgeBaseParams): Promise<RAGApplication> => {
if (this.ragApplications.has(id)) {
return this.ragApplications.get(id)!
}
let ragApplication: RAGApplication
const embeddings = new Embeddings({
embedApiClient: base.embedApiClient,
dimensions: base.dimensions
embedApiClient,
dimensions
})
try {
const libSqlDb = new LibSqlDb({ path: path.join(this.storageDir, base.id) })
const libSqlDb = new LibSqlDb({ path: path.join(this.storageDir, id) })
// Save database instance for later closing
this.dbInstances.set(base.id, libSqlDb)
this.dbInstances.set(id, libSqlDb)
ragApplication = await new RAGApplicationBuilder()
.setModel('NO_MODEL')
.setEmbeddingModel(embeddings)
.setVectorDatabase(libSqlDb)
.setSearchResultCount(base.documentCount || 30)
.setSearchResultCount(documentCount || 30)
.build()
this.ragApplications.set(base.id, ragApplication)
this.ragApplications.set(id, ragApplication)
} catch (e) {
logger.error('Failed to create RAGApplication:', e as Error)
throw new Error(`Failed to create RAGApplication: ${e}`)
@ -189,14 +262,17 @@ export class EmbedJsFramework implements IKnowledgeFramework {
return ragApplication
}
async initialize(base: KnowledgeBaseParams): Promise<void> {
public create = async (_: Electron.IpcMainInvokeEvent, base: KnowledgeBaseParams): Promise<void> => {
await this.getRagApplication(base)
}
async reset(base: KnowledgeBaseParams): Promise<void> {
const ragApp = await this.getRagApplication(base)
await ragApp.reset()
public reset = async (_: Electron.IpcMainInvokeEvent, base: KnowledgeBaseParams): Promise<void> => {
const ragApplication = await this.getRagApplication(base)
await ragApplication.reset()
}
async delete(id: string): Promise<void> {
public async delete(_: Electron.IpcMainInvokeEvent, id: string): Promise<void> {
logger.debug(`delete id: ${id}`)
await this.cleanupKnowledgeResources(id)
@ -209,41 +285,15 @@ export class EmbedJsFramework implements IKnowledgeFramework {
this.pendingDeleteManager.add(id)
}
}
getLoaderTask(options: KnowledgeBaseAddItemOptionsNonNullableAttribute): LoaderTask {
const { item } = options
const getRagApplication = () => this.getRagApplication(options.base)
switch (item.type) {
case 'file':
return this.fileTask(getRagApplication, options)
case 'directory':
return this.directoryTask(getRagApplication, options)
case 'url':
return this.urlTask(getRagApplication, options)
case 'sitemap':
return this.sitemapTask(getRagApplication, options)
case 'note':
return this.noteTask(getRagApplication, options)
default:
return {
loaderTasks: [],
loaderDoneReturn: null
}
}
}
async remove(options: { uniqueIds: string[]; base: KnowledgeBaseParams }): Promise<void> {
const ragApp = await this.getRagApplication(options.base)
for (const id of options.uniqueIds) {
await ragApp.deleteLoader(id)
}
private maximumLoad() {
return (
this.processingItemCount >= KnowledgeService.MAXIMUM_PROCESSING_ITEM_COUNT ||
this.workload >= KnowledgeService.MAXIMUM_WORKLOAD
)
}
async search(options: { search: string; base: KnowledgeBaseParams }): Promise<KnowledgeSearchResult[]> {
const ragApp = await this.getRagApplication(options.base)
return await ragApp.search(options.search)
}
private fileTask(
getRagApplication: () => Promise<RAGApplication>,
ragApplication: RAGApplication,
options: KnowledgeBaseAddItemOptionsNonNullableAttribute
): LoaderTask {
const { base, item, forceReload, userId } = options
@ -256,8 +306,7 @@ export class EmbedJsFramework implements IKnowledgeFramework {
task: async () => {
try {
// Add preprocessing logic
const ragApplication = await getRagApplication()
const fileToProcess: FileMetadata = await preprocessingService.preprocessFile(file, base, item, userId)
const fileToProcess: FileMetadata = await this.preprocessing(file, base, item, userId)
// Use processed file for loading
return addFileLoader(ragApplication, fileToProcess, base, forceReload)
@ -268,7 +317,7 @@ export class EmbedJsFramework implements IKnowledgeFramework {
.catch((e) => {
logger.error(`Error in addFileLoader for ${file.name}: ${e}`)
const errorResult: LoaderReturn = {
...EmbedJsFramework.ERROR_LOADER_RETURN,
...KnowledgeService.ERROR_LOADER_RETURN,
message: e.message,
messageSource: 'embedding'
}
@ -278,7 +327,7 @@ export class EmbedJsFramework implements IKnowledgeFramework {
} catch (e: any) {
logger.error(`Preprocessing failed for ${file.name}: ${e}`)
const errorResult: LoaderReturn = {
...EmbedJsFramework.ERROR_LOADER_RETURN,
...KnowledgeService.ERROR_LOADER_RETURN,
message: e.message,
messageSource: 'preprocess'
}
@ -295,7 +344,7 @@ export class EmbedJsFramework implements IKnowledgeFramework {
return loaderTask
}
private directoryTask(
getRagApplication: () => Promise<RAGApplication>,
ragApplication: RAGApplication,
options: KnowledgeBaseAddItemOptionsNonNullableAttribute
): LoaderTask {
const { base, item, forceReload } = options
@ -322,9 +371,8 @@ export class EmbedJsFramework implements IKnowledgeFramework {
for (const file of files) {
loaderTasks.push({
state: LoaderTaskItemState.PENDING,
task: async () => {
const ragApplication = await getRagApplication()
return addFileLoader(ragApplication, file, base, forceReload)
task: () =>
addFileLoader(ragApplication, file, base, forceReload)
.then((result) => {
loaderDoneReturn.entriesAdded += 1
processedFiles += 1
@ -335,12 +383,11 @@ export class EmbedJsFramework implements IKnowledgeFramework {
.catch((err) => {
logger.error('Failed to add dir loader:', err)
return {
...EmbedJsFramework.ERROR_LOADER_RETURN,
...KnowledgeService.ERROR_LOADER_RETURN,
message: `Failed to add dir loader: ${err.message}`,
messageSource: 'embedding'
}
})
},
}),
evaluateTaskWorkload: { workload: file.size }
})
}
@ -352,7 +399,7 @@ export class EmbedJsFramework implements IKnowledgeFramework {
}
private urlTask(
getRagApplication: () => Promise<RAGApplication>,
ragApplication: RAGApplication,
options: KnowledgeBaseAddItemOptionsNonNullableAttribute
): LoaderTask {
const { base, item, forceReload } = options
@ -362,8 +409,7 @@ export class EmbedJsFramework implements IKnowledgeFramework {
loaderTasks: [
{
state: LoaderTaskItemState.PENDING,
task: async () => {
const ragApplication = await getRagApplication()
task: () => {
const loaderReturn = ragApplication.addLoader(
new WebLoader({
urlOrContent: content,
@ -387,7 +433,7 @@ export class EmbedJsFramework implements IKnowledgeFramework {
.catch((err) => {
logger.error('Failed to add url loader:', err)
return {
...EmbedJsFramework.ERROR_LOADER_RETURN,
...KnowledgeService.ERROR_LOADER_RETURN,
message: `Failed to add url loader: ${err.message}`,
messageSource: 'embedding'
}
@ -402,7 +448,7 @@ export class EmbedJsFramework implements IKnowledgeFramework {
}
private sitemapTask(
getRagApplication: () => Promise<RAGApplication>,
ragApplication: RAGApplication,
options: KnowledgeBaseAddItemOptionsNonNullableAttribute
): LoaderTask {
const { base, item, forceReload } = options
@ -412,9 +458,8 @@ export class EmbedJsFramework implements IKnowledgeFramework {
loaderTasks: [
{
state: LoaderTaskItemState.PENDING,
task: async () => {
const ragApplication = await getRagApplication()
return ragApplication
task: () =>
ragApplication
.addLoader(
new SitemapLoader({ url: content, chunkSize: base.chunkSize, chunkOverlap: base.chunkOverlap }) as any,
forceReload
@ -432,12 +477,11 @@ export class EmbedJsFramework implements IKnowledgeFramework {
.catch((err) => {
logger.error('Failed to add sitemap loader:', err)
return {
...EmbedJsFramework.ERROR_LOADER_RETURN,
...KnowledgeService.ERROR_LOADER_RETURN,
message: `Failed to add sitemap loader: ${err.message}`,
messageSource: 'embedding'
}
})
},
}),
evaluateTaskWorkload: { workload: 20 * MB }
}
],
@ -447,7 +491,7 @@ export class EmbedJsFramework implements IKnowledgeFramework {
}
private noteTask(
getRagApplication: () => Promise<RAGApplication>,
ragApplication: RAGApplication,
options: KnowledgeBaseAddItemOptionsNonNullableAttribute
): LoaderTask {
const { base, item, forceReload } = options
@ -460,8 +504,7 @@ export class EmbedJsFramework implements IKnowledgeFramework {
loaderTasks: [
{
state: LoaderTaskItemState.PENDING,
task: async () => {
const ragApplication = await getRagApplication()
task: () => {
const loaderReturn = ragApplication.addLoader(
new NoteLoader({
text: content,
@ -484,7 +527,7 @@ export class EmbedJsFramework implements IKnowledgeFramework {
.catch((err) => {
logger.error('Failed to add note loader:', err)
return {
...EmbedJsFramework.ERROR_LOADER_RETURN,
...KnowledgeService.ERROR_LOADER_RETURN,
message: `Failed to add note loader: ${err.message}`,
messageSource: 'embedding'
}
@ -497,4 +540,199 @@ export class EmbedJsFramework implements IKnowledgeFramework {
}
return loaderTask
}
private processingQueueHandle() {
const getSubtasksUntilMaximumLoad = (): QueueTaskItem[] => {
const queueTaskList: QueueTaskItem[] = []
that: for (const [task, resolve] of this.knowledgeItemProcessingQueueMappingPromise) {
for (const item of task.loaderTasks) {
if (this.maximumLoad()) {
break that
}
const { state, task: taskPromise, evaluateTaskWorkload } = item
if (state !== LoaderTaskItemState.PENDING) {
continue
}
const { workload } = evaluateTaskWorkload
this.workload += workload
this.processingItemCount += 1
item.state = LoaderTaskItemState.PROCESSING
queueTaskList.push({
taskPromise: () =>
taskPromise().then(() => {
this.workload -= workload
this.processingItemCount -= 1
task.loaderTasks.delete(item)
if (task.loaderTasks.size === 0) {
this.knowledgeItemProcessingQueueMappingPromise.delete(task)
resolve()
}
this.processingQueueHandle()
}),
resolve: () => {},
evaluateTaskWorkload
})
}
}
return queueTaskList
}
const subTasks = getSubtasksUntilMaximumLoad()
if (subTasks.length > 0) {
const subTaskPromises = subTasks.map(({ taskPromise }) => taskPromise())
Promise.all(subTaskPromises).then(() => {
subTasks.forEach(({ resolve }) => resolve())
})
}
}
private appendProcessingQueue(task: LoaderTask): Promise<LoaderReturn> {
return new Promise((resolve) => {
this.knowledgeItemProcessingQueueMappingPromise.set(loaderTaskIntoOfSet(task), () => {
resolve(task.loaderDoneReturn!)
})
})
}
public add = (_: Electron.IpcMainInvokeEvent, options: KnowledgeBaseAddItemOptions): Promise<LoaderReturn> => {
return new Promise((resolve) => {
const { base, item, forceReload = false, userId = '' } = options
const optionsNonNullableAttribute = { base, item, forceReload, userId }
this.getRagApplication(base)
.then((ragApplication) => {
const task = (() => {
switch (item.type) {
case 'file':
return this.fileTask(ragApplication, optionsNonNullableAttribute)
case 'directory':
return this.directoryTask(ragApplication, optionsNonNullableAttribute)
case 'url':
return this.urlTask(ragApplication, optionsNonNullableAttribute)
case 'sitemap':
return this.sitemapTask(ragApplication, optionsNonNullableAttribute)
case 'note':
return this.noteTask(ragApplication, optionsNonNullableAttribute)
default:
return null
}
})()
if (task) {
this.appendProcessingQueue(task).then(() => {
resolve(task.loaderDoneReturn!)
})
this.processingQueueHandle()
} else {
resolve({
...KnowledgeService.ERROR_LOADER_RETURN,
message: 'Unsupported item type',
messageSource: 'embedding'
})
}
})
.catch((err) => {
logger.error('Failed to add item:', err)
resolve({
...KnowledgeService.ERROR_LOADER_RETURN,
message: `Failed to add item: ${err.message}`,
messageSource: 'embedding'
})
})
})
}
@TraceMethod({ spanName: 'remove', tag: 'Knowledge' })
public async remove(
_: Electron.IpcMainInvokeEvent,
{ uniqueId, uniqueIds, base }: { uniqueId: string; uniqueIds: string[]; base: KnowledgeBaseParams }
): Promise<void> {
const ragApplication = await this.getRagApplication(base)
logger.debug(`Remove Item UniqueId: ${uniqueId}`)
for (const id of uniqueIds) {
await ragApplication.deleteLoader(id)
}
}
@TraceMethod({ spanName: 'RagSearch', tag: 'Knowledge' })
public async search(
_: Electron.IpcMainInvokeEvent,
{ search, base }: { search: string; base: KnowledgeBaseParams }
): Promise<KnowledgeSearchResult[]> {
const ragApplication = await this.getRagApplication(base)
return await ragApplication.search(search)
}
@TraceMethod({ spanName: 'rerank', tag: 'Knowledge' })
public async rerank(
_: Electron.IpcMainInvokeEvent,
{ search, base, results }: { search: string; base: KnowledgeBaseParams; results: KnowledgeSearchResult[] }
): Promise<KnowledgeSearchResult[]> {
if (results.length === 0) {
return results
}
return await new Reranker(base).rerank(search, results)
}
public getStorageDir = (): string => {
return this.storageDir
}
private preprocessing = async (
file: FileMetadata,
base: KnowledgeBaseParams,
item: KnowledgeItem,
userId: string
): Promise<FileMetadata> => {
let fileToProcess: FileMetadata = file
if (base.preprocessProvider && file.ext.toLowerCase() === '.pdf') {
try {
const provider = new PreprocessProvider(base.preprocessProvider.provider, userId)
const filePath = fileStorage.getFilePathById(file)
// Check if file has already been preprocessed
const alreadyProcessed = await provider.checkIfAlreadyProcessed(file)
if (alreadyProcessed) {
logger.debug(`File already preprocessed, using cached result: ${filePath}`)
return alreadyProcessed
}
// Execute preprocessing
logger.debug(`Starting preprocessing for scanned PDF: ${filePath}`)
const { processedFile, quota } = await provider.parseFile(item.id, file)
fileToProcess = processedFile
const mainWindow = windowService.getMainWindow()
mainWindow?.webContents.send('file-preprocess-finished', {
itemId: item.id,
quota: quota
})
} catch (err) {
logger.error(`Preprocessing failed: ${err}`)
// Preprocessing failure is treated as fatal for this item; the original file is not used as a fallback
throw new Error(`Preprocessing failed: ${err}`)
}
}
return fileToProcess
}
public checkQuota = async (
_: Electron.IpcMainInvokeEvent,
base: KnowledgeBaseParams,
userId: string
): Promise<number> => {
try {
if (base.preprocessProvider && base.preprocessProvider.type === 'preprocess') {
const provider = new PreprocessProvider(base.preprocessProvider.provider, userId)
return await provider.checkQuota()
}
throw new Error('No preprocess provider configured')
} catch (err) {
logger.error(`Failed to check quota: ${err}`)
throw new Error(`Failed to check quota: ${err}`)
}
}
}
export default new KnowledgeService()
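The queue in this service admits pending work only while both caps hold (total workload under 80 MB and fewer than 30 in-flight items); a stripped-down sketch of that admission logic:

// Simplified sketch of the admission check used by processingQueueHandle above.
interface PendingTask {
  workload: number // bytes, mirroring evaluateTaskWorkload.workload
  run: () => Promise<unknown>
}

class WorkloadGate {
  private workload = 0
  private inFlight = 0
  constructor(
    private readonly maxWorkload = 80 * 1024 * 1024,
    private readonly maxInFlight = 30
  ) {}

  // Returns false when the task must wait; otherwise starts it and releases capacity when it settles.
  tryStart(task: PendingTask): boolean {
    if (this.inFlight >= this.maxInFlight || this.workload >= this.maxWorkload) return false
    this.workload += task.workload
    this.inFlight += 1
    task.run().finally(() => {
      this.workload -= task.workload
      this.inFlight -= 1
    })
    return true
  }
}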

View File

@ -235,7 +235,7 @@ class McpService {
try {
await inMemoryServer.connect(serverTransport)
getServerLogger(server).debug(`In-memory server started`)
} catch (error: Error | any) {
} catch (error: any) {
getServerLogger(server).error(`Error starting in-memory server`, error as Error)
throw new Error(`Failed to start in-memory server: ${error.message}`)
}
@ -419,7 +419,7 @@ class McpService {
const transport = await initTransport()
try {
await client.connect(transport)
} catch (error: Error | any) {
} catch (error: any) {
if (
error instanceof Error &&
(error.name === 'UnauthorizedError' || error.message.includes('Unauthorized'))
@ -852,7 +852,7 @@ class McpService {
return {
contents: contents
}
} catch (error: Error | any) {
} catch (error: any) {
getServerLogger(server, { uri }).error(`Failed to get resource`, error as Error)
throw new Error(`Failed to get resource ${uri} from server: ${server.name}: ${error.message}`)
}

View File

@ -5,7 +5,7 @@ export class MistralClientManager {
private static instance: MistralClientManager
private client: Mistral | null = null
// eslint-disable-next-line @typescript-eslint/no-empty-function
// oxlint-disable-next-line @typescript-eslint/no-empty-function
private constructor() {}
public static getInstance(): MistralClientManager {

View File

@ -1,5 +1,5 @@
import { Notification } from '@types'
import { Notification as ElectronNotification } from 'electron'
import { Notification } from 'src/renderer/src/types/notification'
import { windowService } from './WindowService'

View File

@ -235,7 +235,7 @@ export class ProxyManager {
https.request = this.bindHttpMethod(this.originalHttpsRequest, agent)
}
// eslint-disable-next-line @typescript-eslint/no-unsafe-function-type
// oxlint-disable-next-line @typescript-eslint/no-unsafe-function-type
private bindHttpMethod(originalMethod: Function, agent: http.Agent | https.Agent) {
return (...args: any[]) => {
let url: string | URL | undefined

View File

@ -256,7 +256,7 @@ export class WindowService {
private setupWebContentsHandlers(mainWindow: BrowserWindow) {
mainWindow.webContents.on('will-navigate', (event, url) => {
if (url.includes('localhost:5173')) {
if (url.includes('localhost:517')) {
return
}
@ -275,7 +275,8 @@ export class WindowService {
'https://aihubmix.com/topup',
'https://aihubmix.com/statistics',
'https://dash.302.ai/sso/login',
'https://dash.302.ai/charge'
'https://dash.302.ai/charge',
'https://www.aiionly.com/login'
]
if (oauthProviderUrls.some((link) => url.startsWith(link))) {

View File

@ -0,0 +1,319 @@
import { UpdateInfo } from 'builder-util-runtime'
import { beforeEach, describe, expect, it, vi } from 'vitest'
// Mock dependencies
vi.mock('@logger', () => ({
loggerService: {
withContext: () => ({
info: vi.fn(),
error: vi.fn(),
warn: vi.fn()
})
}
}))
vi.mock('../ConfigManager', () => ({
configManager: {
getLanguage: vi.fn(),
getAutoUpdate: vi.fn(() => false),
getTestPlan: vi.fn(() => false),
getTestChannel: vi.fn(),
getClientId: vi.fn(() => 'test-client-id')
}
}))
vi.mock('../WindowService', () => ({
windowService: {
getMainWindow: vi.fn()
}
}))
vi.mock('@main/constant', () => ({
isWin: false
}))
vi.mock('@main/utils/ipService', () => ({
getIpCountry: vi.fn(() => 'US')
}))
vi.mock('@main/utils/locales', () => ({
locales: {
en: { translation: { update: {} } },
'zh-CN': { translation: { update: {} } }
}
}))
vi.mock('@main/utils/systemInfo', () => ({
generateUserAgent: vi.fn(() => 'test-user-agent')
}))
vi.mock('electron', () => ({
app: {
isPackaged: true,
getVersion: vi.fn(() => '1.0.0'),
getPath: vi.fn(() => '/test/path')
},
dialog: {
showMessageBox: vi.fn()
},
BrowserWindow: vi.fn(),
net: {
fetch: vi.fn()
}
}))
vi.mock('electron-updater', () => ({
autoUpdater: {
logger: null,
forceDevUpdateConfig: false,
autoDownload: false,
autoInstallOnAppQuit: false,
requestHeaders: {},
on: vi.fn(),
setFeedURL: vi.fn(),
checkForUpdates: vi.fn(),
downloadUpdate: vi.fn(),
quitAndInstall: vi.fn(),
channel: '',
allowDowngrade: false,
disableDifferentialDownload: false,
currentVersion: '1.0.0'
},
Logger: vi.fn(),
NsisUpdater: vi.fn(),
AppUpdater: vi.fn()
}))
// Import after mocks
import AppUpdater from '../AppUpdater'
import { configManager } from '../ConfigManager'
describe('AppUpdater', () => {
let appUpdater: AppUpdater
beforeEach(() => {
vi.clearAllMocks()
appUpdater = new AppUpdater()
})
describe('parseMultiLangReleaseNotes', () => {
const sampleReleaseNotes = `<!--LANG:en-->
🚀 New Features:
- Feature A
- Feature B
🎨 UI Improvements:
- Improvement A
<!--LANG:zh-CN-->
🚀 新功能:
- 功能 A
- 功能 B
🎨 界面改进:
- 改进 A
<!--LANG:END-->`
it('should return Chinese notes for zh-CN users', () => {
vi.mocked(configManager.getLanguage).mockReturnValue('zh-CN')
const result = (appUpdater as any).parseMultiLangReleaseNotes(sampleReleaseNotes)
expect(result).toContain('新功能')
expect(result).toContain('功能 A')
expect(result).not.toContain('New Features')
})
it('should return Chinese notes for zh-TW users', () => {
vi.mocked(configManager.getLanguage).mockReturnValue('zh-TW')
const result = (appUpdater as any).parseMultiLangReleaseNotes(sampleReleaseNotes)
expect(result).toContain('新功能')
expect(result).toContain('功能 A')
expect(result).not.toContain('New Features')
})
it('should return English notes for non-Chinese users', () => {
vi.mocked(configManager.getLanguage).mockReturnValue('en-US')
const result = (appUpdater as any).parseMultiLangReleaseNotes(sampleReleaseNotes)
expect(result).toContain('New Features')
expect(result).toContain('Feature A')
expect(result).not.toContain('新功能')
})
it('should return English notes for other language users', () => {
vi.mocked(configManager.getLanguage).mockReturnValue('ru-RU')
const result = (appUpdater as any).parseMultiLangReleaseNotes(sampleReleaseNotes)
expect(result).toContain('New Features')
expect(result).not.toContain('新功能')
})
it('should handle missing language sections gracefully', () => {
const malformedNotes = 'Simple release notes without markers'
const result = (appUpdater as any).parseMultiLangReleaseNotes(malformedNotes)
expect(result).toBe('Simple release notes without markers')
})
it('should handle malformed markers', () => {
const malformedNotes = `<!--LANG:en-->English only`
vi.mocked(configManager.getLanguage).mockReturnValue('zh-CN')
const result = (appUpdater as any).parseMultiLangReleaseNotes(malformedNotes)
// Should clean up markers and return cleaned content
expect(result).toContain('English only')
expect(result).not.toContain('<!--LANG:')
})
it('should handle empty release notes', () => {
const result = (appUpdater as any).parseMultiLangReleaseNotes('')
expect(result).toBe('')
})
it('should handle errors gracefully', () => {
// Force an error by mocking configManager to throw
vi.mocked(configManager.getLanguage).mockImplementation(() => {
throw new Error('Test error')
})
const result = (appUpdater as any).parseMultiLangReleaseNotes(sampleReleaseNotes)
// Should return original notes as fallback
expect(result).toBe(sampleReleaseNotes)
})
})
describe('hasMultiLanguageMarkers', () => {
it('should return true when markers are present', () => {
const notes = '<!--LANG:en-->Test'
const result = (appUpdater as any).hasMultiLanguageMarkers(notes)
expect(result).toBe(true)
})
it('should return false when no markers are present', () => {
const notes = 'Simple text without markers'
const result = (appUpdater as any).hasMultiLanguageMarkers(notes)
expect(result).toBe(false)
})
})
describe('processReleaseInfo', () => {
it('should process multi-language release notes in string format', () => {
vi.mocked(configManager.getLanguage).mockReturnValue('zh-CN')
const releaseInfo = {
version: '1.0.0',
files: [],
path: '',
sha512: '',
releaseDate: new Date().toISOString(),
releaseNotes: `<!--LANG:en-->English notes<!--LANG:zh-CN-->中文说明<!--LANG:END-->`
} as UpdateInfo
const result = (appUpdater as any).processReleaseInfo(releaseInfo)
expect(result.releaseNotes).toBe('中文说明')
})
it('should not process release notes without markers', () => {
const releaseInfo = {
version: '1.0.0',
files: [],
path: '',
sha512: '',
releaseDate: new Date().toISOString(),
releaseNotes: 'Simple release notes'
} as UpdateInfo
const result = (appUpdater as any).processReleaseInfo(releaseInfo)
expect(result.releaseNotes).toBe('Simple release notes')
})
it('should handle array format release notes', () => {
const releaseInfo = {
version: '1.0.0',
files: [],
path: '',
sha512: '',
releaseDate: new Date().toISOString(),
releaseNotes: [
{ version: '1.0.0', note: 'Note 1' },
{ version: '1.0.1', note: 'Note 2' }
]
} as UpdateInfo
const result = (appUpdater as any).processReleaseInfo(releaseInfo)
expect(result.releaseNotes).toEqual(releaseInfo.releaseNotes)
})
it('should handle null release notes', () => {
const releaseInfo = {
version: '1.0.0',
files: [],
path: '',
sha512: '',
releaseDate: new Date().toISOString(),
releaseNotes: null
} as UpdateInfo
const result = (appUpdater as any).processReleaseInfo(releaseInfo)
expect(result.releaseNotes).toBeNull()
})
})
describe('formatReleaseNotes', () => {
it('should format string release notes with markers', () => {
vi.mocked(configManager.getLanguage).mockReturnValue('en-US')
const notes = `<!--LANG:en-->English<!--LANG:zh-CN-->中文<!--LANG:END-->`
const result = (appUpdater as any).formatReleaseNotes(notes)
expect(result).toBe('English')
})
it('should format string release notes without markers', () => {
const notes = 'Simple notes'
const result = (appUpdater as any).formatReleaseNotes(notes)
expect(result).toBe('Simple notes')
})
it('should format array release notes', () => {
const notes = [
{ version: '1.0.0', note: 'Note 1' },
{ version: '1.0.1', note: 'Note 2' }
]
const result = (appUpdater as any).formatReleaseNotes(notes)
expect(result).toBe('Note 1\nNote 2')
})
it('should handle null release notes', () => {
const result = (appUpdater as any).formatReleaseNotes(null)
expect(result).toBe('')
})
it('should handle undefined release notes', () => {
const result = (appUpdater as any).formatReleaseNotes(undefined)
expect(result).toBe('')
})
})
})

View File

@ -1,72 +0,0 @@
import { LoaderReturn } from '@shared/config/types'
import { KnowledgeBaseParams, KnowledgeItem, KnowledgeSearchResult } from '@types'
export interface KnowledgeBaseAddItemOptions {
base: KnowledgeBaseParams
item: KnowledgeItem
forceReload?: boolean
userId?: string
}
export interface KnowledgeBaseAddItemOptionsNonNullableAttribute {
base: KnowledgeBaseParams
item: KnowledgeItem
forceReload: boolean
userId: string
}
export interface EvaluateTaskWorkload {
workload: number
}
export type LoaderDoneReturn = LoaderReturn | null
export enum LoaderTaskItemState {
PENDING,
PROCESSING,
DONE
}
export interface LoaderTaskItem {
state: LoaderTaskItemState
task: () => Promise<unknown>
evaluateTaskWorkload: EvaluateTaskWorkload
}
export interface LoaderTask {
loaderTasks: LoaderTaskItem[]
loaderDoneReturn: LoaderDoneReturn
}
export interface LoaderTaskOfSet {
loaderTasks: Set<LoaderTaskItem>
loaderDoneReturn: LoaderDoneReturn
}
export interface QueueTaskItem {
taskPromise: () => Promise<unknown>
resolve: () => void
evaluateTaskWorkload: EvaluateTaskWorkload
}
export const loaderTaskIntoOfSet = (loaderTask: LoaderTask): LoaderTaskOfSet => {
return {
loaderTasks: new Set(loaderTask.loaderTasks),
loaderDoneReturn: loaderTask.loaderDoneReturn
}
}
export interface IKnowledgeFramework {
/** Initialize framework resources for the given knowledge base */
initialize(base: KnowledgeBaseParams): Promise<void>
/** Reset the knowledge base, deleting all of its content */
reset(base: KnowledgeBaseParams): Promise<void>
/** Delete resources associated with the knowledge base, including files */
delete(id: string): Promise<void>
/** Build the task object for adding an item, to be processed by the queue */
getLoaderTask(options: KnowledgeBaseAddItemOptionsNonNullableAttribute): LoaderTask
/** Remove specific items from the knowledge base */
remove(options: { uniqueIds: string[]; base: KnowledgeBaseParams }): Promise<void>
/** Search the knowledge base */
search(options: { search: string; base: KnowledgeBaseParams }): Promise<KnowledgeSearchResult[]>
}

View File

@ -1,48 +0,0 @@
import path from 'node:path'
import { KnowledgeBaseParams } from '@types'
import { app } from 'electron'
import { EmbedJsFramework } from './EmbedJsFramework'
import { IKnowledgeFramework } from './IKnowledgeFramework'
import { LangChainFramework } from './LangChainFramework'
class KnowledgeFrameworkFactory {
private static instance: KnowledgeFrameworkFactory
private frameworks: Map<string, IKnowledgeFramework> = new Map()
private storageDir: string
private constructor(storageDir: string) {
this.storageDir = storageDir
}
public static getInstance(storageDir: string): KnowledgeFrameworkFactory {
if (!KnowledgeFrameworkFactory.instance) {
KnowledgeFrameworkFactory.instance = new KnowledgeFrameworkFactory(storageDir)
}
return KnowledgeFrameworkFactory.instance
}
public getFramework(base: KnowledgeBaseParams): IKnowledgeFramework {
const frameworkType = base.framework || 'embedjs' // Default to embedjs when not specified
if (this.frameworks.has(frameworkType)) {
return this.frameworks.get(frameworkType)!
}
let framework: IKnowledgeFramework
switch (frameworkType) {
case 'langchain':
framework = new LangChainFramework(this.storageDir)
break
case 'embedjs':
default:
framework = new EmbedJsFramework(this.storageDir)
break
}
this.frameworks.set(frameworkType, framework)
return framework
}
}
export const knowledgeFrameworkFactory = KnowledgeFrameworkFactory.getInstance(
path.join(app.getPath('userData'), 'Data', 'KnowledgeBase')
)

View File

@ -1,190 +0,0 @@
import * as fs from 'node:fs'
import path from 'node:path'
import { loggerService } from '@logger'
import { preprocessingService } from '@main/knowledge/preprocess/PreprocessingService'
import Reranker from '@main/knowledge/reranker/Reranker'
import { TraceMethod } from '@mcp-trace/trace-core'
import { MB } from '@shared/config/constant'
import { LoaderReturn } from '@shared/config/types'
import { KnowledgeBaseParams, KnowledgeSearchResult } from '@types'
import { app } from 'electron'
import {
KnowledgeBaseAddItemOptions,
LoaderTask,
loaderTaskIntoOfSet,
LoaderTaskItemState,
LoaderTaskOfSet,
QueueTaskItem
} from './IKnowledgeFramework'
import { knowledgeFrameworkFactory } from './KnowledgeFrameworkFactory'
const logger = loggerService.withContext('MainKnowledgeService')
class KnowledgeService {
private storageDir = path.join(app.getPath('userData'), 'Data', 'KnowledgeBase')
private workload = 0
private processingItemCount = 0
private knowledgeItemProcessingQueueMappingPromise: Map<LoaderTaskOfSet, () => void> = new Map()
private static MAXIMUM_WORKLOAD = 80 * MB
private static MAXIMUM_PROCESSING_ITEM_COUNT = 30
private static ERROR_LOADER_RETURN: LoaderReturn = {
entriesAdded: 0,
uniqueId: '',
uniqueIds: [''],
loaderType: '',
status: 'failed'
}
constructor() {
this.initStorageDir()
}
private initStorageDir = (): void => {
if (!fs.existsSync(this.storageDir)) {
fs.mkdirSync(this.storageDir, { recursive: true })
}
}
private maximumLoad() {
return (
this.processingItemCount >= KnowledgeService.MAXIMUM_PROCESSING_ITEM_COUNT ||
this.workload >= KnowledgeService.MAXIMUM_WORKLOAD
)
}
private processingQueueHandle() {
const getSubtasksUntilMaximumLoad = (): QueueTaskItem[] => {
const queueTaskList: QueueTaskItem[] = []
that: for (const [task, resolve] of this.knowledgeItemProcessingQueueMappingPromise) {
for (const item of task.loaderTasks) {
if (this.maximumLoad()) {
break that
}
const { state, task: taskPromise, evaluateTaskWorkload } = item
if (state !== LoaderTaskItemState.PENDING) {
continue
}
const { workload } = evaluateTaskWorkload
this.workload += workload
this.processingItemCount += 1
item.state = LoaderTaskItemState.PROCESSING
queueTaskList.push({
taskPromise: () =>
taskPromise().then(() => {
this.workload -= workload
this.processingItemCount -= 1
task.loaderTasks.delete(item)
if (task.loaderTasks.size === 0) {
this.knowledgeItemProcessingQueueMappingPromise.delete(task)
resolve()
}
this.processingQueueHandle()
}),
resolve: () => {},
evaluateTaskWorkload
})
}
}
return queueTaskList
}
const subTasks = getSubtasksUntilMaximumLoad()
if (subTasks.length > 0) {
const subTaskPromises = subTasks.map(({ taskPromise }) => taskPromise())
Promise.all(subTaskPromises).then(() => {
subTasks.forEach(({ resolve }) => resolve())
})
}
}
private appendProcessingQueue(task: LoaderTask): Promise<LoaderReturn> {
return new Promise((resolve) => {
this.knowledgeItemProcessingQueueMappingPromise.set(loaderTaskIntoOfSet(task), () => {
resolve(task.loaderDoneReturn!)
})
})
}
public async create(_: Electron.IpcMainInvokeEvent, base: KnowledgeBaseParams): Promise<void> {
logger.info(`Creating knowledge base: ${JSON.stringify(base)}`)
const framework = knowledgeFrameworkFactory.getFramework(base)
await framework.initialize(base)
}
public async reset(_: Electron.IpcMainInvokeEvent, base: KnowledgeBaseParams): Promise<void> {
const framework = knowledgeFrameworkFactory.getFramework(base)
await framework.reset(base)
}
public async delete(_: Electron.IpcMainInvokeEvent, base: KnowledgeBaseParams, id: string): Promise<void> {
logger.info(`Deleting knowledge base: ${JSON.stringify(base)}`)
const framework = knowledgeFrameworkFactory.getFramework(base)
await framework.delete(id)
}
public add = async (_: Electron.IpcMainInvokeEvent, options: KnowledgeBaseAddItemOptions): Promise<LoaderReturn> => {
logger.info(`Adding item to knowledge base: ${JSON.stringify(options)}`)
return new Promise((resolve) => {
const { base, item, forceReload = false, userId = '' } = options
const framework = knowledgeFrameworkFactory.getFramework(base)
const task = framework.getLoaderTask({ base, item, forceReload, userId })
if (task) {
this.appendProcessingQueue(task).then(() => {
resolve(task.loaderDoneReturn!)
})
this.processingQueueHandle()
} else {
resolve({
...KnowledgeService.ERROR_LOADER_RETURN,
message: 'Unsupported item type',
messageSource: 'embedding'
})
}
})
}
public async remove(
_: Electron.IpcMainInvokeEvent,
{ uniqueIds, base }: { uniqueIds: string[]; base: KnowledgeBaseParams }
): Promise<void> {
logger.info(`Removing items from knowledge base: ${JSON.stringify({ uniqueIds, base })}`)
const framework = knowledgeFrameworkFactory.getFramework(base)
await framework.remove({ uniqueIds, base })
}
public async search(
_: Electron.IpcMainInvokeEvent,
{ search, base }: { search: string; base: KnowledgeBaseParams }
): Promise<KnowledgeSearchResult[]> {
logger.info(`Searching knowledge base: ${JSON.stringify({ search, base })}`)
const framework = knowledgeFrameworkFactory.getFramework(base)
return framework.search({ search, base })
}
@TraceMethod({ spanName: 'rerank', tag: 'Knowledge' })
public async rerank(
_: Electron.IpcMainInvokeEvent,
{ search, base, results }: { search: string; base: KnowledgeBaseParams; results: KnowledgeSearchResult[] }
): Promise<KnowledgeSearchResult[]> {
logger.info(`Reranking knowledge base: ${JSON.stringify({ search, base, results })}`)
if (results.length === 0) {
return results
}
return await new Reranker(base).rerank(search, results)
}
public getStorageDir = (): string => {
return this.storageDir
}
public async checkQuota(_: Electron.IpcMainInvokeEvent, base: KnowledgeBaseParams, userId: string): Promise<number> {
return preprocessingService.checkQuota(base, userId)
}
}
export default new KnowledgeService()
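The queue above admits pending sub-tasks until either the byte-workload cap (80 MB) or the in-flight item cap (30) is reached, and re-runs the admission check whenever a sub-task completes. A stripped-down, standalone sketch of that admission rule (hypothetical names, not the service API):

type Job = { workload: number; run: () => Promise<void> }

const MAX_WORKLOAD = 80 * 1024 * 1024 // bytes, mirrors MAXIMUM_WORKLOAD (80 * MB)
const MAX_ITEMS = 30 // mirrors MAXIMUM_PROCESSING_ITEM_COUNT

let workload = 0
let running = 0
const pending: Job[] = []

// Admit pending jobs until either cap is hit; each completion frees capacity and re-runs the check.
function pump(): void {
  while (pending.length > 0 && workload < MAX_WORKLOAD && running < MAX_ITEMS) {
    const job = pending.shift()!
    workload += job.workload
    running += 1
    job.run().finally(() => {
      workload -= job.workload
      running -= 1
      pump()
    })
  }
}

function enqueue(job: Job): void {
  pending.push(job)
  pump()
}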

View File

@ -1,557 +0,0 @@
import * as fs from 'node:fs'
import path from 'node:path'
import { FaissStore } from '@langchain/community/vectorstores/faiss'
import type { Document } from '@langchain/core/documents'
import { loggerService } from '@logger'
import TextEmbeddings from '@main/knowledge/langchain/embeddings/TextEmbeddings'
import {
addFileLoader,
addNoteLoader,
addSitemapLoader,
addVideoLoader,
addWebLoader
} from '@main/knowledge/langchain/loader'
import { RetrieverFactory } from '@main/knowledge/langchain/retriever'
import { preprocessingService } from '@main/knowledge/preprocess/PreprocessingService'
import { getAllFiles } from '@main/utils/file'
import { getUrlSource } from '@main/utils/knowledge'
import { MB } from '@shared/config/constant'
import { LoaderReturn } from '@shared/config/types'
import { IpcChannel } from '@shared/IpcChannel'
import {
FileMetadata,
isKnowledgeDirectoryItem,
isKnowledgeFileItem,
isKnowledgeNoteItem,
isKnowledgeSitemapItem,
isKnowledgeUrlItem,
isKnowledgeVideoItem,
KnowledgeBaseParams,
KnowledgeSearchResult
} from '@types'
import { uuidv4 } from 'zod'
import { windowService } from '../WindowService'
import {
IKnowledgeFramework,
KnowledgeBaseAddItemOptionsNonNullableAttribute,
LoaderDoneReturn,
LoaderTask,
LoaderTaskItem,
LoaderTaskItemState
} from './IKnowledgeFramework'
const logger = loggerService.withContext('LangChainFramework')
export class LangChainFramework implements IKnowledgeFramework {
private storageDir: string
private static ERROR_LOADER_RETURN: LoaderReturn = {
entriesAdded: 0,
uniqueId: '',
uniqueIds: [''],
loaderType: '',
status: 'failed'
}
constructor(storageDir: string) {
this.storageDir = storageDir
this.initStorageDir()
}
private initStorageDir = (): void => {
if (!fs.existsSync(this.storageDir)) {
fs.mkdirSync(this.storageDir, { recursive: true })
}
}
private async createDatabase(base: KnowledgeBaseParams): Promise<void> {
const dbPath = path.join(this.storageDir, base.id)
const embeddings = this.getEmbeddings(base)
const vectorStore = new FaissStore(embeddings, {})
const mockDocument: Document = {
pageContent: 'Create Database Document',
metadata: {}
}
await vectorStore.addDocuments([mockDocument], { ids: ['1'] })
await vectorStore.save(dbPath)
await vectorStore.delete({ ids: ['1'] })
await vectorStore.save(dbPath)
}
private getEmbeddings(base: KnowledgeBaseParams): TextEmbeddings {
return new TextEmbeddings({
embedApiClient: base.embedApiClient,
dimensions: base.dimensions
})
}
private async getVectorStore(base: KnowledgeBaseParams): Promise<FaissStore> {
const embeddings = this.getEmbeddings(base)
const vectorStore = await FaissStore.load(path.join(this.storageDir, base.id), embeddings)
return vectorStore
}
async initialize(base: KnowledgeBaseParams): Promise<void> {
await this.createDatabase(base)
}
async reset(base: KnowledgeBaseParams): Promise<void> {
const dbPath = path.join(this.storageDir, base.id)
if (fs.existsSync(dbPath)) {
fs.rmSync(dbPath, { recursive: true })
}
// Immediately rebuild an empty index to avoid errors when loading it later
await this.createDatabase(base)
}
async delete(id: string): Promise<void> {
const dbPath = path.join(this.storageDir, id)
if (fs.existsSync(dbPath)) {
fs.rmSync(dbPath, { recursive: true })
}
}
getLoaderTask(options: KnowledgeBaseAddItemOptionsNonNullableAttribute): LoaderTask {
const { item } = options
const getStore = () => this.getVectorStore(options.base)
switch (item.type) {
case 'file':
return this.fileTask(getStore, options)
case 'directory':
return this.directoryTask(getStore, options)
case 'url':
return this.urlTask(getStore, options)
case 'sitemap':
return this.sitemapTask(getStore, options)
case 'note':
return this.noteTask(getStore, options)
case 'video':
return this.videoTask(getStore, options)
default:
return {
loaderTasks: [],
loaderDoneReturn: null
}
}
}
async remove(options: { uniqueIds: string[]; base: KnowledgeBaseParams }): Promise<void> {
const { uniqueIds, base } = options
const vectorStore = await this.getVectorStore(base)
logger.info(`[ KnowledgeService Remove Item UniqueIds: ${uniqueIds}]`)
await vectorStore.delete({ ids: uniqueIds })
await vectorStore.save(path.join(this.storageDir, base.id))
}
async search(options: { search: string; base: KnowledgeBaseParams }): Promise<KnowledgeSearchResult[]> {
const { search, base } = options
logger.info(`search base: ${JSON.stringify(base)}`)
try {
const vectorStore = await this.getVectorStore(base)
// For bm25 or hybrid mode, fetch all documents from the database
const documents: Document[] = await this.getAllDocuments(base)
if (documents.length === 0) return []
const retrieverFactory = new RetrieverFactory()
const retriever = retrieverFactory.createRetriever(base, vectorStore, documents)
const results = await retriever.invoke(search)
logger.info(`Search Results: ${JSON.stringify(results)}`)
// VectorStoreRetriever and EnsembleRetriever attach the score to metadata.score
// BM25Retriever does not return a score by default, so we need to handle that case
return results.map((item) => {
return {
pageContent: item.pageContent,
metadata: item.metadata,
// Provide a default value if metadata has no score
score: typeof item.metadata.score === 'number' ? item.metadata.score : 0
}
})
} catch (error: any) {
logger.error(`Error during search in knowledge base ${base.id}: ${error.message}`)
return []
}
}
private fileTask(
getVectorStore: () => Promise<FaissStore>,
options: KnowledgeBaseAddItemOptionsNonNullableAttribute
): LoaderTask {
const { base, item, userId } = options
if (!isKnowledgeFileItem(item)) {
logger.error(`Invalid item type for fileTask: expected 'file', got '${item.type}'`)
return {
loaderTasks: [],
loaderDoneReturn: {
...LangChainFramework.ERROR_LOADER_RETURN,
message: `Invalid item type: expected 'file', got '${item.type}'`,
messageSource: 'validation'
}
}
}
const file = item.content
const loaderTask: LoaderTask = {
loaderTasks: [
{
state: LoaderTaskItemState.PENDING,
task: async () => {
try {
const vectorStore = await getVectorStore()
// Apply preprocessing
const fileToProcess: FileMetadata = await preprocessingService.preprocessFile(file, base, item, userId)
// Load using the preprocessed file
return addFileLoader(base, vectorStore, fileToProcess)
.then((result) => {
loaderTask.loaderDoneReturn = result
return result
})
.then(async () => {
await vectorStore.save(path.join(this.storageDir, base.id))
})
.catch((e) => {
logger.error(`Error in addFileLoader for ${file.name}: ${e}`)
const errorResult: LoaderReturn = {
...LangChainFramework.ERROR_LOADER_RETURN,
message: e.message,
messageSource: 'embedding'
}
loaderTask.loaderDoneReturn = errorResult
return errorResult
})
} catch (e: any) {
logger.error(`Preprocessing failed for ${file.name}: ${e}`)
const errorResult: LoaderReturn = {
...LangChainFramework.ERROR_LOADER_RETURN,
message: e.message,
messageSource: 'preprocess'
}
loaderTask.loaderDoneReturn = errorResult
return errorResult
}
},
evaluateTaskWorkload: { workload: file.size }
}
],
loaderDoneReturn: null
}
return loaderTask
}
private directoryTask(
getVectorStore: () => Promise<FaissStore>,
options: KnowledgeBaseAddItemOptionsNonNullableAttribute
): LoaderTask {
const { base, item } = options
if (!isKnowledgeDirectoryItem(item)) {
logger.error(`Invalid item type for directoryTask: expected 'directory', got '${item.type}'`)
return {
loaderTasks: [],
loaderDoneReturn: {
...LangChainFramework.ERROR_LOADER_RETURN,
message: `Invalid item type: expected 'directory', got '${item.type}'`,
messageSource: 'validation'
}
}
}
const directory = item.content
const files = getAllFiles(directory)
const totalFiles = files.length
let processedFiles = 0
const sendDirectoryProcessingPercent = (totalFiles: number, processedFiles: number) => {
const mainWindow = windowService.getMainWindow()
mainWindow?.webContents.send(IpcChannel.DirectoryProcessingPercent, {
itemId: item.id,
percent: (processedFiles / totalFiles) * 100
})
}
const loaderDoneReturn: LoaderDoneReturn = {
entriesAdded: 0,
uniqueId: `DirectoryLoader_${uuidv4()}`,
uniqueIds: [],
loaderType: 'DirectoryLoader'
}
const loaderTasks: LoaderTaskItem[] = []
for (const file of files) {
loaderTasks.push({
state: LoaderTaskItemState.PENDING,
task: async () => {
const vectorStore = await getVectorStore()
return addFileLoader(base, vectorStore, file)
.then((result) => {
loaderDoneReturn.entriesAdded += 1
processedFiles += 1
sendDirectoryProcessingPercent(totalFiles, processedFiles)
loaderDoneReturn.uniqueIds.push(result.uniqueId)
return result
})
.then(async () => {
await vectorStore.save(path.join(this.storageDir, base.id))
})
.catch((err) => {
logger.error(err)
return {
...LangChainFramework.ERROR_LOADER_RETURN,
message: `Failed to add dir loader: ${err.message}`,
messageSource: 'embedding'
}
})
},
evaluateTaskWorkload: { workload: file.size }
})
}
return {
loaderTasks,
loaderDoneReturn
}
}
private urlTask(
getVectorStore: () => Promise<FaissStore>,
options: KnowledgeBaseAddItemOptionsNonNullableAttribute
): LoaderTask {
const { base, item } = options
if (!isKnowledgeUrlItem(item)) {
logger.error(`Invalid item type for urlTask: expected 'url', got '${item.type}'`)
return {
loaderTasks: [],
loaderDoneReturn: {
...LangChainFramework.ERROR_LOADER_RETURN,
message: `Invalid item type: expected 'url', got '${item.type}'`,
messageSource: 'validation'
}
}
}
const url = item.content
const loaderTask: LoaderTask = {
loaderTasks: [
{
state: LoaderTaskItemState.PENDING,
task: async () => {
// Load using the processed web page
const vectorStore = await getVectorStore()
return addWebLoader(base, vectorStore, url, getUrlSource(url))
.then((result) => {
loaderTask.loaderDoneReturn = result
return result
})
.then(async () => {
await vectorStore.save(path.join(this.storageDir, base.id))
})
.catch((e) => {
logger.error(`Error in addWebLoader for ${url}: ${e}`)
const errorResult: LoaderReturn = {
...LangChainFramework.ERROR_LOADER_RETURN,
message: e.message,
messageSource: 'embedding'
}
loaderTask.loaderDoneReturn = errorResult
return errorResult
})
},
evaluateTaskWorkload: { workload: 2 * MB }
}
],
loaderDoneReturn: null
}
return loaderTask
}
private sitemapTask(
getVectorStore: () => Promise<FaissStore>,
options: KnowledgeBaseAddItemOptionsNonNullableAttribute
): LoaderTask {
const { base, item } = options
if (!isKnowledgeSitemapItem(item)) {
logger.error(`Invalid item type for sitemapTask: expected 'sitemap', got '${item.type}'`)
return {
loaderTasks: [],
loaderDoneReturn: {
...LangChainFramework.ERROR_LOADER_RETURN,
message: `Invalid item type: expected 'sitemap', got '${item.type}'`,
messageSource: 'validation'
}
}
}
const url = item.content
const loaderTask: LoaderTask = {
loaderTasks: [
{
state: LoaderTaskItemState.PENDING,
task: async () => {
// Load using the processed web page
const vectorStore = await getVectorStore()
return addSitemapLoader(base, vectorStore, url)
.then((result) => {
loaderTask.loaderDoneReturn = result
return result
})
.then(async () => {
await vectorStore.save(path.join(this.storageDir, base.id))
})
.catch((e) => {
logger.error(`Error in addWebLoader for ${url}: ${e}`)
const errorResult: LoaderReturn = {
...LangChainFramework.ERROR_LOADER_RETURN,
message: e.message,
messageSource: 'embedding'
}
loaderTask.loaderDoneReturn = errorResult
return errorResult
})
},
evaluateTaskWorkload: { workload: 2 * MB }
}
],
loaderDoneReturn: null
}
return loaderTask
}
private noteTask(
getVectorStore: () => Promise<FaissStore>,
options: KnowledgeBaseAddItemOptionsNonNullableAttribute
): LoaderTask {
const { base, item } = options
if (!isKnowledgeNoteItem(item)) {
logger.error(`Invalid item type for noteTask: expected 'note', got '${item.type}'`)
return {
loaderTasks: [],
loaderDoneReturn: {
...LangChainFramework.ERROR_LOADER_RETURN,
message: `Invalid item type: expected 'note', got '${item.type}'`,
messageSource: 'validation'
}
}
}
const content = item.content
const sourceUrl = item.sourceUrl ?? ''
logger.info(`noteTask ${content}, ${sourceUrl}`)
const encoder = new TextEncoder()
const contentBytes = encoder.encode(content)
const loaderTask: LoaderTask = {
loaderTasks: [
{
state: LoaderTaskItemState.PENDING,
task: async () => {
// Load using the processed note
const vectorStore = await getVectorStore()
return addNoteLoader(base, vectorStore, content, sourceUrl)
.then((result) => {
loaderTask.loaderDoneReturn = result
return result
})
.then(async () => {
await vectorStore.save(path.join(this.storageDir, base.id))
})
.catch((e) => {
logger.error(`Error in addNoteLoader for ${sourceUrl}: ${e}`)
const errorResult: LoaderReturn = {
...LangChainFramework.ERROR_LOADER_RETURN,
message: e.message,
messageSource: 'embedding'
}
loaderTask.loaderDoneReturn = errorResult
return errorResult
})
},
evaluateTaskWorkload: { workload: contentBytes.length }
}
],
loaderDoneReturn: null
}
return loaderTask
}
private videoTask(
getVectorStore: () => Promise<FaissStore>,
options: KnowledgeBaseAddItemOptionsNonNullableAttribute
): LoaderTask {
const { base, item } = options
if (!isKnowledgeVideoItem(item)) {
logger.error(`Invalid item type for videoTask: expected 'video', got '${item.type}'`)
return {
loaderTasks: [],
loaderDoneReturn: {
...LangChainFramework.ERROR_LOADER_RETURN,
message: `Invalid item type: expected 'video', got '${item.type}'`,
messageSource: 'validation'
}
}
}
const files = item.content
const loaderTask: LoaderTask = {
loaderTasks: [
{
state: LoaderTaskItemState.PENDING,
task: async () => {
const vectorStore = await getVectorStore()
return addVideoLoader(base, vectorStore, files)
.then((result) => {
loaderTask.loaderDoneReturn = result
return result
})
.then(async () => {
await vectorStore.save(path.join(this.storageDir, base.id))
})
.catch((e) => {
logger.error(`Preprocessing failed for ${files[0].name}: ${e}`)
const errorResult: LoaderReturn = {
...LangChainFramework.ERROR_LOADER_RETURN,
message: e.message,
messageSource: 'preprocess'
}
loaderTask.loaderDoneReturn = errorResult
return errorResult
})
},
evaluateTaskWorkload: { workload: files[0].size }
}
],
loaderDoneReturn: null
}
return loaderTask
}
private async getAllDocuments(base: KnowledgeBaseParams): Promise<Document[]> {
logger.info(`Fetching all documents from database for knowledge base: ${base.id}`)
try {
const results = (await this.getVectorStore(base)).docstore._docs
const documents: Document[] = Array.from(results.values())
logger.info(`Fetched ${documents.length} documents for BM25/Hybrid retriever.`)
return documents
} catch (e) {
logger.error(`Could not fetch documents from database for base ${base.id}: ${e}`)
// Return an empty array if the table does not exist or the query fails
return []
}
}
}

View File

@ -1,13 +1,7 @@
import { isLinux, isWin } from '@main/constant'
import { loadOcrImage } from '@main/utils/ocr'
import { OcrAccuracy, recognize } from '@napi-rs/system-ocr'
import {
ImageFileMetadata,
isImageFileMetadata as isImageFileMetadata,
OcrResult,
OcrSystemConfig,
SupportedOcrFile
} from '@types'
import { ImageFileMetadata, isImageFileMetadata, OcrResult, OcrSystemConfig, SupportedOcrFile } from '@types'
import { OcrBaseService } from './OcrBaseService'

View File

@ -9,7 +9,7 @@ export class FileServiceManager {
private static instance: FileServiceManager
private services: Map<string, BaseFileService> = new Map()
// eslint-disable-next-line @typescript-eslint/no-empty-function
// oxlint-disable-next-line @typescript-eslint/no-empty-function
private constructor() {}
static getInstance(): FileServiceManager {

View File

@ -1,3 +1,4 @@
import { createHash } from 'node:crypto'
import * as fs from 'node:fs'
import { readFile } from 'node:fs/promises'
import os from 'node:os'
@ -264,11 +265,12 @@ export async function scanDir(dirPath: string, depth = 0, basePath?: string): Pr
if (entry.isDirectory() && options.includeDirectories) {
const stats = await fs.promises.stat(entryPath)
const externalDirPath = entryPath.replace(/\\/g, '/')
const dirTreeNode: NotesTreeNode = {
id: uuidv4(),
id: createHash('sha1').update(externalDirPath).digest('hex'),
name: entry.name,
treePath: treePath,
externalPath: entryPath,
externalPath: externalDirPath,
createdAt: stats.birthtime.toISOString(),
updatedAt: stats.mtime.toISOString(),
type: 'folder',
@ -299,11 +301,12 @@ export async function scanDir(dirPath: string, depth = 0, basePath?: string): Pr
? `/${dirRelativePath.replace(/\\/g, '/')}/${nameWithoutExt}`
: `/${nameWithoutExt}`
const externalFilePath = entryPath.replace(/\\/g, '/')
const fileTreeNode: NotesTreeNode = {
id: uuidv4(),
id: createHash('sha1').update(externalFilePath).digest('hex'),
name: name,
treePath: fileTreePath,
externalPath: entryPath,
externalPath: externalFilePath,
createdAt: stats.birthtime.toISOString(),
updatedAt: stats.mtime.toISOString(),
type: 'file'
@ -420,7 +423,7 @@ export function sanitizeFilename(fileName: string, replacement = '_'): string {
// Remove or replace illegal characters
let sanitized = fileName
// eslint-disable-next-line no-control-regex
// oxlint-disable-next-line no-control-regex
.replace(/[<>:"/\\|?*\x00-\x1f]/g, replacement) // Windows illegal characters
.replace(/^(CON|PRN|AUX|NUL|COM[1-9]|LPT[1-9])(\.|$)/i, replacement + '$2') // Windows reserved names
.replace(/[\s.]+$/, '') // remove trailing spaces and dots
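The scanDir change earlier in this file's diff swaps random UUIDs for a SHA-1 hash of the normalized external path, so a node keeps the same id across rescans of the same directory. A small sketch of that property (hypothetical paths, illustration only):

import { createHash } from 'node:crypto'

// Path-derived ids are deterministic: the same external path always maps to the same id.
const idFor = (externalPath: string) => createHash('sha1').update(externalPath).digest('hex')

console.log(idFor('/notes/todo.md') === idFor('/notes/todo.md')) // true, unlike uuidv4()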

View File

@ -36,13 +36,14 @@ export function debounce(func: (...args: any[]) => void, wait: number, immediate
}
}
export function dumpPersistState() {
const persistState = JSON.parse(localStorage.getItem('persist:cherry-studio') || '{}')
for (const key in persistState) {
persistState[key] = JSON.parse(persistState[key])
}
return JSON.stringify(persistState)
}
// NOTE: It's an unused function. localStorage should not be accessed in main process.
// export function dumpPersistState() {
// const persistState = JSON.parse(localStorage.getItem('persist:cherry-studio') || '{}')
// for (const key in persistState) {
// persistState[key] = JSON.parse(persistState[key])
// }
// return JSON.stringify(persistState)
// }
export const runAsyncFunction = async (fn: () => void) => {
await fn()

View File

@ -1,10 +1,11 @@
import { electronAPI } from '@electron-toolkit/preload'
import { SpanEntity, TokenUsage } from '@mcp-trace/trace-core'
import { SpanContext } from '@opentelemetry/api'
import { UpgradeChannel } from '@shared/config/constant'
import { TerminalConfig, UpgradeChannel } from '@shared/config/constant'
import type { LogLevel, LogSourceWithContext } from '@shared/config/logger'
import type { FileChangeEvent } from '@shared/config/types'
import { IpcChannel } from '@shared/IpcChannel'
import type { Notification } from '@types'
import {
AddMemoryOptions,
AssistantMessage,
@ -28,7 +29,6 @@ import {
WebDavConfig
} from '@types'
import { contextBridge, ipcRenderer, OpenDialogOptions, shell, webUtils } from 'electron'
import { Notification } from 'src/renderer/src/types/notification'
import { CreateDirectoryOptions } from 'webdav'
import type { ActionItem } from '../renderer/src/types/selectionTypes'
@ -47,6 +47,7 @@ const api = {
getDiskInfo: (directoryPath: string): Promise<{ free: number; size: number } | null> =>
ipcRenderer.invoke(IpcChannel.App_GetDiskInfo, directoryPath),
reload: () => ipcRenderer.invoke(IpcChannel.App_Reload),
quit: () => ipcRenderer.invoke(IpcChannel.App_Quit),
setProxy: (proxy: string | undefined, bypassRules?: string) =>
ipcRenderer.invoke(IpcChannel.App_Proxy, proxy, bypassRules),
checkForUpdate: () => ipcRenderer.invoke(IpcChannel.App_CheckForUpdate),
@ -84,6 +85,7 @@ const api = {
ipcRenderer.invoke(IpcChannel.App_LogToMain, source, level, message, data),
setFullScreen: (value: boolean): Promise<void> => ipcRenderer.invoke(IpcChannel.App_SetFullScreen, value),
isFullScreen: (): Promise<boolean> => ipcRenderer.invoke(IpcChannel.App_IsFullScreen),
getSystemFonts: (): Promise<string[]> => ipcRenderer.invoke(IpcChannel.App_GetSystemFonts),
mac: {
isProcessTrusted: (): Promise<boolean> => ipcRenderer.invoke(IpcChannel.App_MacIsProcessTrusted),
requestProcessTrust: (): Promise<boolean> => ipcRenderer.invoke(IpcChannel.App_MacRequestProcessTrust)
@ -439,16 +441,24 @@ const api = {
model: string,
directory: string,
env: Record<string, string>,
options?: { autoUpdateToLatest?: boolean }
) => ipcRenderer.invoke(IpcChannel.CodeTools_Run, cliTool, model, directory, env, options)
options?: { autoUpdateToLatest?: boolean; terminal?: string }
) => ipcRenderer.invoke(IpcChannel.CodeTools_Run, cliTool, model, directory, env, options),
getAvailableTerminals: (): Promise<TerminalConfig[]> =>
ipcRenderer.invoke(IpcChannel.CodeTools_GetAvailableTerminals),
setCustomTerminalPath: (terminalId: string, path: string): Promise<void> =>
ipcRenderer.invoke(IpcChannel.CodeTools_SetCustomTerminalPath, terminalId, path),
getCustomTerminalPath: (terminalId: string): Promise<string | undefined> =>
ipcRenderer.invoke(IpcChannel.CodeTools_GetCustomTerminalPath, terminalId),
removeCustomTerminalPath: (terminalId: string): Promise<void> =>
ipcRenderer.invoke(IpcChannel.CodeTools_RemoveCustomTerminalPath, terminalId)
},
ocr: {
ocr: (file: SupportedOcrFile, provider: OcrProvider): Promise<OcrResult> =>
ipcRenderer.invoke(IpcChannel.OCR_ocr, file, provider)
},
cherryin: {
cherryai: {
generateSignature: (params: { method: string; path: string; query: string; body: Record<string, any> }) =>
ipcRenderer.invoke(IpcChannel.Cherryin_GetSignature, params)
ipcRenderer.invoke(IpcChannel.Cherryai_GetSignature, params)
},
windowControls: {
minimize: (): Promise<void> => ipcRenderer.invoke(IpcChannel.Windows_Minimize),
@ -475,13 +485,10 @@ if (process.contextIsolated) {
contextBridge.exposeInMainWorld('electron', electronAPI)
contextBridge.exposeInMainWorld('api', api)
} catch (error) {
// eslint-disable-next-line no-restricted-syntax
console.error('[Preload]Failed to expose APIs:', error as Error)
}
} else {
// @ts-ignore (define in dts)
window.electron = electronAPI
// @ts-ignore (define in dts)
window.api = api
}
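For context, these bridged methods are what the renderer calls through `window.api`. A minimal renderer-side sketch, assuming the code-tools block above is exposed under `window.api.codeTools` (the enclosing key is not shown in this hunk) and that the typed `window.api` declaration accompanies this preload script:

// Renderer-side sketch: list the terminals exposed by the new code-tools bridge.
// Assumes the bridge key is `codeTools`; adjust to the actual preload key if it differs.
async function logAvailableTerminals(): Promise<void> {
  const terminals = await window.api.codeTools.getAvailableTerminals()
  console.log(`Found ${terminals.length} available terminals`)
}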

View File

@ -1,6 +1,5 @@
import '@renderer/databases'
import { HeroUIProvider } from '@heroui/react'
import { loggerService } from '@logger'
import store, { persistor } from '@renderer/store'
import { QueryClient, QueryClientProvider } from '@tanstack/react-query'
@ -11,6 +10,7 @@ import { ToastPortal } from './components/ToastPortal'
import TopViewContainer from './components/TopView'
import AntdProvider from './context/AntdProvider'
import { CodeStyleProvider } from './context/CodeStyleProvider'
import { HeroUIProvider } from './context/HeroUIProvider'
import { NotificationProvider } from './context/NotificationProvider'
import StyleSheetManager from './context/StyleSheetManager'
import { ThemeProvider } from './context/ThemeProvider'
@ -34,7 +34,7 @@ function App(): React.ReactElement {
return (
<Provider store={store}>
<QueryClientProvider client={queryClient}>
<HeroUIProvider className="flex h-full w-full flex-1">
<HeroUIProvider>
<StyleSheetManager>
<ThemeProvider>
<AntdProvider>

View File

@ -4,24 +4,15 @@
*/
import { loggerService } from '@logger'
import { MCPTool, WebSearchResults, WebSearchSource } from '@renderer/types'
import { AISDKWebSearchResult, MCPTool, WebSearchResults, WebSearchSource } from '@renderer/types'
import { Chunk, ChunkType } from '@renderer/types/chunk'
import { convertLinks, flushLinkConverterBuffer } from '@renderer/utils/linkConverter'
import type { TextStreamPart, ToolSet } from 'ai'
import { ToolCallChunkHandler } from './handleToolCallChunk'
const logger = loggerService.withContext('AiSdkToChunkAdapter')
export interface CherryStudioChunk {
type: 'text-delta' | 'text-complete' | 'tool-call' | 'tool-result' | 'finish' | 'error'
text?: string
toolCall?: any
toolResult?: any
finishReason?: string
usage?: any
error?: any
}
/**
* Adapter that converts AI SDK chunks into Cherry Studio chunks
* Consumes the fullStream output and emits Cherry Studio chunk events
@ -29,13 +20,18 @@ export interface CherryStudioChunk {
export class AiSdkToChunkAdapter {
toolCallHandler: ToolCallChunkHandler
private accumulate: boolean | undefined
private isFirstChunk = true
private enableWebSearch: boolean = false
constructor(
private onChunk: (chunk: Chunk) => void,
mcpTools: MCPTool[] = [],
accumulate?: boolean
accumulate?: boolean,
enableWebSearch?: boolean
) {
this.toolCallHandler = new ToolCallChunkHandler(onChunk, mcpTools)
this.accumulate = accumulate
this.enableWebSearch = enableWebSearch || false
}
/**
@ -65,11 +61,24 @@ export class AiSdkToChunkAdapter {
webSearchResults: [],
reasoningId: ''
}
// Reset link converter state at the start of stream
this.isFirstChunk = true
try {
while (true) {
const { done, value } = await reader.read()
if (done) {
// Flush any remaining content from link converter buffer if web search is enabled
if (this.enableWebSearch) {
const remainingText = flushLinkConverterBuffer()
if (remainingText) {
this.onChunk({
type: ChunkType.TEXT_DELTA,
text: remainingText
})
}
}
break
}
@ -87,7 +96,7 @@ export class AiSdkToChunkAdapter {
*/
private convertAndEmitChunk(
chunk: TextStreamPart<any>,
final: { text: string; reasoningContent: string; webSearchResults: any[]; reasoningId: string }
final: { text: string; reasoningContent: string; webSearchResults: AISDKWebSearchResult[]; reasoningId: string }
) {
logger.silly(`AI SDK chunk type: ${chunk.type}`, chunk)
switch (chunk.type) {
@ -97,17 +106,44 @@ export class AiSdkToChunkAdapter {
type: ChunkType.TEXT_START
})
break
case 'text-delta':
if (this.accumulate) {
final.text += chunk.text || ''
case 'text-delta': {
const processedText = chunk.text || ''
let finalText: string
// Only apply link conversion if web search is enabled
if (this.enableWebSearch) {
const result = convertLinks(processedText, this.isFirstChunk)
if (this.isFirstChunk) {
this.isFirstChunk = false
}
// Handle buffered content
if (result.hasBufferedContent) {
finalText = result.text
} else {
finalText = result.text || processedText
}
} else {
final.text = chunk.text || ''
// Without web search, just use the original text
finalText = processedText
}
if (this.accumulate) {
final.text += finalText
} else {
final.text = finalText
}
// Only emit chunk if there's text to send
if (finalText) {
this.onChunk({
type: ChunkType.TEXT_DELTA,
text: this.accumulate ? final.text : finalText
})
}
this.onChunk({
type: ChunkType.TEXT_DELTA,
text: final.text || ''
})
break
}
case 'text-end':
this.onChunk({
type: ChunkType.TEXT_COMPLETE,
@ -127,15 +163,13 @@ export class AiSdkToChunkAdapter {
final.reasoningContent += chunk.text || ''
this.onChunk({
type: ChunkType.THINKING_DELTA,
text: final.reasoningContent || '',
thinking_millsec: (chunk.providerMetadata?.metadata?.thinking_millsec as number) || 0
text: final.reasoningContent || ''
})
break
case 'reasoning-end':
this.onChunk({
type: ChunkType.THINKING_COMPLETE,
text: (chunk.providerMetadata?.metadata?.thinking_content as string) || '',
thinking_millsec: (chunk.providerMetadata?.metadata?.thinking_millsec as number) || 0
text: final.reasoningContent || ''
})
final.reasoningContent = ''
break
@ -200,7 +234,7 @@ export class AiSdkToChunkAdapter {
[WebSearchSource.ANTHROPIC]: WebSearchSource.ANTHROPIC,
[WebSearchSource.OPENROUTER]: WebSearchSource.OPENROUTER,
[WebSearchSource.GEMINI]: WebSearchSource.GEMINI,
[WebSearchSource.PERPLEXITY]: WebSearchSource.PERPLEXITY,
// [WebSearchSource.PERPLEXITY]: WebSearchSource.PERPLEXITY,
[WebSearchSource.QWEN]: WebSearchSource.QWEN,
[WebSearchSource.HUNYUAN]: WebSearchSource.HUNYUAN,
[WebSearchSource.ZHIPU]: WebSearchSource.ZHIPU,
@ -268,18 +302,9 @@ export class AiSdkToChunkAdapter {
// === Source and file related events ===
case 'source':
if (chunk.sourceType === 'url') {
// if (final.webSearchResults.length === 0) {
// eslint-disable-next-line @typescript-eslint/no-unused-vars
// oxlint-disable-next-line @typescript-eslint/no-unused-vars
const { sourceType: _, ...rest } = chunk
final.webSearchResults.push(rest)
// }
// this.onChunk({
// type: ChunkType.LLM_WEB_SEARCH_COMPLETE,
// llm_web_search: {
// source: WebSearchSource.AISDK,
// results: final.webSearchResults
// }
// })
}
break
case 'file':

View File

@ -298,8 +298,29 @@ export class ToolCallChunkHandler {
type: ChunkType.MCP_TOOL_COMPLETE,
responses: [toolResponse]
})
const images: string[] = []
for (const content of toolResponse.response?.content || []) {
if (content.type === 'image' && content.data) {
images.push(`data:${content.mimeType};base64,${content.data}`)
}
}
if (images.length) {
this.onChunk({
type: ChunkType.IMAGE_CREATED
})
this.onChunk({
type: ChunkType.IMAGE_COMPLETE,
image: {
type: 'base64',
images: images
}
})
}
}
}
handleToolError(
chunk: {
type: 'tool-error'

View File

@ -284,7 +284,7 @@ export default class ModernAiProvider {
// 创建带有中间件的执行器
if (config.onChunk) {
const accumulate = this.model!.supported_text_delta !== false // true and undefined
const adapter = new AiSdkToChunkAdapter(config.onChunk, config.mcpTools, accumulate)
const adapter = new AiSdkToChunkAdapter(config.onChunk, config.mcpTools, accumulate, config.enableWebSearch)
const streamResult = await executor.streamText({
...params,

View File

@ -1,11 +1,12 @@
import { loggerService } from '@logger'
import { isNewApiProvider } from '@renderer/config/providers'
import { Provider } from '@renderer/types'
import { AihubmixAPIClient } from './aihubmix/AihubmixAPIClient'
import { AnthropicAPIClient } from './anthropic/AnthropicAPIClient'
import { AwsBedrockAPIClient } from './aws/AwsBedrockAPIClient'
import { BaseApiClient } from './BaseApiClient'
import { CherryinAPIClient } from './cherryin/CherryinAPIClient'
import { CherryAiAPIClient } from './cherryai/CherryAiAPIClient'
import { GeminiAPIClient } from './gemini/GeminiAPIClient'
import { VertexAPIClient } from './gemini/VertexAPIClient'
import { NewAPIClient } from './newapi/NewAPIClient'
@ -34,8 +35,8 @@ export class ApiClientFactory {
let instance: BaseApiClient
// Check special provider IDs first
if (provider.id === 'cherryin') {
instance = new CherryinAPIClient(provider) as BaseApiClient
if (provider.id === 'cherryai') {
instance = new CherryAiAPIClient(provider) as BaseApiClient
return instance
}
@ -45,7 +46,7 @@ export class ApiClientFactory {
return instance
}
if (provider.id === 'new-api') {
if (isNewApiProvider(provider)) {
logger.debug(`Creating NewAPIClient for provider: ${provider.id}`)
instance = new NewAPIClient(provider) as BaseApiClient
return instance

View File

@ -84,7 +84,7 @@ export abstract class BaseApiClient<
* Works around type-narrowing issues with instanceof checks
* (used by AihubmixAPIClient, for example)
*/
// eslint-disable-next-line @typescript-eslint/no-unused-vars
// oxlint-disable-next-line @typescript-eslint/no-unused-vars
public getClientCompatibilityType(_model?: Model): string[] {
// Return the class name by default
return [this.constructor.name]

View File

@ -67,7 +67,9 @@ vi.mock('@renderer/config/models', () => ({
silicon: [],
defaultModel: []
},
isOpenAIModel: vi.fn(() => false)
isOpenAIModel: vi.fn(() => false),
glm45FlashModel: {},
qwen38bModel: {}
}))
describe('ApiClientFactory', () => {

View File

@ -35,12 +35,8 @@ vi.mock('@renderer/config/models', () => ({
findTokenLimit: vi.fn().mockReturnValue(4096),
isFunctionCallingModel: vi.fn().mockReturnValue(false),
DEFAULT_MAX_TOKENS: 4096,
glm45FlashModel: {
id: 'glm-4.5-flash',
name: 'GLM-4.5-Flash',
provider: 'cherryin',
group: 'GLM-4.5'
}
qwen38bModel: {},
glm45FlashModel: {}
}))
vi.mock('@renderer/services/AssistantService', () => ({

View File

@ -177,7 +177,7 @@ export class AnthropicAPIClient extends BaseApiClient<
}
// @ts-ignore not provided by the SDK
// eslint-disable-next-line @typescript-eslint/no-unused-vars
// oxlint-disable-next-line @typescript-eslint/no-unused-vars
override async generateImage(generateImageParams: GenerateImageParams): Promise<string[]> {
return []
}

View File

@ -455,7 +455,7 @@ export class AwsBedrockAPIClient extends BaseApiClient<
}
// @ts-ignore not provided by the SDK
// eslint-disable-next-line @typescript-eslint/no-unused-vars
// oxlint-disable-next-line @typescript-eslint/no-unused-vars
override async generateImage(_generateImageParams: GenerateImageParams): Promise<string[]> {
return []
}

View File

@ -4,7 +4,7 @@ import OpenAI from 'openai'
import { OpenAIAPIClient } from '../openai/OpenAIApiClient'
export class CherryinAPIClient extends OpenAIAPIClient {
export class CherryAiAPIClient extends OpenAIAPIClient {
constructor(provider: Provider) {
super(provider)
}
@ -17,7 +17,7 @@ export class CherryinAPIClient extends OpenAIAPIClient {
options = options || {}
options.headers = options.headers || {}
const signature = await window.api.cherryin.generateSignature({
const signature = await window.api.cherryai.generateSignature({
method: 'POST',
path: '/chat/completions',
query: '',
@ -34,7 +34,7 @@ export class CherryinAPIClient extends OpenAIAPIClient {
}
override getClientCompatibilityType(): string[] {
return ['CherryinAPIClient']
return ['CherryAiAPIClient']
}
public async listModels(): Promise<OpenAI.Models.Model[]> {
@ -43,7 +43,7 @@ export class CherryinAPIClient extends OpenAIAPIClient {
const created = Date.now()
return models.map((id) => ({
id,
owned_by: 'cherryin',
owned_by: 'cherryai',
object: 'model' as const,
created
}))

View File

@ -11,7 +11,7 @@ export class PPIOAPIClient extends OpenAIAPIClient {
super(provider)
}
// eslint-disable-next-line @typescript-eslint/no-unused-vars
// oxlint-disable-next-line @typescript-eslint/no-unused-vars
override getClientCompatibilityType(_model?: Model): string[] {
return ['OpenAIAPIClient']
}

View File

@ -1,6 +1,6 @@
import { loggerService } from '@logger'
import { isZhipuModel } from '@renderer/config/models'
import store from '@renderer/store'
import { getStoreProviders } from '@renderer/hooks/useStore'
import { Chunk } from '@renderer/types/chunk'
import { CompletionsParams, CompletionsResult } from '../schemas'
@ -87,7 +87,7 @@ function handleError(error: any, params: CompletionsParams): any {
* 2. Applies when enableGenerateImage is true
*/
function handleZhipuError(error: any): any {
const provider = store.getState().llm.providers.find((p) => p.id === 'zhipu')
const provider = getStoreProviders().find((p) => p.id === 'zhipu')
const logger = loggerService.withContext('handleZhipuError')
// Define the error pattern mapping

View File

@ -44,7 +44,7 @@ const stringifyArgsForLogging = (args: any[]): string => {
*/
export const createGenericLoggingMiddleware: () => MethodMiddleware = () => {
const middlewareName = 'GenericLoggingMiddleware'
// eslint-disable-next-line @typescript-eslint/no-unused-vars
// oxlint-disable-next-line @typescript-eslint/no-unused-vars
return (_: MiddlewareAPI<BaseContext, any[]>) => (next) => async (ctx, args) => {
const methodName = ctx.methodName
const logPrefix = `[${middlewareName} (${methodName})]`

View File

@ -1,6 +1,6 @@
import { loggerService } from '@logger'
import { ChunkType } from '@renderer/types/chunk'
import { flushLinkConverterBuffer, smartLinkConverter } from '@renderer/utils/linkConverter'
import { convertLinks, flushLinkConverterBuffer } from '@renderer/utils/linkConverter'
import { CompletionsParams, CompletionsResult, GenericChunk } from '../schemas'
import { CompletionsContext, CompletionsMiddleware } from '../types'
@ -28,8 +28,6 @@ export const WebSearchMiddleware: CompletionsMiddleware =
}
// 调用下游中间件
const result = await next(ctx, params)
const model = params.assistant?.model!
let isFirstChunk = true
// Post-response processing: record web search events
@ -42,15 +40,9 @@ export const WebSearchMiddleware: CompletionsMiddleware =
new TransformStream<GenericChunk, GenericChunk>({
transform(chunk: GenericChunk, controller) {
if (chunk.type === ChunkType.TEXT_DELTA) {
const providerType = model.provider || 'openai'
// Convert links using the currently available web search results
const text = chunk.text
const result = smartLinkConverter(
text,
providerType,
isFirstChunk,
ctx._internal.webSearchState!.results
)
const result = convertLinks(text, isFirstChunk)
if (isFirstChunk) {
isFirstChunk = false
}

View File

@ -1,3 +1,4 @@
import { WebSearchPluginConfig } from '@cherrystudio/ai-core/built-in/plugins'
import { loggerService } from '@logger'
import type { MCPTool, Message, Model, Provider } from '@renderer/types'
import type { Chunk } from '@renderer/types/chunk'
@ -20,11 +21,14 @@ export interface AiSdkMiddlewareConfig {
isSupportedToolUse: boolean
// image generation endpoint
isImageGenerationEndpoint: boolean
// Whether built-in web search is enabled
enableWebSearch: boolean
enableGenerateImage: boolean
enableUrlContext: boolean
mcpTools?: MCPTool[]
uiMessages?: Message[]
// Built-in web search configuration
webSearchPluginConfig?: WebSearchPluginConfig
}
/**
@ -136,7 +140,19 @@ export function buildAiSdkMiddlewares(config: AiSdkMiddlewareConfig): LanguageMo
return builder.build()
}
const tagNameArray = ['think', 'thought']
const tagName = {
reasoning: 'reasoning',
think: 'think',
thought: 'thought',
seedThink: 'seed:think'
}
function getReasoningTagName(modelId: string | undefined): string {
if (modelId?.includes('gpt-oss')) return tagName.reasoning
if (modelId?.includes('gemini')) return tagName.thought
if (modelId?.includes('seed-oss-36b')) return tagName.seedThink
return tagName.think
}
/**
* Adds provider-specific middlewares
@ -152,7 +168,7 @@ function addProviderSpecificMiddlewares(builder: AiSdkMiddlewareBuilder, config:
case 'openai':
case 'azure-openai': {
if (config.enableReasoning) {
const tagName = config.model?.id.includes('gemini') ? tagNameArray[1] : tagNameArray[0]
const tagName = getReasoningTagName(config.model?.id.toLowerCase())
builder.add({
name: 'thinking-tag-extraction',
middleware: extractReasoningMiddleware({ tagName })
@ -163,6 +179,9 @@ function addProviderSpecificMiddlewares(builder: AiSdkMiddlewareBuilder, config:
case 'gemini':
// Gemini-specific middleware
break
case 'aws-bedrock': {
break
}
default:
// Generic handling for other providers
break

View File

@ -5,7 +5,6 @@ import { getEnableDeveloperMode } from '@renderer/hooks/useSettings'
import { Assistant } from '@renderer/types'
import { AiSdkMiddlewareConfig } from '../middleware/AiSdkMiddlewareBuilder'
import reasoningTimePlugin from './reasoningTimePlugin'
import { searchOrchestrationPlugin } from './searchOrchestrationPlugin'
import { createTelemetryPlugin } from './telemetryPlugin'
@ -30,9 +29,8 @@ export function buildPlugins(
}
// 1. Model built-in web search
if (middlewareConfig.enableWebSearch) {
// Default search parameters are built in; pass a config here to change them
plugins.push(webSearchPlugin())
if (middlewareConfig.enableWebSearch && middlewareConfig.webSearchPluginConfig) {
plugins.push(webSearchPlugin(middlewareConfig.webSearchPluginConfig))
}
// 2. Add the search plugin when tool calling is supported
if (middlewareConfig.isSupportedToolUse || middlewareConfig.isPromptToolUse) {
@ -40,9 +38,9 @@ export function buildPlugins(
}
// 3. Add the reasoning plugin for reasoning models
if (middlewareConfig.enableReasoning) {
plugins.push(reasoningTimePlugin)
}
// if (middlewareConfig.enableReasoning) {
// plugins.push(reasoningTimePlugin)
// }
// 4. Add the tool plugin when prompt-based tool calling is enabled
if (middlewareConfig.isPromptToolUse) {

View File

@ -7,18 +7,14 @@ export default definePlugin({
transformStream: () => () => {
// === Time tracking state ===
let thinkingStartTime = 0
let hasStartedThinking = false
let accumulatedThinkingContent = ''
let reasoningBlockId = ''
return new TransformStream<TextStreamPart<ToolSet>, TextStreamPart<ToolSet>>({
transform(chunk: TextStreamPart<ToolSet>, controller: TransformStreamDefaultController<TextStreamPart<ToolSet>>) {
// === Handle reasoning chunk types ===
if (chunk.type === 'reasoning-start') {
controller.enqueue(chunk)
hasStartedThinking = true
thinkingStartTime = performance.now()
reasoningBlockId = chunk.id
} else if (chunk.type === 'reasoning-delta') {
accumulatedThinkingContent += chunk.text
controller.enqueue({
@ -32,21 +28,6 @@ export default definePlugin({
}
}
})
} else if (chunk.type === 'reasoning-end' && hasStartedThinking) {
controller.enqueue({
type: 'reasoning-end',
id: reasoningBlockId,
providerMetadata: {
metadata: {
thinking_millsec: performance.now() - thinkingStartTime,
thinking_content: accumulatedThinkingContent
}
}
})
accumulatedThinkingContent = ''
hasStartedThinking = false
thinkingStartTime = 0
reasoningBlockId = ''
} else {
controller.enqueue(chunk)
}

View File

@ -66,6 +66,7 @@ class AdapterTracer {
spanName: name,
topicId: this.topicId,
modelName: this.modelName,
// oxlint-disable-next-line no-undef False alarm. see https://github.com/oxc-project/oxc/issues/4232
argCount: arguments.length
})

View File

@ -5,6 +5,8 @@
import { vertexAnthropic } from '@ai-sdk/google-vertex/anthropic/edge'
import { vertex } from '@ai-sdk/google-vertex/edge'
import { WebSearchPluginConfig } from '@cherrystudio/ai-core/built-in/plugins'
import { isBaseProvider } from '@cherrystudio/ai-core/core/providers/schemas'
import { loggerService } from '@logger'
import {
isGenerateImageModel,
@ -16,19 +18,25 @@ import {
isWebSearchModel
} from '@renderer/config/models'
import { getAssistantSettings, getDefaultModel } from '@renderer/services/AssistantService'
import store from '@renderer/store'
import { CherryWebSearchConfig } from '@renderer/store/websearch'
import { type Assistant, type MCPTool, type Provider } from '@renderer/types'
import type { StreamTextParams } from '@renderer/types/aiCoreTypes'
import type { ModelMessage } from 'ai'
import { mapRegexToPatterns } from '@renderer/utils/blacklistMatchPattern'
import type { ModelMessage, Tool } from 'ai'
import { stepCountIs } from 'ai'
import { getAiSdkProviderId } from '../provider/factory'
import { setupToolsConfig } from '../utils/mcp'
import { buildProviderOptions } from '../utils/options'
import { getAnthropicThinkingBudget } from '../utils/reasoning'
import { buildProviderBuiltinWebSearchConfig } from '../utils/websearch'
import { getTemperature, getTopP } from './modelParameters'
const logger = loggerService.withContext('parameterBuilder')
type ProviderDefinedTool = Extract<Tool<any, any>, { type: 'provider-defined' }>
/**
* Builds the streamText parameters for the AI SDK
*
@ -40,6 +48,7 @@ export async function buildStreamTextParams(
options: {
mcpTools?: MCPTool[]
webSearchProviderId?: string
webSearchConfig?: CherryWebSearchConfig
requestOptions?: {
signal?: AbortSignal
timeout?: number
@ -55,6 +64,7 @@ export async function buildStreamTextParams(
enableGenerateImage: boolean
enableUrlContext: boolean
}
webSearchPluginConfig?: WebSearchPluginConfig
}> {
const { mcpTools } = options
@ -91,6 +101,12 @@ export async function buildStreamTextParams(
// }
// Build the actual providerOptions
const webSearchConfig: CherryWebSearchConfig = {
maxResults: store.getState().websearch.maxResults,
excludeDomains: store.getState().websearch.excludeDomains,
searchWithTime: store.getState().websearch.searchWithTime
}
const providerOptions = buildProviderOptions(assistant, model, provider, {
enableReasoning,
enableWebSearch,
@ -107,15 +123,22 @@ export async function buildStreamTextParams(
maxTokens -= getAnthropicThinkingBudget(assistant, model)
}
// google-vertex | google-vertex-anthropic
let webSearchPluginConfig: WebSearchPluginConfig | undefined = undefined
if (enableWebSearch) {
if (isBaseProvider(aiSdkProviderId)) {
webSearchPluginConfig = buildProviderBuiltinWebSearchConfig(aiSdkProviderId, webSearchConfig)
}
if (!tools) {
tools = {}
}
if (aiSdkProviderId === 'google-vertex') {
tools.google_search = vertex.tools.googleSearch({})
tools.google_search = vertex.tools.googleSearch({}) as ProviderDefinedTool
} else if (aiSdkProviderId === 'google-vertex-anthropic') {
tools.web_search = vertexAnthropic.tools.webSearch_20250305({})
const blockedDomains = mapRegexToPatterns(webSearchConfig.excludeDomains)
tools.web_search = vertexAnthropic.tools.webSearch_20250305({
maxUses: webSearchConfig.maxResults,
blockedDomains: blockedDomains.length > 0 ? blockedDomains : undefined
}) as ProviderDefinedTool
}
}
@ -124,7 +147,7 @@ export async function buildStreamTextParams(
if (!tools) {
tools = {}
}
tools.url_context = vertex.tools.urlContext({})
tools.url_context = vertex.tools.urlContext({}) as ProviderDefinedTool
}
// Build the base parameters
@ -149,7 +172,8 @@ export async function buildStreamTextParams(
return {
params,
modelId: model.id,
capabilities: { enableReasoning, enableWebSearch, enableGenerateImage, enableUrlContext }
capabilities: { enableReasoning, enableWebSearch, enableGenerateImage, enableUrlContext },
webSearchPluginConfig
}
}

View File

@ -69,6 +69,9 @@ export function getAiSdkProviderId(provider: Provider): ProviderId | 'openai-com
return resolvedFromType
}
}
if (provider.apiHost.includes('api.openai.com')) {
return 'openai-chat'
}
// 3. Final fallback, which usually resolves to openai-compatible
return provider.id as ProviderId
}

View File

@ -6,6 +6,7 @@ import {
type ProviderSettingsMap
} from '@cherrystudio/ai-core/provider'
import { isOpenAIChatCompletionOnlyModel } from '@renderer/config/models'
import { isNewApiProvider } from '@renderer/config/providers'
import {
getAwsBedrockAccessKeyId,
getAwsBedrockRegion,
@ -15,9 +16,9 @@ import { createVertexProvider, isVertexAIConfigured } from '@renderer/hooks/useV
import { getProviderByModel } from '@renderer/services/AssistantService'
import { loggerService } from '@renderer/services/LoggerService'
import store from '@renderer/store'
import type { Model, Provider } from '@renderer/types'
import { isSystemProvider, type Model, type Provider } from '@renderer/types'
import { formatApiHost } from '@renderer/utils/api'
import { cloneDeep, isEmpty } from 'lodash'
import { cloneDeep, trim } from 'lodash'
import { aihubmixProviderCreator, newApiResolverCreator, vertexAnthropicProviderCreator } from './config'
import { getAiSdkProviderId } from './factory'
@ -61,14 +62,16 @@ function handleSpecialProviders(model: Model, provider: Provider): Provider {
// return createVertexProvider(provider)
// }
if (provider.id === 'aihubmix') {
return aihubmixProviderCreator(model, provider)
}
if (provider.id === 'newapi') {
return newApiResolverCreator(model, provider)
}
if (provider.id === 'vertexai') {
return vertexAnthropicProviderCreator(model, provider)
if (isSystemProvider(provider)) {
if (provider.id === 'aihubmix') {
return aihubmixProviderCreator(model, provider)
}
if (isNewApiProvider(provider)) {
return newApiResolverCreator(model, provider)
}
if (provider.id === 'vertexai') {
return vertexAnthropicProviderCreator(model, provider)
}
}
return provider
}
@ -117,7 +120,7 @@ export function providerToAiSdkConfig(
// Build the base configuration
const baseConfig = {
baseURL: actualProvider.apiHost,
baseURL: trim(actualProvider.apiHost),
apiKey: getRotatedApiKey(actualProvider)
}
// Handle OpenAI mode
@ -192,7 +195,10 @@ export function providerToAiSdkConfig(
} else if (baseConfig.baseURL.endsWith('/v1')) {
baseConfig.baseURL = baseConfig.baseURL.slice(0, -3)
}
baseConfig.baseURL = isEmpty(baseConfig.baseURL) ? '' : baseConfig.baseURL
if (baseConfig.baseURL && !baseConfig.baseURL.includes('publishers/google')) {
baseConfig.baseURL = `${baseConfig.baseURL}/v1/projects/${project}/locations/${location}/publishers/google`
}
}
// If the AI SDK supports this provider, use its native configuration
@ -211,7 +217,8 @@ export function providerToAiSdkConfig(
options: {
...options,
name: actualProvider.id,
...extraOptions
...extraOptions,
includeUsage: true
}
}
}
@ -247,10 +254,10 @@ export async function prepareSpecialProviderConfig(
config.options.apiKey = token
break
}
case 'cherryin': {
case 'cherryai': {
config.options.fetch = async (url, options) => {
// Sign the final request parameters here
const signature = await window.api.cherryin.generateSignature({
const signature = await window.api.cherryai.generateSignature({
method: 'POST',
path: '/chat/completions',
query: '',

Some files were not shown because too many files have changed in this diff.