Mirror of https://github.com/CherryHQ/cherry-studio.git, synced 2026-01-07 05:39:05 +08:00
Compare commits
101 Commits
SHA1:

6b0bb64795  116ee6f94b  af7896b900  bb9b73557b  a5038ac844
9e45f801a8  313dac0f64  76ee67d4d7  2a31fa2ad5  c4f372feba
ad164f2c1b  ca3ddff00e  b4aeced1f9  d27d750bc5  a2639053ef
68a75dc4e3  4c67e5b43a  2383fd06db  f8519f0bf0  2012378341
86adb2e11c  680bda3993  acd1ecc09c  e3d1996254  56cf347909
2a3955919e  ca2b0ac28d  078cf39313  48a582820f  77e024027c
d391e55a8a  f878c8ab3b  33cdcaa558  bc9eeb9f30  068cf1083c
ed4353b054  528d6d37f2  efbe64e5da  cccf9bb7be  c242860abc
cb93eee29d  5ff173fcc7  b78df05f28  c13dc6eab5  2008d70707
723fa11647  9586f38157  401d66f3dd  99b431ec92  ab3bce33b8
0f0e18231d  4ae9bf8ff4  05dfb459a6  0669253abb  4ba0f2d25c
f7312697e7  d9171e0596  89a6d817f1  09e58d3756  e093a18deb
265934be5a  5f0006dced  6815ab65d1  6bdaba8a15  d1c93e4eae
7a862974c2  26a3bd0259  e16413de76  fc3e92e2f7  9a435b8abb
c4f94848e8  c747b8e2a4  a35bf4afa1  9f948e1ce7  4508fe2877
3045f924ce  a6ba5d34e0  8ab375161d  42260710d8  5e8646c6a5
7e93e8b9b2  eb7a2cc85a  fd6986076a  6309cc179d  c04529a23c
0f1b3afa72  0cf0072b51  150bb3e3a0  739096deca  1d5dafa325
bdfda7afb1  ef25eef0eb  c676a93595  e85009fcd6  99d7223a0a
bdd272b7cd  782f8496e0  bfeef7ef91  784fdd4fed  432b31c7b1
f2b4a2382b
.github/workflows/auto-i18n.yml (31 changed lines)
@@ -32,38 +32,37 @@ jobs:
         with:
           node-version: 22

-      - name: 📦 Install corepack
-        run: corepack enable && corepack prepare yarn@4.9.1 --activate
+      - name: 📦 Install pnpm
+        uses: pnpm/action-setup@v4

-      - name: 📂 Get yarn cache directory path
-        id: yarn-cache-dir-path
-        run: echo "dir=$(yarn config get cacheFolder)" >> $GITHUB_OUTPUT
+      - name: 📂 Get pnpm store directory
+        id: pnpm-cache
+        shell: bash
+        run: echo "STORE_PATH=$(pnpm store path --silent)" >> $GITHUB_OUTPUT

-      - name: 💾 Cache yarn dependencies
+      - name: 💾 Cache pnpm dependencies
         uses: actions/cache@v4
         with:
-          path: |
-            ${{ steps.yarn-cache-dir-path.outputs.dir }}
-            node_modules
-          key: ${{ runner.os }}-yarn-${{ hashFiles('**/yarn.lock') }}
+          path: ${{ steps.pnpm-cache.outputs.STORE_PATH }}
+          key: ${{ runner.os }}-pnpm-${{ hashFiles('**/pnpm-lock.yaml') }}
           restore-keys: |
-            ${{ runner.os }}-yarn-
+            ${{ runner.os }}-pnpm-

      - name: 📦 Install dependencies
        run: |
-          yarn install
+          pnpm install

      - name: 🏃‍♀️ Translate
-        run: yarn sync:i18n && yarn auto:i18n
+        run: pnpm i18n:sync && pnpm i18n:translate

      - name: 🔍 Format
-        run: yarn format
+        run: pnpm format

      - name: 🔍 Check for changes
        id: git_status
        run: |
          # Check if there are any uncommitted changes
-          git reset -- package.json yarn.lock # 不提交 package.json 和 yarn.lock 的更改
+          git reset -- package.json pnpm-lock.yaml # 不提交 package.json 和 pnpm-lock.yaml 的更改
          git diff --exit-code --quiet || echo "::set-output name=has_changes::true"
          git status --porcelain

@@ -73,7 +72,7 @@ jobs:

      - name: 🚀 Create Pull Request if changes exist
        if: steps.git_status.outputs.has_changes == 'true'
-        uses: peter-evans/create-pull-request@v6
+        uses: peter-evans/create-pull-request@v8
        with:
          token: ${{ secrets.GITHUB_TOKEN }} # Use the built-in GITHUB_TOKEN for bot actions
          commit-message: "feat(bot): Weekly automated script run"
.github/workflows/nightly-build.yml (29 changed lines)
@@ -65,25 +65,24 @@ jobs:
        run: |
          brew install python-setuptools

-      - name: Install corepack
-        run: corepack enable && corepack prepare yarn@4.9.1 --activate
+      - name: Install pnpm
+        uses: pnpm/action-setup@v4

-      - name: Get yarn cache directory path
-        id: yarn-cache-dir-path
-        run: echo "dir=$(yarn config get cacheFolder)" >> $GITHUB_OUTPUT
+      - name: Get pnpm store directory
+        id: pnpm-cache
+        shell: bash
+        run: echo "STORE_PATH=$(pnpm store path --silent)" >> $GITHUB_OUTPUT

-      - name: Cache yarn dependencies
+      - name: Cache pnpm dependencies
        uses: actions/cache@v4
        with:
-          path: |
-            ${{ steps.yarn-cache-dir-path.outputs.dir }}
-            node_modules
-          key: ${{ runner.os }}-yarn-${{ hashFiles('**/yarn.lock') }}
+          path: ${{ steps.pnpm-cache.outputs.STORE_PATH }}
+          key: ${{ runner.os }}-pnpm-${{ hashFiles('**/pnpm-lock.yaml') }}
          restore-keys: |
-            ${{ runner.os }}-yarn-
+            ${{ runner.os }}-pnpm-

      - name: Install Dependencies
-        run: yarn install
+        run: pnpm install

      - name: Generate date tag
        id: date
@@ -94,7 +93,7 @@ jobs:
        if: matrix.os == 'ubuntu-latest'
        run: |
          sudo apt-get install -y rpm
-          yarn build:linux
+          pnpm build:linux
        env:
          GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
          NODE_OPTIONS: --max-old-space-size=8192
@@ -106,7 +105,7 @@ jobs:
      - name: Build Mac
        if: matrix.os == 'macos-latest'
        run: |
-          yarn build:mac
+          pnpm build:mac
        env:
          CSC_LINK: ${{ secrets.CSC_LINK }}
          CSC_KEY_PASSWORD: ${{ secrets.CSC_KEY_PASSWORD }}
@@ -123,7 +122,7 @@ jobs:
      - name: Build Windows
        if: matrix.os == 'windows-latest'
        run: |
-          yarn build:win
+          pnpm build:win
        env:
          GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
          NODE_OPTIONS: --max-old-space-size=8192
.github/workflows/pr-ci.yml (33 changed lines)
@@ -28,37 +28,36 @@ jobs:
        with:
          node-version: 22

-      - name: Install corepack
-        run: corepack enable && corepack prepare yarn@4.9.1 --activate
+      - name: Install pnpm
+        uses: pnpm/action-setup@v4

-      - name: Get yarn cache directory path
-        id: yarn-cache-dir-path
-        run: echo "dir=$(yarn config get cacheFolder)" >> $GITHUB_OUTPUT
+      - name: Get pnpm store directory
+        id: pnpm-cache
+        shell: bash
+        run: echo "STORE_PATH=$(pnpm store path --silent)" >> $GITHUB_OUTPUT

-      - name: Cache yarn dependencies
+      - name: Cache pnpm dependencies
        uses: actions/cache@v4
        with:
-          path: |
-            ${{ steps.yarn-cache-dir-path.outputs.dir }}
-            node_modules
-          key: ${{ runner.os }}-yarn-${{ hashFiles('**/yarn.lock') }}
+          path: ${{ steps.pnpm-cache.outputs.STORE_PATH }}
+          key: ${{ runner.os }}-pnpm-${{ hashFiles('**/pnpm-lock.yaml') }}
          restore-keys: |
-            ${{ runner.os }}-yarn-
+            ${{ runner.os }}-pnpm-

      - name: Install Dependencies
-        run: yarn install
+        run: pnpm install

      - name: Lint Check
-        run: yarn test:lint
+        run: pnpm test:lint

      - name: Format Check
-        run: yarn format:check
+        run: pnpm format:check

      - name: Type Check
-        run: yarn typecheck
+        run: pnpm typecheck

      - name: i18n Check
-        run: yarn check:i18n
+        run: pnpm i18n:check

      - name: Test
-        run: yarn test
+        run: pnpm test
.github/workflows/release.yml (29 changed lines)
@@ -56,31 +56,30 @@ jobs:
        run: |
          brew install python-setuptools

-      - name: Install corepack
-        run: corepack enable && corepack prepare yarn@4.9.1 --activate
+      - name: Install pnpm
+        uses: pnpm/action-setup@v4

-      - name: Get yarn cache directory path
-        id: yarn-cache-dir-path
-        run: echo "dir=$(yarn config get cacheFolder)" >> $GITHUB_OUTPUT
+      - name: Get pnpm store directory
+        id: pnpm-cache
+        shell: bash
+        run: echo "STORE_PATH=$(pnpm store path --silent)" >> $GITHUB_OUTPUT

-      - name: Cache yarn dependencies
+      - name: Cache pnpm dependencies
        uses: actions/cache@v4
        with:
-          path: |
-            ${{ steps.yarn-cache-dir-path.outputs.dir }}
-            node_modules
-          key: ${{ runner.os }}-yarn-${{ hashFiles('**/yarn.lock') }}
+          path: ${{ steps.pnpm-cache.outputs.STORE_PATH }}
+          key: ${{ runner.os }}-pnpm-${{ hashFiles('**/pnpm-lock.yaml') }}
          restore-keys: |
-            ${{ runner.os }}-yarn-
+            ${{ runner.os }}-pnpm-

      - name: Install Dependencies
-        run: yarn install
+        run: pnpm install

      - name: Build Linux
        if: matrix.os == 'ubuntu-latest'
        run: |
          sudo apt-get install -y rpm
-          yarn build:linux
+          pnpm build:linux

        env:
          GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
@@ -94,7 +93,7 @@ jobs:
        if: matrix.os == 'macos-latest'
        run: |
          sudo -H pip install setuptools
-          yarn build:mac
+          pnpm build:mac
        env:
          CSC_LINK: ${{ secrets.CSC_LINK }}
          CSC_KEY_PASSWORD: ${{ secrets.CSC_KEY_PASSWORD }}
@@ -111,7 +110,7 @@ jobs:
      - name: Build Windows
        if: matrix.os == 'windows-latest'
        run: |
-          yarn build:win
+          pnpm build:win
        env:
          GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
          NODE_OPTIONS: --max-old-space-size=8192
.github/workflows/sync-to-gitcode.yml (9 changed lines)
@@ -48,9 +48,8 @@ jobs:
        with:
          node-version: 22

-      - name: Install corepack
-        shell: bash
-        run: corepack enable && corepack prepare yarn@4.9.1 --activate
+      - name: Install pnpm
+        uses: pnpm/action-setup@v4

      - name: Clean node_modules
        if: ${{ github.event.inputs.clean == 'true' }}
@@ -59,11 +58,11 @@ jobs:

      - name: Install Dependencies
        shell: bash
-        run: yarn install
+        run: pnpm install

      - name: Build Windows with code signing
        shell: bash
-        run: yarn build:win
+        run: pnpm build:win
        env:
          WIN_SIGN: true
          CHERRY_CERT_PATH: ${{ secrets.CHERRY_CERT_PATH }}
@@ -154,14 +154,14 @@ jobs:
        with:
          node-version: 22

-      - name: Enable Corepack
+      - name: Install pnpm
        if: steps.check.outputs.should_run == 'true'
-        run: corepack enable && corepack prepare yarn@4.9.1 --activate
+        uses: pnpm/action-setup@v4

      - name: Install dependencies
        if: steps.check.outputs.should_run == 'true'
        working-directory: main
-        run: yarn install --immutable
+        run: pnpm install --frozen-lockfile

      - name: Update upgrade config
        if: steps.check.outputs.should_run == 'true'
@@ -170,7 +170,7 @@ jobs:
          RELEASE_TAG: ${{ steps.meta.outputs.tag }}
          IS_PRERELEASE: ${{ steps.check.outputs.is_prerelease }}
        run: |
-          yarn tsx scripts/update-app-upgrade-config.ts \
+          pnpm tsx scripts/update-app-upgrade-config.ts \
            --tag "$RELEASE_TAG" \
            --config ../cs/app-upgrade-config.json \
            --is-prerelease "$IS_PRERELEASE"
@@ -1 +1 @@
-yarn lint-staged
+pnpm lint-staged
@@ -1,140 +0,0 @@
diff --git a/dist/index.js b/dist/index.js
index 73045a7d38faafdc7f7d2cd79d7ff0e2b031056b..8d948c9ac4ea4b474db9ef3c5491961e7fcf9a07 100644
--- a/dist/index.js
+++ b/dist/index.js
@@ -421,6 +421,17 @@ var OpenAICompatibleChatLanguageModel = class {
         text: reasoning
       });
     }
+    if (choice.message.images) {
+      for (const image of choice.message.images) {
+        const match1 = image.image_url.url.match(/^data:([^;]+)/)
+        const match2 = image.image_url.url.match(/^data:[^;]*;base64,(.+)$/);
+        content.push({
+          type: 'file',
+          mediaType: match1 ? (match1[1] ?? 'image/jpeg') : 'image/jpeg',
+          data: match2 ? match2[1] : image.image_url.url,
+        });
+      }
+    }
     if (choice.message.tool_calls != null) {
       for (const toolCall of choice.message.tool_calls) {
         content.push({
@@ -598,6 +609,17 @@ var OpenAICompatibleChatLanguageModel = class {
             delta: delta.content
           });
         }
+        if (delta.images) {
+          for (const image of delta.images) {
+            const match1 = image.image_url.url.match(/^data:([^;]+)/)
+            const match2 = image.image_url.url.match(/^data:[^;]*;base64,(.+)$/);
+            controller.enqueue({
+              type: 'file',
+              mediaType: match1 ? (match1[1] ?? 'image/jpeg') : 'image/jpeg',
+              data: match2 ? match2[1] : image.image_url.url,
+            });
+          }
+        }
         if (delta.tool_calls != null) {
           for (const toolCallDelta of delta.tool_calls) {
             const index = toolCallDelta.index;
@@ -765,6 +787,14 @@ var OpenAICompatibleChatResponseSchema = import_v43.z.object({
             arguments: import_v43.z.string()
           })
         })
+      ).nullish(),
+      images: import_v43.z.array(
+        import_v43.z.object({
+          type: import_v43.z.literal('image_url'),
+          image_url: import_v43.z.object({
+            url: import_v43.z.string(),
+          })
+        })
       ).nullish()
     }),
     finish_reason: import_v43.z.string().nullish()
@@ -795,6 +825,14 @@ var createOpenAICompatibleChatChunkSchema = (errorSchema) => import_v43.z.union(
             arguments: import_v43.z.string().nullish()
           })
         })
+      ).nullish(),
+      images: import_v43.z.array(
+        import_v43.z.object({
+          type: import_v43.z.literal('image_url'),
+          image_url: import_v43.z.object({
+            url: import_v43.z.string(),
+          })
+        })
       ).nullish()
     }).nullish(),
     finish_reason: import_v43.z.string().nullish()
diff --git a/dist/index.mjs b/dist/index.mjs
index 1c2b9560bbfbfe10cb01af080aeeed4ff59db29c..2c8ddc4fc9bfc5e7e06cfca105d197a08864c427 100644
--- a/dist/index.mjs
+++ b/dist/index.mjs
@@ -405,6 +405,17 @@ var OpenAICompatibleChatLanguageModel = class {
         text: reasoning
       });
     }
+    if (choice.message.images) {
+      for (const image of choice.message.images) {
+        const match1 = image.image_url.url.match(/^data:([^;]+)/)
+        const match2 = image.image_url.url.match(/^data:[^;]*;base64,(.+)$/);
+        content.push({
+          type: 'file',
+          mediaType: match1 ? (match1[1] ?? 'image/jpeg') : 'image/jpeg',
+          data: match2 ? match2[1] : image.image_url.url,
+        });
+      }
+    }
     if (choice.message.tool_calls != null) {
       for (const toolCall of choice.message.tool_calls) {
         content.push({
@@ -582,6 +593,17 @@ var OpenAICompatibleChatLanguageModel = class {
             delta: delta.content
           });
         }
+        if (delta.images) {
+          for (const image of delta.images) {
+            const match1 = image.image_url.url.match(/^data:([^;]+)/)
+            const match2 = image.image_url.url.match(/^data:[^;]*;base64,(.+)$/);
+            controller.enqueue({
+              type: 'file',
+              mediaType: match1 ? (match1[1] ?? 'image/jpeg') : 'image/jpeg',
+              data: match2 ? match2[1] : image.image_url.url,
+            });
+          }
+        }
         if (delta.tool_calls != null) {
           for (const toolCallDelta of delta.tool_calls) {
             const index = toolCallDelta.index;
@@ -749,6 +771,14 @@ var OpenAICompatibleChatResponseSchema = z3.object({
             arguments: z3.string()
          })
        })
+      ).nullish(),
+      images: z3.array(
+        z3.object({
+          type: z3.literal('image_url'),
+          image_url: z3.object({
+            url: z3.string(),
+          })
+        })
      ).nullish()
    }),
    finish_reason: z3.string().nullish()
@@ -779,6 +809,14 @@ var createOpenAICompatibleChatChunkSchema = (errorSchema) => z3.union([
            arguments: z3.string().nullish()
          })
        })
+      ).nullish(),
+      images: z3.array(
+        z3.object({
+          type: z3.literal('image_url'),
+          image_url: z3.object({
+            url: z3.string(),
+          })
+        })
      ).nullish()
    }).nullish(),
    finish_reason: z3.string().nullish()
.yarn/releases/yarn-4.9.1.cjs (binary file not shown)
@@ -1,9 +0,0 @@
-enableImmutableInstalls: false
-
-httpTimeout: 300000
-
-nodeLinker: node-modules
-
-yarnPath: .yarn/releases/yarn-4.9.1.cjs
-npmRegistryServer: https://registry.npmjs.org
-npmPublishRegistry: https://registry.npmjs.org
CLAUDE.md (35 changed lines)
@@ -10,7 +10,7 @@ This file provides guidance to AI coding assistants when working with code in th
 - **Log centrally**: Route all logging through `loggerService` with the right context—no `console.log`.
 - **Research via subagent**: Lean on `subagent` for external docs, APIs, news, and references.
 - **Always propose before executing**: Before making any changes, clearly explain your planned approach and wait for explicit user approval to ensure alignment and prevent unwanted modifications.
-- **Lint, test, and format before completion**: Coding tasks are only complete after running `yarn lint`, `yarn test`, and `yarn format` successfully.
+- **Lint, test, and format before completion**: Coding tasks are only complete after running `pnpm lint`, `pnpm test`, and `pnpm format` successfully.
 - **Write conventional commits**: Commit small, focused changes using Conventional Commit messages (e.g., `feat:`, `fix:`, `refactor:`, `docs:`).

 ## Pull Request Workflow (CRITICAL)
@@ -24,36 +24,39 @@ When creating a Pull Request, you MUST:

 ## Development Commands

-- **Install**: `yarn install` - Install all project dependencies
-- **Development**: `yarn dev` - Runs Electron app in development mode with hot reload
-- **Debug**: `yarn debug` - Starts with debugging enabled, use `chrome://inspect` to attach debugger
-- **Build Check**: `yarn build:check` - **REQUIRED** before commits (lint + test + typecheck)
-  - If having i18n sort issues, run `yarn sync:i18n` first to sync template
-  - If having formatting issues, run `yarn format` first
-- **Test**: `yarn test` - Run all tests (Vitest) across main and renderer processes
+- **Install**: `pnpm install` - Install all project dependencies
+- **Development**: `pnpm dev` - Runs Electron app in development mode with hot reload
+- **Debug**: `pnpm debug` - Starts with debugging enabled, use `chrome://inspect` to attach debugger
+- **Build Check**: `pnpm build:check` - **REQUIRED** before commits (lint + test + typecheck)
+  - If having i18n sort issues, run `pnpm i18n:sync` first to sync template
+  - If having formatting issues, run `pnpm format` first
+- **Test**: `pnpm test` - Run all tests (Vitest) across main and renderer processes
 - **Single Test**:
-  - `yarn test:main` - Run tests for main process only
-  - `yarn test:renderer` - Run tests for renderer process only
-- **Lint**: `yarn lint` - Fix linting issues and run TypeScript type checking
-- **Format**: `yarn format` - Auto-format code using Biome
+  - `pnpm test:main` - Run tests for main process only
+  - `pnpm test:renderer` - Run tests for renderer process only
+- **Lint**: `pnpm lint` - Fix linting issues and run TypeScript type checking
+- **Format**: `pnpm format` - Auto-format code using Biome

 ## Project Architecture

 ### Electron Structure

 - **Main Process** (`src/main/`): Node.js backend with services (MCP, Knowledge, Storage, etc.)
 - **Renderer Process** (`src/renderer/`): React UI with Redux state management
 - **Preload Scripts** (`src/preload/`): Secure IPC bridge

 ### Key Components

 - **AI Core** (`src/renderer/src/aiCore/`): Middleware pipeline for multiple AI providers.
 - **Services** (`src/main/services/`): MCPService, KnowledgeService, WindowService, etc.
-- **Build System**: Electron-Vite with experimental rolldown-vite, yarn workspaces.
+- **Build System**: Electron-Vite with experimental rolldown-vite, pnpm workspaces.
 - **State Management**: Redux Toolkit (`src/renderer/src/store/`) for predictable state.

 ### Logging

 ```typescript
-import { loggerService } from '@logger'
-const logger = loggerService.withContext('moduleName')
+import { loggerService } from "@logger";
+const logger = loggerService.withContext("moduleName");
 // Renderer: loggerService.initWindowSource('windowName') first
-logger.info('message', CONTEXT)
+logger.info("message", CONTEXT);
 ```
README.md (14 changed lines)
@@ -34,7 +34,7 @@
   </a>
 </h1>

-<p align="center">English | <a href="./docs/zh/README.md">中文</a> | <a href="https://cherry-ai.com">Official Site</a> | <a href="https://docs.cherry-ai.com/cherry-studio-wen-dang/en-us">Documents</a> | <a href="./docs/en/guides/development.md">Development</a> | <a href="https://github.com/CherryHQ/cherry-studio/issues">Feedback</a><br></p>
+<p align="center">English | <a href="./docs/zh/README.md">中文</a> | <a href="https://cherry-ai.com">Official Site</a> | <a href="https://docs.cherry-ai.com/docs/en-us">Documents</a> | <a href="./docs/en/guides/development.md">Development</a> | <a href="https://github.com/CherryHQ/cherry-studio/issues">Feedback</a><br></p>

 <div align="center">

@@ -242,12 +242,12 @@ The Enterprise Edition addresses core challenges in team collaboration by centra

 ## Version Comparison

-| Feature           | Community Edition                           | Enterprise Edition                  |
-| :---------------- | :------------------------------------------ | :---------------------------------- |
-| **Open Source**   | ✅ Yes                                       | ⭕️ Partially released to customers  |
+| Feature           | Community Edition                                                                     | Enterprise Edition                  |
+| :---------------- | :------------------------------------------------------------------------------------ | :---------------------------------- |
+| **Open Source**   | ✅ Yes                                                                                 | ⭕️ Partially released to customers  |
 | **Cost**          | [AGPL-3.0 License](https://github.com/CherryHQ/cherry-studio?tab=AGPL-3.0-1-ov-file)  | Buyout / Subscription Fee           |
-| **Admin Backend** | —                                           | ● Centralized **Model** Access<br>● **Employee** Management<br>● Shared **Knowledge Base**<br>● **Access** Control<br>● **Data** Backup |
-| **Server**        | —                                           | ✅ Dedicated Private Deployment      |
+| **Admin Backend** | —                                                                                      | ● Centralized **Model** Access<br>● **Employee** Management<br>● Shared **Knowledge Base**<br>● **Access** Control<br>● **Data** Backup |
+| **Server**        | —                                                                                      | ✅ Dedicated Private Deployment      |

 ## Get the Enterprise Edition

@@ -275,7 +275,7 @@ We believe the Enterprise Edition will become your team's AI productivity engine

 # 📊 GitHub Stats

-
+

 # ⭐️ Star History

@@ -50,7 +50,8 @@
     "!*.json",
     "!src/main/integration/**",
     "!**/tailwind.css",
-    "!**/package.json"
+    "!**/package.json",
+    "!.zed/**"
   ],
   "indentStyle": "space",
   "indentWidth": 2,
@@ -12,8 +12,13 @@

 ; https://github.com/electron-userland/electron-builder/issues/1122
 !ifndef BUILD_UNINSTALLER
+  ; Check VC++ Redistributable based on architecture stored in $1
   Function checkVCRedist
-    ReadRegDWORD $0 HKLM "SOFTWARE\Microsoft\VisualStudio\14.0\VC\Runtimes\x64" "Installed"
+    ${If} $1 == "arm64"
+      ReadRegDWORD $0 HKLM "SOFTWARE\Microsoft\VisualStudio\14.0\VC\Runtimes\ARM64" "Installed"
+    ${Else}
+      ReadRegDWORD $0 HKLM "SOFTWARE\Microsoft\VisualStudio\14.0\VC\Runtimes\x64" "Installed"
+    ${EndIf}
   FunctionEnd

   Function checkArchitectureCompatibility
@@ -97,29 +102,47 @@

     Call checkVCRedist
     ${If} $0 != "1"
-      MessageBox MB_YESNO "\
-        NOTE: ${PRODUCT_NAME} requires $\r$\n\
-        'Microsoft Visual C++ Redistributable'$\r$\n\
-        to function properly.$\r$\n$\r$\n\
-        Download and install now?" /SD IDYES IDYES InstallVCRedist IDNO DontInstall
-      InstallVCRedist:
-        inetc::get /CAPTION " " /BANNER "Downloading Microsoft Visual C++ Redistributable..." "https://aka.ms/vs/17/release/vc_redist.x64.exe" "$TEMP\vc_redist.x64.exe"
-        ExecWait "$TEMP\vc_redist.x64.exe /install /norestart"
-        ;IfErrors InstallError ContinueInstall ; vc_redist exit code is unreliable :(
-        Call checkVCRedist
-        ${If} $0 == "1"
-          Goto ContinueInstall
-        ${EndIf}
+      ; VC++ is required - install automatically since declining would abort anyway
+      ; Select download URL based on system architecture (stored in $1)
+      ${If} $1 == "arm64"
+        StrCpy $2 "https://aka.ms/vs/17/release/vc_redist.arm64.exe"
+        StrCpy $3 "$TEMP\vc_redist.arm64.exe"
+      ${Else}
+        StrCpy $2 "https://aka.ms/vs/17/release/vc_redist.x64.exe"
+        StrCpy $3 "$TEMP\vc_redist.x64.exe"
+      ${EndIf}

-      ;InstallError:
-        MessageBox MB_ICONSTOP "\
-          There was an unexpected error installing$\r$\n\
-          Microsoft Visual C++ Redistributable.$\r$\n\
-          The installation of ${PRODUCT_NAME} cannot continue."
-      DontInstall:
+      inetc::get /CAPTION " " /BANNER "Downloading Microsoft Visual C++ Redistributable..." \
+        $2 $3 /END
+      Pop $0 ; Get download status from inetc::get
+      ${If} $0 != "OK"
+        MessageBox MB_ICONSTOP|MB_YESNO "\
+          Failed to download Microsoft Visual C++ Redistributable.$\r$\n$\r$\n\
+          Error: $0$\r$\n$\r$\n\
+          Would you like to open the download page in your browser?$\r$\n\
+          $2" IDYES openDownloadUrl IDNO skipDownloadUrl
+        openDownloadUrl:
+          ExecShell "open" $2
+        skipDownloadUrl:
+          Abort
+      ${EndIf}

+      ExecWait "$3 /install /quiet /norestart"
+      ; Note: vc_redist exit code is unreliable, verify via registry check instead

+      Call checkVCRedist
+      ${If} $0 != "1"
+        MessageBox MB_ICONSTOP|MB_YESNO "\
+          Microsoft Visual C++ Redistributable installation failed.$\r$\n$\r$\n\
+          Would you like to open the download page in your browser?$\r$\n\
+          $2$\r$\n$\r$\n\
+          The installation of ${PRODUCT_NAME} cannot continue." IDYES openInstallUrl IDNO skipInstallUrl
+        openInstallUrl:
+          ExecShell "open" $2
+        skipInstallUrl:
+          Abort
+      ${EndIf}
     ${EndIf}
     ContinueInstall:
     Pop $4
     Pop $3
     Pop $2
@@ -11,7 +11,7 @@
 ### Install

 ```bash
-yarn
+pnpm install
 ```

 ### Development
@@ -20,35 +20,35 @@ yarn

 Download and install [Node.js v22.x.x](https://nodejs.org/en/download)

-### Setup Yarn
+### Setup pnpm

 ```bash
 corepack enable
-corepack prepare yarn@4.9.1 --activate
+corepack prepare pnpm@10.27.0 --activate
 ```

 ### Install Dependencies

 ```bash
-yarn install
+pnpm install
 ```

 ### ENV

 ```bash
-copy .env.example .env
+cp .env.example .env
 ```

 ### Start

 ```bash
-yarn dev
+pnpm dev
 ```

 ### Debug

 ```bash
-yarn debug
+pnpm debug
 ```

 Then input chrome://inspect in browser
@@ -56,18 +56,18 @@ Then input chrome://inspect in browser
 ### Test

 ```bash
-yarn test
+pnpm test
 ```

 ### Build

 ```bash
 # For windows
-$ yarn build:win
+$ pnpm build:win

 # For macOS
-$ yarn build:mac
+$ pnpm build:mac

 # For Linux
-$ yarn build:linux
+$ pnpm build:linux
 ```
@@ -71,7 +71,7 @@ Tools like i18n Ally cannot parse dynamic content within template strings, resul
 ```javascript
 // Not recommended - Plugin cannot resolve
-const message = t(`fruits.${fruit}`)
+const message = t(`fruits.${fruit}`);
 ```

 #### 2. **No Real-time Rendering in Editor**
@@ -91,14 +91,14 @@ For example:
 ```ts
 // src/renderer/src/i18n/label.ts
 const themeModeKeyMap = {
-  dark: 'settings.theme.dark',
-  light: 'settings.theme.light',
-  system: 'settings.theme.system'
-} as const
+  dark: "settings.theme.dark",
+  light: "settings.theme.light",
+  system: "settings.theme.system",
+} as const;

 export const getThemeModeLabel = (key: string): string => {
-  return themeModeKeyMap[key] ? t(themeModeKeyMap[key]) : key
-}
+  return themeModeKeyMap[key] ? t(themeModeKeyMap[key]) : key;
+};
 ```

 By avoiding template strings, you gain better developer experience, more reliable translation checks, and a more maintainable codebase.
@@ -107,7 +107,7 @@ By avoiding template strings, you gain better developer experience, more reliabl

 The project includes several scripts to automate i18n-related tasks:

-### `check:i18n` - Validate i18n Structure
+### `i18n:check` - Validate i18n Structure

 This script checks:

@@ -116,10 +116,10 @@ This script checks:
 - Whether keys are properly sorted

 ```bash
-yarn check:i18n
+pnpm i18n:check
 ```

-### `sync:i18n` - Synchronize JSON Structure and Sort Order
+### `i18n:sync` - Synchronize JSON Structure and Sort Order

 This script uses `zh-cn.json` as the source of truth to sync structure across all language files, including:

@@ -128,14 +128,14 @@ This script uses `zh-cn.json` as the source of truth to sync structure across al
 3. Sorting keys automatically

 ```bash
-yarn sync:i18n
+pnpm i18n:sync
 ```

-### `auto:i18n` - Automatically Translate Pending Texts
+### `i18n:translate` - Automatically Translate Pending Texts

 This script fills in texts marked as `[to be translated]` using machine translation.

-Typically, after adding new texts in `zh-cn.json`, run `sync:i18n`, then `auto:i18n` to complete translations.
+Typically, after adding new texts in `zh-cn.json`, run `i18n:sync`, then `i18n:translate` to complete translations.

 Before using this script, set the required environment variables:

@@ -148,30 +148,20 @@ MODEL="qwen-plus-latest"
 Alternatively, add these variables directly to your `.env` file.

 ```bash
-yarn auto:i18n
-```
-
-### `update:i18n` - Object-level Translation Update
-
-Updates translations in language files under `src/renderer/src/i18n/translate` at the object level, preserving existing translations and only updating new content.
-
-**Not recommended** — prefer `auto:i18n` for translation tasks.
-
-```bash
-yarn update:i18n
+pnpm i18n:translate
 ```

 ### Workflow

 1. During development, first add the required text in `zh-cn.json`
 2. Confirm it displays correctly in the Chinese environment
-3. Run `yarn sync:i18n` to propagate the keys to other language files
-4. Run `yarn auto:i18n` to perform machine translation
+3. Run `pnpm i18n:sync` to propagate the keys to other language files
+4. Run `pnpm i18n:translate` to perform machine translation
 5. Grab a coffee and let the magic happen!

 ## Best Practices

 1. **Use Chinese as Source Language**: All development starts in Chinese, then translates to other languages.
-2. **Run Check Script Before Commit**: Use `yarn check:i18n` to catch i18n issues early.
+2. **Run Check Script Before Commit**: Use `pnpm i18n:check` to catch i18n issues early.
 3. **Translate in Small Increments**: Avoid accumulating a large backlog of untranslated content.
 4. **Keep Keys Semantically Clear**: Keys should clearly express their purpose, e.g., `user.profile.avatar.upload.error`
@@ -37,8 +37,8 @@ The `x-files/app-upgrade-config/app-upgrade-config.json` file is synchronized by

 1. **Guard + metadata preparation** – the `Check if should proceed` and `Prepare metadata` steps compute the target tag, prerelease flag, whether the tag is the newest release, and a `safe_tag` slug used for branch names. When any rule fails, the workflow stops without touching the config.
 2. **Checkout source branches** – the default branch is checked out into `main/`, while the long-lived `x-files/app-upgrade-config` branch lives in `cs/`. All modifications happen in the latter directory.
-3. **Install toolchain** – Node.js 22, Corepack, and frozen Yarn dependencies are installed inside `main/`.
-4. **Run the update script** – `yarn tsx scripts/update-app-upgrade-config.ts --tag <tag> --config ../cs/app-upgrade-config.json --is-prerelease <flag>` updates the JSON in-place.
+3. **Install toolchain** – Node.js 22, Corepack, and frozen pnpm dependencies are installed inside `main/`.
+4. **Run the update script** – `pnpm tsx scripts/update-app-upgrade-config.ts --tag <tag> --config ../cs/app-upgrade-config.json --is-prerelease <flag>` updates the JSON in-place.
    - The script normalizes the tag (e.g., strips `v` prefix), detects the release channel (`latest`, `rc`, `beta`), and loads segment rules from `config/app-upgrade-segments.json`.
    - It validates that prerelease flags and semantic suffixes agree, enforces locked segments, builds mirror feed URLs, and performs release-availability checks (GitHub HEAD request for every channel; GitCode GET for latest channels, falling back to `https://releases.cherry-ai.com` when gitcode is delayed).
    - After updating the relevant channel entry, the script rewrites the config with semver-sort order and a new `lastUpdated` timestamp.
@@ -223,10 +223,10 @@ interface ChannelConfig {
 Starting from this change, `.github/workflows/update-app-upgrade-config.yml` listens to GitHub release events (published + prerelease). The workflow:

 1. Checks out the default branch (for scripts) and the `x-files/app-upgrade-config` branch (where the config is hosted).
-2. Runs `yarn tsx scripts/update-app-upgrade-config.ts --tag <tag> --config ../cs/app-upgrade-config.json` to regenerate the config directly inside the `x-files/app-upgrade-config` working tree.
+2. Runs `pnpm tsx scripts/update-app-upgrade-config.ts --tag <tag> --config ../cs/app-upgrade-config.json` to regenerate the config directly inside the `x-files/app-upgrade-config` working tree.
 3. If the file changed, it opens a PR against `x-files/app-upgrade-config` via `peter-evans/create-pull-request`, with the generated diff limited to `app-upgrade-config.json`.

-You can run the same script locally via `yarn update:upgrade-config --tag v2.1.6 --config ../cs/app-upgrade-config.json` (add `--dry-run` to preview) to reproduce or debug whatever the workflow does. Passing `--skip-release-checks` along with `--dry-run` lets you bypass the release-page existence check (useful when the GitHub/GitCode pages aren’t published yet). Running without `--config` continues to update the copy in your current working directory (main branch) for documentation purposes.
+You can run the same script locally via `pnpm update:upgrade-config --tag v2.1.6 --config ../cs/app-upgrade-config.json` (add `--dry-run` to preview) to reproduce or debug whatever the workflow does. Passing `--skip-release-checks` along with `--dry-run` lets you bypass the release-page existence check (useful when the GitHub/GitCode pages aren't published yet). Running without `--config` continues to update the copy in your current working directory (main branch) for documentation purposes.

 ## Version Matching Logic
docs/en/references/fuzzy-search.md (new file, 129 lines)
@@ -0,0 +1,129 @@

# Fuzzy Search for File List

This document describes the fuzzy search implementation for file listing in Cherry Studio.

## Overview

The fuzzy search feature allows users to find files by typing partial or approximate file names/paths. It uses a two-tier file filtering strategy (ripgrep glob pre-filtering with greedy substring fallback) combined with subsequence-based scoring for optimal performance and flexibility.

## Features

- **Ripgrep Glob Pre-filtering**: Primary filtering using glob patterns for fast native-level filtering
- **Greedy Substring Matching**: Fallback file filtering strategy when ripgrep glob pre-filtering returns no results
- **Subsequence-based Segment Scoring**: During scoring, path segments gain additional weight when query characters appear in order
- **Relevance Scoring**: Results are sorted by a relevance score derived from multiple factors

## Matching Strategies

### 1. Ripgrep Glob Pre-filtering (Primary)

The query is converted to a glob pattern for ripgrep to do initial filtering:

```
Query: "updater"
Glob: "*u*p*d*a*t*e*r*"
```

This leverages ripgrep's native performance for the initial file filtering.
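The conversion is a small string transformation. A minimal sketch (illustrative only; the escaping details of the real `queryToGlobPattern()` in `FileStorage.ts` may differ):

```typescript
// Sketch: interleave wildcards so each query character may match
// anywhere in the path, as long as the characters appear in order.
function queryToGlobPattern(query: string): string {
  // Escape characters that are special in glob syntax first
  const escaped = [...query].map((ch) => ('*?[]{}\\'.includes(ch) ? `\\${ch}` : ch))
  return `*${escaped.join('*')}*`
}

queryToGlobPattern('updater') // => "*u*p*d*a*t*e*r*"
```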
### 2. Greedy Substring Matching (Fallback)

When the glob pre-filter returns no results, the system falls back to greedy substring matching. This allows more flexible matching:

```
Query: "updatercontroller"
File: "packages/update/src/node/updateController.ts"

Matching process:
1. Find "update" (longest match from start)
2. Remaining "rcontroller" → find "r" then "controller"
3. All parts matched → Success
```
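A sketch of this fallback, mirroring the process described above (illustrative; not necessarily the exact code of `isGreedySubstringMatch()`):

```typescript
// Repeatedly consume the longest prefix of the remaining query that
// occurs in the target after the previous match; fail if a prefix of
// length 1 cannot be found.
function isGreedySubstringMatch(query: string, target: string): boolean {
  const t = target.toLowerCase()
  let rest = query.toLowerCase()
  let from = 0
  while (rest.length > 0) {
    let len = rest.length
    let at = -1
    // Shrink the candidate prefix until it is found after `from`
    while (len > 0 && (at = t.indexOf(rest.slice(0, len), from)) === -1) len--
    if (len === 0) return false // next character can never be matched
    from = at + len // later pieces must appear after this match
    rest = rest.slice(len)
  }
  return true
}

isGreedySubstringMatch('updatercontroller', 'packages/update/src/node/updateController.ts') // => true
```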
## Scoring Algorithm

Results are ranked by a relevance score based on named constants defined in `FileStorage.ts`:

| Constant | Value | Description |
|----------|-------|-------------|
| `SCORE_FILENAME_STARTS` | 100 | Filename starts with query (highest priority) |
| `SCORE_FILENAME_CONTAINS` | 80 | Filename contains exact query substring |
| `SCORE_SEGMENT_MATCH` | 60 | Per path segment that matches query |
| `SCORE_WORD_BOUNDARY` | 20 | Query matches start of a word |
| `SCORE_CONSECUTIVE_CHAR` | 15 | Per consecutive character match |
| `PATH_LENGTH_PENALTY_FACTOR` | 4 | Logarithmic penalty for longer paths |

### Scoring Strategy

The scoring prioritizes:
1. **Filename matches** (highest): Files where the query appears in the filename are most relevant
2. **Path segment matches**: Multiple matching segments indicate stronger relevance
3. **Word boundaries**: Matching at word starts (e.g., "upd" matching "update") is preferred
4. **Consecutive matches**: Longer consecutive character sequences score higher
5. **Path length**: Shorter paths are preferred (logarithmic penalty prevents long paths from dominating)
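As an illustration of how such constants could combine, here is a simplified sketch (assumed structure only; the real `getFuzzyMatchScore()` also applies the word-boundary and consecutive-character bonuses, omitted here for brevity):

```typescript
const SCORE_FILENAME_STARTS = 100
const SCORE_FILENAME_CONTAINS = 80
const SCORE_SEGMENT_MATCH = 60
const PATH_LENGTH_PENALTY_FACTOR = 4

// Sketch: filename hits dominate, each matching path segment adds
// weight, and a logarithmic penalty favors shorter paths.
function scoreSketch(query: string, relPath: string): number {
  const q = query.toLowerCase()
  const segments = relPath.toLowerCase().split('/')
  const filename = segments[segments.length - 1]
  let score = 0
  if (filename.startsWith(q)) score += SCORE_FILENAME_STARTS
  else if (filename.includes(q)) score += SCORE_FILENAME_CONTAINS
  for (const segment of segments) {
    if (segment.includes(q)) score += SCORE_SEGMENT_MATCH
  }
  return score - PATH_LENGTH_PENALTY_FACTOR * Math.log(relPath.length + 1)
}
```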
### Example Scoring

For query `updater`:

| File | Score Factors |
|------|---------------|
| `RCUpdater.js` | Short path + filename contains "updater" |
| `updateController.ts` | Multiple segment matches |
| `UpdaterHelper.plist` | Long path penalty |

## Configuration

### DirectoryListOptions

```typescript
interface DirectoryListOptions {
  recursive?: boolean          // Default: true
  maxDepth?: number            // Default: 10
  includeHidden?: boolean      // Default: false
  includeFiles?: boolean       // Default: true
  includeDirectories?: boolean // Default: true
  maxEntries?: number          // Default: 20
  searchPattern?: string       // Default: '.'
  fuzzy?: boolean              // Default: true
}
```

## Usage

```typescript
// Basic fuzzy search
const files = await window.api.file.listDirectory(dirPath, {
  searchPattern: 'updater',
  fuzzy: true,
  maxEntries: 20
})

// Disable fuzzy search (exact glob matching)
const files = await window.api.file.listDirectory(dirPath, {
  searchPattern: 'update',
  fuzzy: false
})
```

## Performance Considerations

1. **Ripgrep Pre-filtering**: Most queries are handled by ripgrep's native glob matching, which is extremely fast
2. **Fallback Only When Needed**: Greedy substring matching (which loads all files) only runs when glob matching returns empty results
3. **Result Limiting**: Only top 20 results are returned by default
4. **Excluded Directories**: Common large directories are automatically excluded:
   - `node_modules`
   - `.git`
   - `dist`, `build`
   - `.next`, `.nuxt`
   - `coverage`, `.cache`

## Implementation Details

The implementation is located in `src/main/services/FileStorage.ts`:

- `queryToGlobPattern()`: Converts query to ripgrep glob pattern
- `isFuzzyMatch()`: Subsequence matching algorithm
- `isGreedySubstringMatch()`: Greedy substring matching fallback
- `getFuzzyMatchScore()`: Calculates relevance score
- `listDirectoryWithRipgrep()`: Main search orchestration
@@ -34,7 +34,7 @@
   </a>
 </h1>
 <p align="center">
-  <a href="https://github.com/CherryHQ/cherry-studio">English</a> | 中文 | <a href="https://cherry-ai.com">官方网站</a> | <a href="https://docs.cherry-ai.com/cherry-studio-wen-dang/zh-cn">文档</a> | <a href="./guides/development.md">开发</a> | <a href="https://github.com/CherryHQ/cherry-studio/issues">反馈</a><br>
+  <a href="https://github.com/CherryHQ/cherry-studio">English</a> | 中文 | <a href="https://docs.cherry-ai.com">文档</a> | <a href="https://cherry-ai.com">官方网站</a> | <a href="./guides/development.md">开发</a> | <a href="https://github.com/CherryHQ/cherry-studio/issues">反馈</a><br>
 </p>

 <!-- 题头徽章组合 -->
@@ -281,7 +281,7 @@ https://docs.cherry-ai.com

 # 📊 GitHub 统计

-
+

 # ⭐️ Star 记录

@@ -11,7 +11,7 @@
 ### Install

 ```bash
-yarn
+pnpm install
 ```

 ### Development
@@ -20,35 +20,35 @@ yarn

 Download and install [Node.js v22.x.x](https://nodejs.org/en/download)

-### Setup Yarn
+### Setup pnpm

 ```bash
 corepack enable
-corepack prepare yarn@4.9.1 --activate
+corepack prepare pnpm@10.27.0 --activate
 ```

 ### Install Dependencies

 ```bash
-yarn install
+pnpm install
 ```

 ### ENV

 ```bash
-copy .env.example .env
+cp .env.example .env
 ```

 ### Start

 ```bash
-yarn dev
+pnpm dev
 ```

 ### Debug

 ```bash
-yarn debug
+pnpm debug
 ```

 Then input chrome://inspect in browser
@@ -56,18 +56,18 @@ Then input chrome://inspect in browser
 ### Test

 ```bash
-yarn test
+pnpm test
 ```

 ### Build

 ```bash
 # For windows
-$ yarn build:win
+$ pnpm build:win

 # For macOS
-$ yarn build:mac
+$ pnpm build:mac

 # For Linux
-$ yarn build:linux
+$ pnpm build:linux
 ```
@@ -1,17 +1,17 @@
 # 如何优雅地做好 i18n

-## 使用i18n ally插件提升开发体验
+## 使用 i18n ally 插件提升开发体验

-i18n ally是一个强大的VSCode插件,它能在开发阶段提供实时反馈,帮助开发者更早发现文案缺失和错译问题。
+i18n ally 是一个强大的 VSCode 插件,它能在开发阶段提供实时反馈,帮助开发者更早发现文案缺失和错译问题。

 项目中已经配置好了插件设置,直接安装即可。

 ### 开发时优势

 - **实时预览**:翻译文案会直接显示在编辑器中
-- **错误检测**:自动追踪标记出缺失的翻译或未使用的key
-- **快速跳转**:可通过key直接跳转到定义处(Ctrl/Cmd + click)
-- **自动补全**:输入i18n key时提供自动补全建议
+- **错误检测**:自动追踪标记出缺失的翻译或未使用的 key
+- **快速跳转**:可通过 key 直接跳转到定义处(Ctrl/Cmd + click)
+- **自动补全**:输入 i18n key 时提供自动补全建议

 ### 效果展示

@@ -23,9 +23,9 @@ i18n ally是一个强大的VSCode插件,它能在开发阶段提供实时反

 ## i18n 约定

-### **绝对避免使用flat格式**
+### **绝对避免使用 flat 格式**

-绝对避免使用flat格式,如`"add.button.tip": "添加"`。应采用清晰的嵌套结构:
+绝对避免使用 flat 格式,如`"add.button.tip": "添加"`。应采用清晰的嵌套结构:

 ```json
 // 错误示例 - flat结构
@@ -52,14 +52,14 @@ i18n ally是一个强大的VSCode插件,它能在开发阶段提供实时反
 #### 为什么要使用嵌套结构

 1. **自然分组**:通过对象结构天然能将相关上下文的文案分到一个组别中
-2. **插件要求**:i18n ally 插件需要嵌套或flat格式其一的文件才能正常分析
+2. **插件要求**:i18n ally 插件需要嵌套或 flat 格式其一的文件才能正常分析

 ### **避免在`t()`中使用模板字符串**

-**强烈建议避免使用模板字符串**进行动态插值。虽然模板字符串在JavaScript开发中非常方便,但在国际化场景下会带来一系列问题。
+**强烈建议避免使用模板字符串**进行动态插值。虽然模板字符串在 JavaScript 开发中非常方便,但在国际化场景下会带来一系列问题。

 1. **插件无法跟踪**
-   i18n ally等工具无法解析模板字符串中的动态内容,导致:
+   i18n ally 等工具无法解析模板字符串中的动态内容,导致:

    - 无法正确显示实时预览
    - 无法检测翻译缺失
@@ -67,11 +67,11 @@ i18n ally是一个强大的VSCode插件,它能在开发阶段提供实时反

 ```javascript
 // 不推荐 - 插件无法解析
-const message = t(`fruits.${fruit}`)
+const message = t(`fruits.${fruit}`);
 ```

 2. **编辑器无法实时渲染**
-   在IDE中,模板字符串会显示为原始代码而非最终翻译结果,降低了开发体验。
+   在 IDE 中,模板字符串会显示为原始代码而非最终翻译结果,降低了开发体验。

 3. **更难以维护**
    由于插件无法跟踪这样的文案,编辑器中也无法渲染,开发者必须人工确认语言文件中是否存在相应的文案。
@@ -85,36 +85,36 @@ i18n ally是一个强大的VSCode插件,它能在开发阶段提供实时反
 ```ts
 // src/renderer/src/i18n/label.ts
 const themeModeKeyMap = {
-  dark: 'settings.theme.dark',
-  light: 'settings.theme.light',
-  system: 'settings.theme.system'
-} as const
+  dark: "settings.theme.dark",
+  light: "settings.theme.light",
+  system: "settings.theme.system",
+} as const;

 export const getThemeModeLabel = (key: string): string => {
-  return themeModeKeyMap[key] ? t(themeModeKeyMap[key]) : key
-}
+  return themeModeKeyMap[key] ? t(themeModeKeyMap[key]) : key;
+};
 ```

 通过避免模板字符串,可以获得更好的开发体验、更可靠的翻译检查以及更易维护的代码库。

 ## 自动化脚本

-项目中有一系列脚本来自动化i18n相关任务:
+项目中有一系列脚本来自动化 i18n 相关任务:

-### `check:i18n` - 检查i18n结构
+### `i18n:check` - 检查 i18n 结构

 此脚本会检查:

 - 所有语言文件是否为嵌套结构
-- 是否存在缺失的key
-- 是否存在多余的key
+- 是否存在缺失的 key
+- 是否存在多余的 key
 - 是否已经有序

 ```bash
-yarn check:i18n
+pnpm i18n:check
 ```

-### `sync:i18n` - 同步json结构与排序
+### `i18n:sync` - 同步 json 结构与排序

 此脚本以`zh-cn.json`文件为基准,将结构同步到其他语言文件,包括:

@@ -123,14 +123,14 @@ yarn check:i18n
 3. 自动排序

 ```bash
-yarn sync:i18n
+pnpm i18n:sync
 ```

-### `auto:i18n` - 自动翻译待翻译文本
+### `i18n:translate` - 自动翻译待翻译文本

 次脚本自动将标记为待翻译的文本通过机器翻译填充。

-通常,在`zh-cn.json`中添加所需文案后,执行`sync:i18n`即可自动完成翻译。
+通常,在`zh-cn.json`中添加所需文案后,执行`i18n:sync`即可自动完成翻译。

 使用该脚本前,需要配置环境变量,例如:

@@ -143,29 +143,19 @@ MODEL="qwen-plus-latest"
 你也可以通过直接编辑`.env`文件来添加环境变量。

 ```bash
-yarn auto:i18n
-```
-
-### `update:i18n` - 对象级别翻译更新
-
-对`src/renderer/src/i18n/translate`中的语言文件进行对象级别的翻译更新,保留已有翻译,只更新新增内容。
-
-**不建议**使用该脚本,更推荐使用`auto:i18n`进行翻译。
-
-```bash
-yarn update:i18n
+pnpm i18n:translate
 ```

 ### 工作流

 1. 开发阶段,先在`zh-cn.json`中添加所需文案
-2. 确认在中文环境下显示无误后,使用`yarn sync:i18n`将文案同步到其他语言文件
-3. 使用`yarn auto:i18n`进行自动翻译
+2. 确认在中文环境下显示无误后,使用`pnpm i18n:sync`将文案同步到其他语言文件
+3. 使用`pnpm i18n:translate`进行自动翻译
 4. 喝杯咖啡,等翻译完成吧!

 ## 最佳实践

 1. **以中文为源语言**:所有开发首先使用中文,再翻译为其他语言
-2. **提交前运行检查脚本**:使用`yarn check:i18n`检查i18n是否有问题
+2. **提交前运行检查脚本**:使用`pnpm i18n:check`检查 i18n 是否有问题
 3. **小步提交翻译**:避免积累大量未翻译文本
-4. **保持key语义明确**:key应能清晰表达其用途,如`user.profile.avatar.upload.error`
+4. **保持 key 语义明确**:key 应能清晰表达其用途,如`user.profile.avatar.upload.error`
@@ -37,8 +37,8 @@

 1. **检查与元数据准备**:`Check if should proceed` 和 `Prepare metadata` 步骤会计算 tag、prerelease 标志、是否最新版本以及用于分支名的 `safe_tag`。若任意校验失败,工作流立即退出。
 2. **检出分支**:默认分支被检出到 `main/`,长期维护的 `x-files/app-upgrade-config` 分支则在 `cs/` 中,所有改动都发生在 `cs/`。
-3. **安装工具链**:安装 Node.js 22、启用 Corepack,并在 `main/` 目录执行 `yarn install --immutable`。
-4. **运行更新脚本**:执行 `yarn tsx scripts/update-app-upgrade-config.ts --tag <tag> --config ../cs/app-upgrade-config.json --is-prerelease <flag>`。
+3. **安装工具链**:安装 Node.js 22、启用 Corepack,并在 `main/` 目录执行 `pnpm install --frozen-lockfile`。
+4. **运行更新脚本**:执行 `pnpm tsx scripts/update-app-upgrade-config.ts --tag <tag> --config ../cs/app-upgrade-config.json --is-prerelease <flag>`。
    - 脚本会标准化 tag(去掉 `v` 前缀等)、识别渠道、加载 `config/app-upgrade-segments.json` 中的分段规则。
    - 校验 prerelease 标志与语义后缀是否匹配、强制锁定的 segment 是否满足、生成镜像的下载地址,并检查 release 是否已经在 GitHub/GitCode 可用(latest 渠道在 GitCode 不可用时会回退到 `https://releases.cherry-ai.com`)。
    - 更新对应的渠道配置后,脚本会按 semver 排序写回 JSON,并刷新 `lastUpdated`。
@@ -223,10 +223,10 @@ interface ChannelConfig {
 `.github/workflows/update-app-upgrade-config.yml` 会在 GitHub Release(包含正常发布与 Pre Release)触发:

 1. 同时 Checkout 仓库默认分支(用于脚本)和 `x-files/app-upgrade-config` 分支(真实托管配置的分支)。
-2. 在默认分支目录执行 `yarn tsx scripts/update-app-upgrade-config.ts --tag <tag> --config ../cs/app-upgrade-config.json`,直接重写 `x-files/app-upgrade-config` 分支里的配置文件。
+2. 在默认分支目录执行 `pnpm tsx scripts/update-app-upgrade-config.ts --tag <tag> --config ../cs/app-upgrade-config.json`,直接重写 `x-files/app-upgrade-config` 分支里的配置文件。
 3. 如果 `app-upgrade-config.json` 有变化,则通过 `peter-evans/create-pull-request` 自动创建一个指向 `x-files/app-upgrade-config` 的 PR,Diff 仅包含该文件。

-如需本地调试,可执行 `yarn update:upgrade-config --tag v2.1.6 --config ../cs/app-upgrade-config.json`(加 `--dry-run` 仅打印结果)来复现 CI 行为。若需要暂时跳过 GitHub/GitCode Release 页面是否就绪的校验,可在 `--dry-run` 的同时附加 `--skip-release-checks`。不加 `--config` 时默认更新当前工作目录(通常是 main 分支)下的副本,方便文档/审查。
+如需本地调试,可执行 `pnpm update:upgrade-config --tag v2.1.6 --config ../cs/app-upgrade-config.json`(加 `--dry-run` 仅打印结果)来复现 CI 行为。若需要暂时跳过 GitHub/GitCode Release 页面是否就绪的校验,可在 `--dry-run` 的同时附加 `--skip-release-checks`。不加 `--config` 时默认更新当前工作目录(通常是 main 分支)下的副本,方便文档/审查。

 ## 版本匹配逻辑
docs/zh/references/fuzzy-search.md (new file, 129 lines)
@@ -0,0 +1,129 @@

# 文件列表模糊搜索

本文档描述了 Cherry Studio 中文件列表的模糊搜索实现。

## 概述

模糊搜索功能允许用户通过输入部分或近似的文件名/路径来查找文件。它使用两层文件过滤策略(ripgrep glob 预过滤 + 贪婪子串匹配回退),结合基于子序列的评分,以获得最佳性能和灵活性。

## 功能特性

- **Ripgrep Glob 预过滤**:使用 glob 模式进行快速原生级过滤的主要过滤策略
- **贪婪子串匹配**:当 ripgrep glob 预过滤无结果时的回退文件过滤策略
- **基于子序列的段评分**:评分时,当查询字符按顺序出现时,路径段获得额外权重
- **相关性评分**:结果按多因素相关性分数排序

## 匹配策略

### 1. Ripgrep Glob 预过滤(主要)

查询被转换为 glob 模式供 ripgrep 进行初始过滤:

```
查询: "updater"
Glob: "*u*p*d*a*t*e*r*"
```

这利用了 ripgrep 的原生性能进行初始文件过滤。
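下面是该转换的一个最小示意(仅供说明;实际 `queryToGlobPattern()` 的转义细节可能不同):

```typescript
// 示意:转义 glob 特殊字符后,在每个字符之间插入通配符,
// 使查询字符可以按顺序出现在路径的任意位置
function queryToGlobPattern(query: string): string {
  const escaped = [...query].map((ch) => ('*?[]{}\\'.includes(ch) ? `\\${ch}` : ch))
  return `*${escaped.join('*')}*`
}

queryToGlobPattern('updater') // => "*u*p*d*a*t*e*r*"
```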
### 2. 贪婪子串匹配(回退)

当 glob 预过滤无结果时,系统回退到贪婪子串匹配。这允许更灵活的匹配:

```
查询: "updatercontroller"
文件: "packages/update/src/node/updateController.ts"

匹配过程:
1. 找到 "update"(从开头的最长匹配)
2. 剩余 "rcontroller" → 找到 "r" 然后 "controller"
3. 所有部分都匹配 → 成功
```

## 评分算法

结果根据 `FileStorage.ts` 中定义的命名常量进行相关性分数排名:

| 常量 | 值 | 描述 |
|------|-----|------|
| `SCORE_FILENAME_STARTS` | 100 | 文件名以查询开头(最高优先级)|
| `SCORE_FILENAME_CONTAINS` | 80 | 文件名包含精确查询子串 |
| `SCORE_SEGMENT_MATCH` | 60 | 每个匹配查询的路径段 |
| `SCORE_WORD_BOUNDARY` | 20 | 查询匹配单词开头 |
| `SCORE_CONSECUTIVE_CHAR` | 15 | 每个连续字符匹配 |
| `PATH_LENGTH_PENALTY_FACTOR` | 4 | 较长路径的对数惩罚 |

### 评分策略

评分优先级:
1. **文件名匹配**(最高):查询出现在文件名中的文件最相关
2. **路径段匹配**:多个匹配段表示更强的相关性
3. **词边界**:在单词开头匹配(如 "upd" 匹配 "update")更优先
4. **连续匹配**:更长的连续字符序列得分更高
5. **路径长度**:较短路径更优先(对数惩罚防止长路径主导评分)

### 评分示例

对于查询 `updater`:

| 文件 | 评分因素 |
|------|----------|
| `RCUpdater.js` | 短路径 + 文件名包含 "updater" |
| `updateController.ts` | 多个路径段匹配 |
| `UpdaterHelper.plist` | 长路径惩罚 |

## 配置

### DirectoryListOptions

```typescript
interface DirectoryListOptions {
  recursive?: boolean          // 默认: true
  maxDepth?: number            // 默认: 10
  includeHidden?: boolean      // 默认: false
  includeFiles?: boolean       // 默认: true
  includeDirectories?: boolean // 默认: true
  maxEntries?: number          // 默认: 20
  searchPattern?: string       // 默认: '.'
  fuzzy?: boolean              // 默认: true
}
```

## 使用方法

```typescript
// 基本模糊搜索
const files = await window.api.file.listDirectory(dirPath, {
  searchPattern: 'updater',
  fuzzy: true,
  maxEntries: 20
})

// 禁用模糊搜索(精确 glob 匹配)
const files = await window.api.file.listDirectory(dirPath, {
  searchPattern: 'update',
  fuzzy: false
})
```

## 性能考虑

1. **Ripgrep 预过滤**:大多数查询由 ripgrep 的原生 glob 匹配处理,速度极快
2. **仅在需要时回退**:贪婪子串匹配(加载所有文件)仅在 glob 匹配返回空结果时运行
3. **结果限制**:默认只返回前 20 个结果
4. **排除目录**:自动排除常见的大型目录:
   - `node_modules`
   - `.git`
   - `dist`、`build`
   - `.next`、`.nuxt`
   - `coverage`、`.cache`

## 实现细节

实现位于 `src/main/services/FileStorage.ts`:

- `queryToGlobPattern()`:将查询转换为 ripgrep glob 模式
- `isFuzzyMatch()`:子序列匹配算法
- `isGreedySubstringMatch()`:贪婪子串匹配回退
- `getFuzzyMatchScore()`:计算相关性分数
- `listDirectoryWithRipgrep()`:主搜索协调
docs/zh/references/lan-transfer-protocol.md (new file, 850 lines)
@ -0,0 +1,850 @@
|
||||
# Cherry Studio 局域网传输协议规范
|
||||
|
||||
> 版本: 1.0
|
||||
> 最后更新: 2025-12
|
||||
|
||||
本文档定义了 Cherry Studio 桌面客户端(Electron)与移动端(Expo)之间的局域网文件传输协议。
|
||||
|
||||
---
|
||||
|
||||
## 目录
|
||||
|
||||
1. [协议概述](#1-协议概述)
|
||||
2. [服务发现(Bonjour/mDNS)](#2-服务发现bonjourmdns)
|
||||
3. [TCP 连接与握手](#3-tcp-连接与握手)
|
||||
4. [消息格式规范](#4-消息格式规范)
|
||||
5. [文件传输协议](#5-文件传输协议)
|
||||
6. [心跳与连接保活](#6-心跳与连接保活)
|
||||
7. [错误处理](#7-错误处理)
|
||||
8. [常量与配置](#8-常量与配置)
|
||||
9. [完整时序图](#9-完整时序图)
|
||||
10. [移动端实现指南](#10-移动端实现指南)
|
||||
|
||||
---
|
||||
|
||||
## 1. 协议概述
|
||||
|
||||
### 1.1 架构角色
|
||||
|
||||
| 角色 | 平台 | 职责 |
|
||||
| -------------------- | --------------- | ---------------------------- |
|
||||
| **Client(客户端)** | Electron 桌面端 | 扫描服务、发起连接、发送文件 |
|
||||
| **Server(服务端)** | Expo 移动端 | 发布服务、接受连接、接收文件 |
|
||||
|
||||
### 1.2 协议栈(v1)
|
||||
|
||||
```
|
||||
┌─────────────────────────────────────┐
|
||||
│ 应用层(文件传输) │
|
||||
├─────────────────────────────────────┤
|
||||
│ 消息层(控制: JSON \n) │
|
||||
│ (数据: 二进制帧) │
|
||||
├─────────────────────────────────────┤
|
||||
│ 传输层(TCP) │
|
||||
├─────────────────────────────────────┤
|
||||
│ 发现层(Bonjour/mDNS) │
|
||||
└─────────────────────────────────────┘
|
||||
```
|
||||
|
||||
### 1.3 通信流程概览
|
||||
|
||||
```
|
||||
1. 服务发现 → 移动端发布 mDNS 服务,桌面端扫描发现
|
||||
2. TCP 握手 → 建立连接,交换设备信息(`version=1`)
|
||||
3. 文件传输 → 控制消息使用 JSON,`file_chunk` 使用二进制帧分块传输
|
||||
4. 连接保活 → ping/pong 心跳
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## 2. 服务发现(Bonjour/mDNS)
|
||||
|
||||
### 2.1 服务类型
|
||||
|
||||
| 属性 | 值 |
|
||||
| ------------ | -------------------- |
|
||||
| 服务类型 | `cherrystudio` |
|
||||
| 协议 | `tcp` |
|
||||
| 完整服务标识 | `_cherrystudio._tcp` |
|
||||
|
||||
### 2.2 服务发布(移动端)
|
||||
|
||||
移动端需要通过 mDNS/Bonjour 发布服务:
|
||||
|
||||
```typescript
|
||||
// 服务发布参数
|
||||
{
|
||||
name: "Cherry Studio Mobile", // 设备名称
|
||||
type: "cherrystudio", // 服务类型
|
||||
protocol: "tcp", // 协议
|
||||
port: 53317, // TCP 监听端口
|
||||
txt: { // TXT 记录(可选)
|
||||
version: "1",
|
||||
platform: "ios" // 或 "android"
|
||||
}
|
||||
}
|
||||
```
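
以推荐库 `react-native-zeroconf` 为例的发布示意(API 形态以所用版本文档为准,此处仅为假设用法):

```typescript
import Zeroconf from "react-native-zeroconf";

const zeroconf = new Zeroconf();
// 参数依次为:type, protocol, domain, name, port, txt
zeroconf.publishService("cherrystudio", "tcp", "local.", "Cherry Studio Mobile", 53317, {
  version: "1",
  platform: "ios"
});
```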
|
||||
|
||||
### 2.3 服务发现(桌面端)
|
||||
|
||||
桌面端扫描并解析服务信息:
|
||||
|
||||
```typescript
|
||||
// 发现的服务信息结构
|
||||
type LocalTransferPeer = {
|
||||
id: string; // 唯一标识符
|
||||
name: string; // 设备名称
|
||||
host?: string; // 主机名
|
||||
fqdn?: string; // 完全限定域名
|
||||
port?: number; // TCP 端口
|
||||
type?: string; // 服务类型
|
||||
protocol?: "tcp" | "udp"; // 协议
|
||||
addresses: string[]; // IP 地址列表
|
||||
txt?: Record<string, string>; // TXT 记录
|
||||
updatedAt: number; // 发现时间戳
|
||||
};
|
||||
```
|
||||
|
||||
### 2.4 IP 地址选择策略
|
||||
|
||||
当服务有多个 IP 地址时,优先选择 IPv4:
|
||||
|
||||
```typescript
|
||||
import { isIPv4 } from "node:net";

// 优先选择 IPv4 地址
|
||||
const preferredAddress = addresses.find((addr) => isIPv4(addr)) || addresses[0];
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## 3. TCP 连接与握手
|
||||
|
||||
### 3.1 连接建立
|
||||
|
||||
1. 客户端使用发现的 `host:port` 建立 TCP 连接
|
||||
2. 连接成功后立即发送握手消息
|
||||
3. 等待服务端响应握手确认
|
||||
|
||||
### 3.2 握手消息(协议版本 v1)
|
||||
|
||||
#### Client → Server: `handshake`
|
||||
|
||||
```typescript
|
||||
type LanTransferHandshakeMessage = {
|
||||
type: "handshake";
|
||||
deviceName: string; // 设备名称
|
||||
version: string; // 协议版本,当前为 "1"
|
||||
platform?: string; // 平台:'darwin' | 'win32' | 'linux'
|
||||
appVersion?: string; // 应用版本
|
||||
};
|
||||
```
|
||||
|
||||
**示例:**
|
||||
|
||||
```json
|
||||
{
|
||||
"type": "handshake",
|
||||
"deviceName": "Cherry Studio 1.7.2",
|
||||
"version": "1",
|
||||
"platform": "darwin",
|
||||
"appVersion": "1.7.2"
|
||||
}
|
||||
```
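
#### Server → Client: `handshake_ack`

服务端校验握手信息后返回确认,字段与附录 A 的类型定义一致:

```typescript
type LanTransferHandshakeAckMessage = {
  type: "handshake_ack";
  accepted: boolean; // 是否接受连接
  message?: string; // 拒绝原因(可选)
};
```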
|
||||
|
||||
## 4. 消息格式规范(混合协议)
|
||||
|
||||
v1 使用"控制 JSON + 二进制数据帧"的混合协议(流式传输模式,无 per-chunk ACK):
|
||||
|
||||
- **控制消息**(握手、心跳、file_start/ack、file_end、file_complete):UTF-8 JSON,`\n` 分隔
|
||||
- **数据消息**(`file_chunk`):二进制帧,使用 Magic + 总长度做分帧,不经 Base64
|
||||
|
||||
### 4.1 控制消息编码(JSON + `\n`)
|
||||
|
||||
| 属性 | 规范 |
|
||||
| ---------- | ------------ |
|
||||
| 编码格式 | UTF-8 |
|
||||
| 序列化格式 | JSON |
|
||||
| 消息分隔符 | `\n`(0x0A) |
|
||||
|
||||
```typescript
|
||||
function sendControlMessage(socket: Socket, message: object): void {
|
||||
socket.write(`${JSON.stringify(message)}\n`);
|
||||
}
|
||||
```
|
||||
|
||||
### 4.2 `file_chunk` 二进制帧格式
|
||||
|
||||
为解决 TCP 分包/粘包并消除 Base64 开销,`file_chunk` 采用带总长度的二进制帧:
|
||||
|
||||
```
|
||||
┌──────────┬──────────┬────────┬───────────────┬──────────────┬────────────┬───────────┐
|
||||
│ Magic │ TotalLen │ Type │ TransferId Len│ TransferId │ ChunkIdx │ Data │
|
||||
│ 0x43 0x53│ (4B BE) │ 0x01 │ (2B BE) │ (UTF-8) │ (4B BE) │ (raw) │
|
||||
└──────────┴──────────┴────────┴───────────────┴──────────────┴────────────┴───────────┘
|
||||
```
|
||||
|
||||
| 字段 | 大小 | 说明 |
|
||||
| -------------- | ---- | ------------------------------------------- |
|
||||
| Magic | 2B | 常量 `0x43 0x53` ("CS"), 用于区分 JSON 消息 |
|
||||
| TotalLen | 4B | Big-endian,帧总长度(不含 Magic/TotalLen) |
|
||||
| Type | 1B | `0x01` 代表 `file_chunk` |
|
||||
| TransferId Len | 2B | Big-endian,transferId 字符串长度 |
|
||||
| TransferId | nB | UTF-8 transferId(长度由上一字段给出) |
|
||||
| ChunkIdx | 4B | Big-endian,块索引,从 0 开始 |
|
||||
| Data | mB | 原始文件二进制数据(未编码) |
|
||||
|
||||
> 计算帧总长度:`TotalLen = 1 + 2 + transferIdLen + 4 + dataLen`(即 Type~Data 的长度和)。
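
按上述布局构帧的一个最小示意(Node.js `Buffer`,字段顺序与上表一致;函数名仅作演示):

```typescript
const FRAME_MAGIC = Buffer.from([0x43, 0x53]); // "CS"
const FRAME_TYPE_FILE_CHUNK = 0x01;

function encodeFileChunkFrame(transferId: string, chunkIndex: number, data: Buffer): Buffer {
  const idBuf = Buffer.from(transferId, "utf-8");
  const totalLen = 1 + 2 + idBuf.length + 4 + data.length; // Type ~ Data 的长度和
  const header = Buffer.alloc(2 + 4 + 1 + 2); // Magic + TotalLen + Type + TransferId Len
  FRAME_MAGIC.copy(header, 0);
  header.writeUInt32BE(totalLen, 2);
  header.writeUInt8(FRAME_TYPE_FILE_CHUNK, 6);
  header.writeUInt16BE(idBuf.length, 7);
  const idxBuf = Buffer.alloc(4);
  idxBuf.writeUInt32BE(chunkIndex, 0);
  return Buffer.concat([header, idBuf, idxBuf, data]);
}
```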
|
||||
|
||||
### 4.3 消息解析策略
|
||||
|
||||
1. 读取 socket 数据到缓冲区;
|
||||
2. 若前两字节为 `0x43 0x53` → 按二进制帧解析:
|
||||
- 至少需要 6 字节头(Magic + TotalLen),不足则等待更多数据
|
||||
- 读取 `TotalLen` 判断帧整体长度,缓冲区不足则继续等待
|
||||
- 解析 Type/TransferId/ChunkIdx/Data,并传入文件接收逻辑
|
||||
3. 否则若首字节为 `{` → 按 JSON + `\n` 解析控制消息
|
||||
4. 其它数据丢弃 1 字节并继续循环,避免阻塞。
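
该解析循环的一个最小示意(入参为累积接收缓冲区,返回未消费的剩余部分;回调名称仅作演示):

```typescript
type ChunkFrame = { transferId: string; chunkIndex: number; data: Buffer };

function drainBuffer(buffer: Buffer, onFrame: (f: ChunkFrame) => void, onJson: (msg: unknown) => void): Buffer {
  while (buffer.length > 0) {
    if (buffer.length >= 2 && buffer[0] === 0x43 && buffer[1] === 0x53) {
      if (buffer.length < 6) break; // 不足 Magic + TotalLen,等待更多数据
      const totalLen = buffer.readUInt32BE(2);
      if (buffer.length < 6 + totalLen) break; // 整帧未收全,继续等待
      const idLen = buffer.readUInt16BE(7); // 偏移 6 为 Type(0x01)
      const transferId = buffer.subarray(9, 9 + idLen).toString("utf-8");
      const chunkIndex = buffer.readUInt32BE(9 + idLen);
      const data = buffer.subarray(9 + idLen + 4, 6 + totalLen);
      onFrame({ transferId, chunkIndex, data });
      buffer = buffer.subarray(6 + totalLen);
    } else if (buffer[0] === 0x7b) {
      // 首字节为 '{':按 JSON + \n 解析控制消息
      const nl = buffer.indexOf(0x0a);
      if (nl === -1) break; // 行未收全
      onJson(JSON.parse(buffer.subarray(0, nl).toString("utf-8")));
      buffer = buffer.subarray(nl + 1);
    } else {
      buffer = buffer.subarray(1); // 丢弃 1 字节,避免阻塞
    }
  }
  return buffer;
}
```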
|
||||
|
||||
### 4.4 消息类型汇总(v1)
|
||||
|
||||
| 类型 | 方向 | 编码 | 用途 |
|
||||
| ---------------- | --------------- | -------- | ----------------------- |
|
||||
| `handshake` | Client → Server | JSON+\n | 握手请求(version=1) |
|
||||
| `handshake_ack` | Server → Client | JSON+\n | 握手响应 |
|
||||
| `ping` | Client → Server | JSON+\n | 心跳请求 |
|
||||
| `pong` | Server → Client | JSON+\n | 心跳响应 |
|
||||
| `file_start` | Client → Server | JSON+\n | 开始文件传输 |
|
||||
| `file_start_ack` | Server → Client | JSON+\n | 文件传输确认 |
|
||||
| `file_chunk` | Client → Server | 二进制帧 | 文件数据块(无 Base64,流式无 per-chunk ACK) |
|
||||
| `file_end` | Client → Server | JSON+\n | 文件传输结束 |
|
||||
| `file_complete` | Server → Client | JSON+\n | 传输完成结果 |
|
||||
|
||||
上表中的 JSON 控制消息均为单行文本,以 `\n` 结尾:

```
|
||||
{"type":"message_type",...其他字段...}\n
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## 5. 文件传输协议
|
||||
|
||||
### 5.1 传输流程
|
||||
|
||||
```
|
||||
Client (Sender) Server (Receiver)
|
||||
| |
|
||||
|──── 1. file_start ────────────────>|
|
||||
| (文件元数据) |
|
||||
| |
|
||||
|<─── 2. file_start_ack ─────────────|
|
||||
| (接受/拒绝) |
|
||||
| |
|
||||
|══════ 循环发送数据块(流式,无 ACK) ═════|
|
||||
| |
|
||||
|──── 3. file_chunk [0] ────────────>|
|
||||
| |
|
||||
|──── 3. file_chunk [1] ────────────>|
|
||||
| |
|
||||
| ... 重复直到所有块发送完成 ... |
|
||||
| |
|
||||
|═══════════════════════════════════════|
|
||||
| |
|
||||
|──── 4. file_end ──────────────────>|
|
||||
| (所有块已发送) |
|
||||
| |
|
||||
|<─── 5. file_complete ──────────────|
|
||||
| (最终结果) |
|
||||
```
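
发送侧主循环的一个示意(复用 4.2 节的 `encodeFileChunkFrame` 与 4.1 节的 `sendControlMessage`,两者均为本文示意而非源码;假设 `file_start` 已被接受):

```typescript
import fs from "node:fs";
import type { Socket } from "node:net";

const LAN_TRANSFER_CHUNK_SIZE = 512 * 1024; // 与第 8 节常量一致

async function streamFileChunks(socket: Socket, filePath: string, transferId: string): Promise<void> {
  const stream = fs.createReadStream(filePath, { highWaterMark: LAN_TRANSFER_CHUNK_SIZE });
  let chunkIndex = 0;
  for await (const chunk of stream) {
    const ok = socket.write(encodeFileChunkFrame(transferId, chunkIndex++, chunk as Buffer));
    if (!ok) await new Promise((resolve) => socket.once("drain", resolve)); // 尊重 TCP 背压
  }
  sendControlMessage(socket, { type: "file_end", transferId }); // 随后等待 file_complete
}
```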
|
||||
|
||||
### 5.2 消息定义
|
||||
|
||||
#### 5.2.1 `file_start` - 开始传输
|
||||
|
||||
**方向:** Client → Server
|
||||
|
||||
```typescript
|
||||
type LanTransferFileStartMessage = {
|
||||
type: "file_start";
|
||||
transferId: string; // UUID,唯一传输标识
|
||||
fileName: string; // 文件名(含扩展名)
|
||||
fileSize: number; // 文件总字节数
|
||||
mimeType: string; // MIME 类型
|
||||
checksum: string; // 整个文件的 SHA-256 哈希(hex)
|
||||
totalChunks: number; // 总数据块数
|
||||
chunkSize: number; // 每块大小(字节)
|
||||
};
|
||||
```
|
||||
|
||||
**示例:**
|
||||
|
||||
```json
|
||||
{
|
||||
"type": "file_start",
|
||||
"transferId": "550e8400-e29b-41d4-a716-446655440000",
|
||||
"fileName": "backup.zip",
|
||||
"fileSize": 524288000,
|
||||
"mimeType": "application/zip",
|
||||
"checksum": "a1b2c3d4e5f6789012345678901234567890abcdef1234567890abcdef123456",
|
||||
"totalChunks": 8192,
|
||||
"chunkSize": 65536
|
||||
}
|
||||
```
|
||||
|
||||
#### 5.2.2 `file_start_ack` - 传输确认
|
||||
|
||||
**方向:** Server → Client
|
||||
|
||||
```typescript
|
||||
type LanTransferFileStartAckMessage = {
|
||||
type: "file_start_ack";
|
||||
transferId: string; // 对应的传输 ID
|
||||
accepted: boolean; // 是否接受传输
|
||||
message?: string; // 拒绝原因
|
||||
};
|
||||
```
|
||||
|
||||
**接受示例:**
|
||||
|
||||
```json
|
||||
{
|
||||
"type": "file_start_ack",
|
||||
"transferId": "550e8400-e29b-41d4-a716-446655440000",
|
||||
"accepted": true
|
||||
}
|
||||
```
|
||||
|
||||
**拒绝示例:**
|
||||
|
||||
```json
|
||||
{
|
||||
"type": "file_start_ack",
|
||||
"transferId": "550e8400-e29b-41d4-a716-446655440000",
|
||||
"accepted": false,
|
||||
"message": "Insufficient storage space"
|
||||
}
|
||||
```
|
||||
|
||||
#### 5.2.3 `file_chunk` - 数据块
|
||||
|
||||
**方向:** Client → Server(**二进制帧**,见 4.2)
|
||||
|
||||
- 不再使用 JSON/`\n`,也不再使用 Base64
|
||||
- 帧结构:`Magic` + `TotalLen` + `Type` + `TransferId` + `ChunkIdx` + `Data`
|
||||
- `Type` 固定 `0x01`,`Data` 为原始文件二进制数据
|
||||
- 传输完整性依赖 `file_start.checksum`(全文件 SHA-256);分块校验和可选,不在帧中发送
|
||||
|
||||
#### 5.2.4 `file_chunk_ack` - 数据块确认(v1 流式不使用)
|
||||
|
||||
v1 采用流式传输,不发送 per-chunk ACK。本节类型仅保留作为向后兼容参考,实际不会发送。
|
||||
|
||||
#### 5.2.5 `file_end` - 传输结束
|
||||
|
||||
**方向:** Client → Server
|
||||
|
||||
```typescript
|
||||
type LanTransferFileEndMessage = {
|
||||
type: "file_end";
|
||||
transferId: string; // 传输 ID
|
||||
};
|
||||
```
|
||||
|
||||
**示例:**
|
||||
|
||||
```json
|
||||
{
|
||||
"type": "file_end",
|
||||
"transferId": "550e8400-e29b-41d4-a716-446655440000"
|
||||
}
|
||||
```
|
||||
|
||||
#### 5.2.6 `file_complete` - 传输完成
|
||||
|
||||
**方向:** Server → Client
|
||||
|
||||
```typescript
|
||||
type LanTransferFileCompleteMessage = {
|
||||
type: "file_complete";
|
||||
transferId: string; // 传输 ID
|
||||
success: boolean; // 是否成功
|
||||
filePath?: string; // 保存路径(成功时)
|
||||
error?: string; // 错误信息(失败时)
|
||||
};
|
||||
```
|
||||
|
||||
**成功示例:**
|
||||
|
||||
```json
|
||||
{
|
||||
"type": "file_complete",
|
||||
"transferId": "550e8400-e29b-41d4-a716-446655440000",
|
||||
"success": true,
|
||||
"filePath": "/storage/emulated/0/Documents/backup.zip"
|
||||
}
|
||||
```
|
||||
|
||||
**失败示例:**
|
||||
|
||||
```json
|
||||
{
|
||||
"type": "file_complete",
|
||||
"transferId": "550e8400-e29b-41d4-a716-446655440000",
|
||||
"success": false,
|
||||
"error": "File checksum verification failed"
|
||||
}
|
||||
```
|
||||
|
||||
### 5.3 校验和算法
|
||||
|
||||
#### 整个文件校验和
|
||||
|
||||
```typescript
|
||||
import crypto from "node:crypto";
import fs from "node:fs";

async function calculateFileChecksum(filePath: string): Promise<string> {
|
||||
const hash = crypto.createHash("sha256");
|
||||
const stream = fs.createReadStream(filePath);
|
||||
|
||||
for await (const chunk of stream) {
|
||||
hash.update(chunk);
|
||||
}
|
||||
|
||||
return hash.digest("hex");
|
||||
}
|
||||
```
|
||||
|
||||
#### 数据块校验和
|
||||
|
||||
v1 默认 **不传输分块校验和**,依赖最终文件 checksum。若需要,可在应用层自定义(非协议字段)。
|
||||
|
||||
### 5.4 校验流程
|
||||
|
||||
**发送端(Client):**
|
||||
|
||||
1. 发送前计算整个文件的 SHA-256 → `file_start.checksum`
|
||||
2. 分块直接发送原始二进制(无 Base64)
|
||||
|
||||
**接收端(Server):**
|
||||
|
||||
1. 收到 `file_chunk` 后直接使用二进制数据
|
||||
2. 边收边落盘并增量计算 SHA-256(推荐)
|
||||
3. 所有块接收完成后,计算/完成增量哈希,得到最终 SHA-256
|
||||
4. 与 `file_start.checksum` 比对,结果写入 `file_complete`
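
接收端增量校验的一个最小示意(`tempFilePath` 为假设的临时文件路径;v1 二进制帧按 `chunkIndex` 顺序到达,可直接顺序处理):

```typescript
import crypto from "node:crypto";
import fs from "node:fs";

const tempFilePath = "/tmp/transfer.part"; // 假设的临时文件路径
const hash = crypto.createHash("sha256");
const out = fs.createWriteStream(tempFilePath);

function onChunk(data: Buffer): void {
  out.write(data); // 边收边落盘
  hash.update(data); // 增量更新哈希
}

function onFileEnd(expectedChecksum: string): boolean {
  out.end();
  return hash.digest("hex") === expectedChecksum; // 与 file_start.checksum 比对
}
```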
|
||||
|
||||
### 5.5 数据块大小计算
|
||||
|
||||
```typescript
|
||||
const CHUNK_SIZE = 512 * 1024; // 512KB
|
||||
|
||||
const totalChunks = Math.ceil(fileSize / CHUNK_SIZE);
|
||||
|
||||
// 最后一个块可能小于 CHUNK_SIZE
|
||||
const lastChunkSize = fileSize % CHUNK_SIZE || CHUNK_SIZE;
|
||||
```
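
例如 5.2.1 节示例中的 `backup.zip`:fileSize = 524288000 字节(500 MB),按 512KB 分块得 totalChunks = 1000;由于 524288000 % 524288 = 0,最后一块恰为完整的 512KB。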
|
||||
|
||||
---
|
||||
|
||||
## 6. 心跳与连接保活
|
||||
|
||||
### 6.1 心跳消息
|
||||
|
||||
#### `ping`
|
||||
|
||||
**方向:** Client → Server
|
||||
|
||||
```typescript
|
||||
type LanTransferPingMessage = {
|
||||
type: "ping";
|
||||
payload?: string; // 可选载荷
|
||||
};
|
||||
```
|
||||
|
||||
```json
|
||||
{
|
||||
"type": "ping",
|
||||
"payload": "heartbeat"
|
||||
}
|
||||
```
|
||||
|
||||
#### `pong`
|
||||
|
||||
**方向:** Server → Client
|
||||
|
||||
```typescript
|
||||
type LanTransferPongMessage = {
|
||||
type: "pong";
|
||||
received: boolean; // 确认收到
|
||||
payload?: string; // 回传 ping 的载荷
|
||||
};
|
||||
```
|
||||
|
||||
```json
|
||||
{
|
||||
"type": "pong",
|
||||
"received": true,
|
||||
"payload": "heartbeat"
|
||||
}
|
||||
```
|
||||
|
||||
### 6.2 心跳策略
|
||||
|
||||
- 握手成功后立即发送一次 `ping` 验证连接
|
||||
- 可选:定期发送心跳保持连接活跃
|
||||
- `pong` 应返回 `ping` 中的 `payload`(可选)
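
一个最小示意(`socket` 与 `sendControlMessage` 沿用 4.1 节示例,`LanTransferPingMessage` 见附录 A):

```typescript
import type { Socket } from "node:net";

// 客户端:握手成功后立即验证连接
sendControlMessage(socket, { type: "ping", payload: "heartbeat" });

// 服务端:原样回传 ping 的 payload
function handlePing(socket: Socket, msg: LanTransferPingMessage): void {
  sendControlMessage(socket, { type: "pong", received: true, payload: msg.payload });
}
```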
|
||||
|
||||
---
|
||||
|
||||
## 7. 错误处理
|
||||
|
||||
### 7.1 超时配置
|
||||
|
||||
| 操作 | 超时时间 | 说明 |
|
||||
| ---------- | -------- | --------------------- |
|
||||
| TCP 连接 | 10 秒 | 连接建立超时 |
|
||||
| 握手等待 | 10 秒 | 等待 `handshake_ack` |
|
||||
| 传输完成 | 60 秒 | 等待 `file_complete` |
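
超时控制的一个通用示意(`waitForHandshakeAck` 为假设的辅助函数,仅作演示):

```typescript
// 给任意等待操作加上超时
function withTimeout<T>(promise: Promise<T>, ms: number, label: string): Promise<T> {
  return Promise.race([
    promise,
    new Promise<T>((_, reject) =>
      setTimeout(() => reject(new Error(`${label} timeout after ${ms}ms`)), ms)
    )
  ]);
}

// 用法示意:等待握手响应,超时 10 秒
// await withTimeout(waitForHandshakeAck(), 10_000, "handshake_ack");
```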
|
||||
|
||||
### 7.2 错误场景处理
|
||||
|
||||
| 场景 | Client 处理 | Server 处理 |
|
||||
| --------------- | ------------------ | ---------------------- |
|
||||
| TCP 连接失败 | 通知 UI,允许重试 | - |
|
||||
| 握手超时 | 断开连接,通知 UI | 关闭 socket |
|
||||
| 握手被拒绝 | 显示拒绝原因 | - |
|
||||
| 数据块处理失败 | 中止传输,清理状态 | 清理临时文件 |
|
||||
| 连接意外断开 | 清理状态,通知 UI | 清理临时文件 |
|
||||
| 存储空间不足 | - | 发送 `accepted: false` |
|
||||
|
||||
### 7.3 资源清理
|
||||
|
||||
**Client 端:**
|
||||
|
||||
```typescript
|
||||
function cleanup(): void {
|
||||
// 1. 销毁文件读取流
|
||||
if (readStream) {
|
||||
readStream.destroy();
|
||||
}
|
||||
// 2. 清理传输状态
|
||||
activeTransfer = undefined;
|
||||
// 3. 关闭 socket(如需要)
|
||||
socket?.destroy();
|
||||
}
|
||||
```
|
||||
|
||||
**Server 端:**
|
||||
|
||||
```typescript
|
||||
function cleanup(): void {
|
||||
// 1. 关闭文件写入流
|
||||
if (writeStream) {
|
||||
writeStream.end();
|
||||
}
|
||||
// 2. 删除未完成的临时文件
|
||||
if (tempFilePath) {
|
||||
fs.unlinkSync(tempFilePath);
|
||||
}
|
||||
// 3. 清理传输状态
|
||||
activeTransfer = undefined;
|
||||
}
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## 8. 常量与配置
|
||||
|
||||
### 8.1 协议常量
|
||||
|
||||
```typescript
|
||||
// 协议版本(v1 = 控制 JSON + 二进制 chunk + 流式传输)
|
||||
export const LAN_TRANSFER_PROTOCOL_VERSION = "1";
|
||||
|
||||
// 服务发现
|
||||
export const LAN_TRANSFER_SERVICE_TYPE = "cherrystudio";
|
||||
export const LAN_TRANSFER_SERVICE_FULL_NAME = "_cherrystudio._tcp";
|
||||
|
||||
// TCP 端口
|
||||
export const LAN_TRANSFER_TCP_PORT = 53317;
|
||||
|
||||
// 文件传输(与二进制帧一致)
|
||||
export const LAN_TRANSFER_CHUNK_SIZE = 512 * 1024; // 512KB
|
||||
export const LAN_TRANSFER_GLOBAL_TIMEOUT_MS = 10 * 60 * 1000; // 10 分钟
|
||||
|
||||
// 超时设置
|
||||
export const LAN_TRANSFER_HANDSHAKE_TIMEOUT_MS = 10_000; // 10秒
|
||||
export const LAN_TRANSFER_CHUNK_TIMEOUT_MS = 30_000; // 30秒
|
||||
export const LAN_TRANSFER_COMPLETE_TIMEOUT_MS = 60_000; // 60秒
|
||||
```
|
||||
|
||||
### 8.2 支持的文件类型
|
||||
|
||||
当前仅支持 ZIP 文件:
|
||||
|
||||
```typescript
|
||||
export const LAN_TRANSFER_ALLOWED_EXTENSIONS = [".zip"];
|
||||
export const LAN_TRANSFER_ALLOWED_MIME_TYPES = [
|
||||
"application/zip",
|
||||
"application/x-zip-compressed",
|
||||
];
|
||||
```
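
接收端在处理 `file_start` 时可据此校验文件类型(一个最小示意,函数名仅作演示):

```typescript
import path from "node:path";

function isAllowedFile(fileName: string, mimeType: string): boolean {
  return (
    LAN_TRANSFER_ALLOWED_EXTENSIONS.includes(path.extname(fileName).toLowerCase()) &&
    LAN_TRANSFER_ALLOWED_MIME_TYPES.includes(mimeType)
  );
}
```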
|
||||
|
||||
---
|
||||
|
||||
## 9. 完整时序图
|
||||
|
||||
### 9.1 完整传输流程(v1,流式传输)
|
||||
|
||||
```
|
||||
┌─────────┐ ┌─────────┐ ┌─────────┐
|
||||
│ Renderer│ │ Main │ │ Mobile │
|
||||
│ (UI) │ │ Process │ │ Server │
|
||||
└────┬────┘ └────┬────┘ └────┬────┘
|
||||
│ │ │
|
||||
│ ════════════ 服务发现阶段 ════════════ │
|
||||
│ │ │
|
||||
│ startScan() │ │
|
||||
│────────────────────────────────────>│ │
|
||||
│ │ mDNS browse │
|
||||
│ │ ─ ─ ─ ─ ─ ─ ─ ─ ─ ─ ─ ─ ─ ─ ─ ─ ─>│
|
||||
│ │ │
|
||||
│ │<─ ─ ─ service discovered ─ ─ ─ ─ ─ ─│
|
||||
│ │ │
|
||||
│<────── onServicesUpdated ───────────│ │
|
||||
│ │ │
|
||||
│ ════════════ 握手连接阶段 ════════════ │
|
||||
│ │ │
|
||||
│ connect(peer) │ │
|
||||
│────────────────────────────────────>│ │
|
||||
│ │──────── TCP Connect ───────────────>│
|
||||
│ │ │
|
||||
│ │──────── handshake ─────────────────>│
|
||||
│ │ │
|
||||
│ │<─────── handshake_ack ──────────────│
|
||||
│ │ │
|
||||
│ │──────── ping ──────────────────────>│
|
||||
│ │<─────── pong ───────────────────────│
|
||||
│ │ │
|
||||
│<────── connect result ──────────────│ │
|
||||
│ │ │
|
||||
│ ════════════ 文件传输阶段 ════════════ │
|
||||
│ │ │
|
||||
│ sendFile(path) │ │
|
||||
│────────────────────────────────────>│ │
|
||||
│ │──────── file_start ────────────────>│
|
||||
│ │ │
|
||||
│ │<─────── file_start_ack ─────────────│
|
||||
│ │ │
|
||||
│ │ │
|
||||
│ │══════ 循环发送数据块 ═══════════════│
|
||||
│ │ │
|
||||
│ │──────── file_chunk[0] (binary) ────>│
|
||||
│<────── progress event ──────────────│ │
|
||||
│ │ │
|
||||
│ │──────── file_chunk[1] (binary) ────>│
|
||||
│<────── progress event ──────────────│ │
|
||||
│ │ │
|
||||
│ │ ... 重复 ... │
|
||||
│ │ │
|
||||
│ │══════════════════════════════════════│
|
||||
│ │ │
|
||||
│ │──────── file_end ──────────────────>│
|
||||
│ │ │
|
||||
│ │<─────── file_complete ──────────────│
|
||||
│ │ │
|
||||
│<────── complete event ──────────────│ │
|
||||
│<────── sendFile result ─────────────│ │
|
||||
│ │ │
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## 10. 移动端实现指南(v1 要点)
|
||||
|
||||
### 10.1 必须实现的功能
|
||||
|
||||
1. **mDNS 服务发布**
|
||||
|
||||
- 发布 `_cherrystudio._tcp` 服务
|
||||
- 提供 TCP 端口号 `53317`
|
||||
- 可选:TXT 记录(版本、平台信息)
|
||||
|
||||
2. **TCP 服务端**
|
||||
|
||||
- 监听指定端口
|
||||
- 支持单连接或多连接
|
||||
|
||||
3. **消息解析**
|
||||
|
||||
- 控制消息:UTF-8 + `\n` JSON
|
||||
- 数据消息:二进制帧(Magic+TotalLen 分帧)
|
||||
|
||||
4. **握手处理**
|
||||
|
||||
- 验证 `handshake` 消息
|
||||
- 发送 `handshake_ack` 响应
|
||||
- 响应 `ping` 消息
|
||||
|
||||
5. **文件接收(流式模式)**
|
||||
- 解析 `file_start`,准备接收
|
||||
- 接收 `file_chunk` 二进制帧,直接写入文件/缓冲并增量哈希
|
||||
- v1 不发送 per-chunk ACK(流式传输)
|
||||
- 处理 `file_end`,完成增量哈希并校验 checksum
|
||||
- 发送 `file_complete` 结果
|
||||
|
||||
### 10.2 推荐的库
|
||||
|
||||
**React Native / Expo:**
|
||||
|
||||
- mDNS: `react-native-zeroconf` 或 `@homielab/react-native-bonjour`
|
||||
- TCP: `react-native-tcp-socket`
|
||||
- Crypto: `expo-crypto` 或 `react-native-quick-crypto`
|
||||
|
||||
### 10.3 接收端伪代码
|
||||
|
||||
```typescript
|
||||
class FileReceiver {
|
||||
private transfer?: {
|
||||
id: string;
|
||||
fileName: string;
|
||||
fileSize: number;
|
||||
checksum: string;
|
||||
totalChunks: number;
|
||||
receivedChunks: number;
|
||||
tempPath: string;
|
||||
// v1: 边收边写文件,避免大文件 OOM
|
||||
// stream: FileSystem writable stream (平台相关封装)
|
||||
};
|
||||
|
||||
handleMessage(message: any) {
|
||||
switch (message.type) {
|
||||
case "handshake":
|
||||
this.handleHandshake(message);
|
||||
break;
|
||||
case "ping":
|
||||
this.sendPong(message);
|
||||
break;
|
||||
case "file_start":
|
||||
this.handleFileStart(message);
|
||||
break;
|
||||
// v1: file_chunk 为二进制帧,不再走 JSON 分支
|
||||
case "file_end":
|
||||
this.handleFileEnd(message);
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
handleFileStart(msg: LanTransferFileStartMessage) {
|
||||
// 1. 检查存储空间
|
||||
// 2. 创建临时文件
|
||||
// 3. 初始化传输状态
|
||||
// 4. 发送 file_start_ack
|
||||
}
|
||||
|
||||
// v1: 二进制帧处理在 socket data 流中解析,随后调用 handleBinaryFileChunk
|
||||
handleBinaryFileChunk(transferId: string, chunkIndex: number, data: Buffer) {
|
||||
// 直接使用二进制数据,按 chunkSize/lastChunk 计算长度
|
||||
// 写入文件流并更新增量 SHA-256
|
||||
if (!this.transfer || this.transfer.id !== transferId) return; // 忽略未知传输
this.transfer.receivedChunks++;
|
||||
// v1: 流式传输,不发送 per-chunk ACK
|
||||
}
|
||||
|
||||
handleFileEnd(msg: LanTransferFileEndMessage) {
|
||||
// 1. 关闭写入流,完成增量 SHA-256 哈希
|
||||
// 2. 验证完整文件 checksum
|
||||
// 3. 写入最终位置
|
||||
// 4. 发送 file_complete
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## 附录 A:TypeScript 类型定义
|
||||
|
||||
完整的类型定义位于 `packages/shared/config/types.ts`:
|
||||
|
||||
```typescript
|
||||
// 握手消息
|
||||
export interface LanTransferHandshakeMessage {
|
||||
type: "handshake";
|
||||
deviceName: string;
|
||||
version: string;
|
||||
platform?: string;
|
||||
appVersion?: string;
|
||||
}
|
||||
|
||||
export interface LanTransferHandshakeAckMessage {
|
||||
type: "handshake_ack";
|
||||
accepted: boolean;
|
||||
message?: string;
|
||||
}
|
||||
|
||||
// 心跳消息
|
||||
export interface LanTransferPingMessage {
|
||||
type: "ping";
|
||||
payload?: string;
|
||||
}
|
||||
|
||||
export interface LanTransferPongMessage {
|
||||
type: "pong";
|
||||
received: boolean;
|
||||
payload?: string;
|
||||
}
|
||||
|
||||
// 文件传输消息 (Client -> Server)
|
||||
export interface LanTransferFileStartMessage {
|
||||
type: "file_start";
|
||||
transferId: string;
|
||||
fileName: string;
|
||||
fileSize: number;
|
||||
mimeType: string;
|
||||
checksum: string;
|
||||
totalChunks: number;
|
||||
chunkSize: number;
|
||||
}
|
||||
|
||||
export interface LanTransferFileChunkMessage {
|
||||
type: "file_chunk";
|
||||
transferId: string;
|
||||
chunkIndex: number;
|
||||
data: string; // Base64 encoded (v1: 二进制帧模式下不使用)
|
||||
}
|
||||
|
||||
export interface LanTransferFileEndMessage {
|
||||
type: "file_end";
|
||||
transferId: string;
|
||||
}
|
||||
|
||||
// 文件传输响应消息 (Server -> Client)
|
||||
export interface LanTransferFileStartAckMessage {
|
||||
type: "file_start_ack";
|
||||
transferId: string;
|
||||
accepted: boolean;
|
||||
message?: string;
|
||||
}
|
||||
|
||||
// v1 流式不发送 per-chunk ACK,以下类型仅用于向后兼容参考
|
||||
export interface LanTransferFileChunkAckMessage {
|
||||
type: "file_chunk_ack";
|
||||
transferId: string;
|
||||
chunkIndex: number;
|
||||
received: boolean;
|
||||
error?: string;
|
||||
}
|
||||
|
||||
export interface LanTransferFileCompleteMessage {
|
||||
type: "file_complete";
|
||||
transferId: string;
|
||||
success: boolean;
|
||||
filePath?: string;
|
||||
error?: string;
|
||||
}
|
||||
|
||||
// 常量
|
||||
export const LAN_TRANSFER_TCP_PORT = 53317;
|
||||
export const LAN_TRANSFER_CHUNK_SIZE = 512 * 1024;
|
||||
export const LAN_TRANSFER_CHUNK_TIMEOUT_MS = 30_000;
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## 附录 B:版本历史
|
||||
|
||||
| 版本 | 日期 | 变更 |
|
||||
| ---- | ------- | ---------------------------------------- |
|
||||
| 1.0 | 2025-12 | 初始发布版本,支持二进制帧格式与流式传输 |
|
||||
@ -28,6 +28,12 @@ files:
|
||||
- "!**/{tsconfig.json,tsconfig.tsbuildinfo,tsconfig.node.json,tsconfig.web.json}"
|
||||
- "!**/{.editorconfig,.jekyll-metadata}"
|
||||
- "!src"
|
||||
- "!config"
|
||||
- "!patches"
|
||||
- "!app-upgrade-config.json"
|
||||
- "!**/node_modules/**/*.cpp"
|
||||
- "!**/node_modules/node-addon-api/**"
|
||||
- "!**/node_modules/prebuild-install/**"
|
||||
- "!scripts"
|
||||
- "!local"
|
||||
- "!docs"
|
||||
@ -134,54 +140,44 @@ artifactBuildCompleted: scripts/artifact-build-completed.js
|
||||
releaseInfo:
|
||||
releaseNotes: |
|
||||
<!--LANG:en-->
|
||||
Cherry Studio 1.7.4 - New Browser MCP & Model Updates
|
||||
|
||||
This release adds a powerful browser automation MCP server, new web search provider, and model support updates.
|
||||
Cherry Studio 1.7.9 - New Features & Bug Fixes
|
||||
|
||||
✨ New Features
|
||||
- [MCP] Add @cherry/browser CDP MCP server with session management for browser automation
|
||||
- [Web Search] Add ExaMCP free web search provider (no API key required)
|
||||
- [Model] Support GPT 5.2 series models
|
||||
- [Model] Add capabilities support for Doubao Seed Code models (tool calling, reasoning, vision)
|
||||
|
||||
🔧 Improvements
|
||||
- [Translate] Add reasoning effort option to translate service
|
||||
- [i18n] Improve zh-TW Traditional Chinese locale
|
||||
- [Settings] Update MCP Settings layout and styling
|
||||
- [Agent] Add 302.AI provider support
|
||||
- [Browser] Browser data now persists and supports multiple tabs
|
||||
- [Language] Add Romanian language support
|
||||
- [Search] Add fuzzy search for file list
|
||||
- [Models] Add latest Zhipu models
|
||||
- [Image] Improve text-to-image functionality
|
||||
|
||||
🐛 Bug Fixes
|
||||
- [Chat] Fix line numbers being wrongly copied from code blocks
|
||||
- [Translate] Fix default to first supported reasoning effort when translating
|
||||
- [Chat] Fix preserve thinking block in assistant messages
|
||||
- [Web Search] Fix max search result limit
|
||||
- [Embedding] Fix embedding dimensions retrieval for ModernAiProvider
|
||||
- [Chat] Fix token calculation in prompt tool use plugin
|
||||
- [Model] Fix Ollama provider options for Qwen model support
|
||||
- [UI] Fix Chat component marginRight calculation for improved layout
|
||||
- [Mac] Fix mini window unexpected closing issue
|
||||
- [Preview] Fix HTML preview controls not working in fullscreen
|
||||
- [Translate] Fix translation duplicate execution issue
|
||||
- [Zoom] Fix page zoom reset issue during navigation
|
||||
- [Agent] Fix crash when switching between agent and assistant
|
||||
- [Agent] Fix navigation in agent mode
|
||||
- [Copy] Fix markdown copy button issue
|
||||
- [Windows] Fix compatibility issues on non-Windows systems
|
||||
|
||||
<!--LANG:zh-CN-->
|
||||
Cherry Studio 1.7.4 - 新增浏览器 MCP 与模型更新
|
||||
|
||||
本次更新新增强大的浏览器自动化 MCP 服务器、新的网页搜索提供商以及模型支持更新。
|
||||
Cherry Studio 1.7.9 - 新功能与问题修复
|
||||
|
||||
✨ 新功能
|
||||
- [MCP] 新增 @cherry/browser CDP MCP 服务器,支持会话管理的浏览器自动化
|
||||
- [网页搜索] 新增 ExaMCP 免费网页搜索提供商(无需 API 密钥)
|
||||
- [模型] 支持 GPT 5.2 系列模型
|
||||
- [模型] 为豆包 Seed Code 模型添加能力支持(工具调用、推理、视觉)
|
||||
|
||||
🔧 功能改进
|
||||
- [翻译] 为翻译服务添加推理强度选项
|
||||
- [国际化] 改进繁体中文(zh-TW)本地化
|
||||
- [设置] 优化 MCP 设置布局和样式
|
||||
- [Agent] 新增 302.AI 服务商支持
|
||||
- [浏览器] 浏览器数据现在可以保存,支持多标签页
|
||||
- [语言] 新增罗马尼亚语支持
|
||||
- [搜索] 文件列表新增模糊搜索功能
|
||||
- [模型] 新增最新智谱模型
|
||||
- [图片] 优化文生图功能
|
||||
|
||||
🐛 问题修复
|
||||
- [聊天] 修复代码块中行号被错误复制的问题
|
||||
- [翻译] 修复翻译时默认使用第一个支持的推理强度
|
||||
- [聊天] 修复助手消息中思考块的保留问题
|
||||
- [网页搜索] 修复最大搜索结果数限制
|
||||
- [嵌入] 修复 ModernAiProvider 嵌入维度获取问题
|
||||
- [聊天] 修复提示词工具使用插件的 token 计算问题
|
||||
- [模型] 修复 Ollama 提供商对 Qwen 模型的支持选项
|
||||
- [界面] 修复聊天组件右边距计算以改善布局
|
||||
- [Mac] 修复迷你窗口意外关闭的问题
|
||||
- [预览] 修复全屏模式下 HTML 预览控件无法使用的问题
|
||||
- [翻译] 修复翻译重复执行的问题
|
||||
- [缩放] 修复页面导航时缩放被重置的问题
|
||||
- [智能体] 修复在智能体和助手间切换时崩溃的问题
|
||||
- [智能体] 修复智能体模式下的导航问题
|
||||
- [复制] 修复 Markdown 复制按钮问题
|
||||
- [兼容性] 修复非 Windows 系统的兼容性问题
|
||||
<!--LANG:END-->
|
||||
|
||||
@ -1,6 +1,6 @@
|
||||
import react from '@vitejs/plugin-react-swc'
|
||||
import { CodeInspectorPlugin } from 'code-inspector-plugin'
|
||||
import { defineConfig, externalizeDepsPlugin } from 'electron-vite'
|
||||
import { defineConfig } from 'electron-vite'
|
||||
import { resolve } from 'path'
|
||||
import { visualizer } from 'rollup-plugin-visualizer'
|
||||
|
||||
@ -17,7 +17,7 @@ const isProd = process.env.NODE_ENV === 'production'
|
||||
|
||||
export default defineConfig({
|
||||
main: {
|
||||
plugins: [externalizeDepsPlugin(), ...visualizerPlugin('main')],
|
||||
plugins: [...visualizerPlugin('main')],
|
||||
resolve: {
|
||||
alias: {
|
||||
'@main': resolve('src/main'),
|
||||
@ -51,8 +51,7 @@ export default defineConfig({
|
||||
plugins: [
|
||||
react({
|
||||
tsDecorators: true
|
||||
}),
|
||||
externalizeDepsPlugin()
|
||||
})
|
||||
],
|
||||
resolve: {
|
||||
alias: {
|
||||
@ -68,18 +67,7 @@ export default defineConfig({
|
||||
plugins: [
|
||||
(async () => (await import('@tailwindcss/vite')).default())(),
|
||||
react({
|
||||
tsDecorators: true,
|
||||
plugins: [
|
||||
[
|
||||
'@swc/plugin-styled-components',
|
||||
{
|
||||
displayName: true, // 开发环境下启用组件名称
|
||||
fileName: false, // 不在类名中包含文件名
|
||||
pure: true, // 优化性能
|
||||
ssr: false // 不需要服务端渲染
|
||||
}
|
||||
]
|
||||
]
|
||||
tsDecorators: true
|
||||
}),
|
||||
...(isDev ? [CodeInspectorPlugin({ bundler: 'vite' })] : []), // 只在开发环境下启用 CodeInspectorPlugin
|
||||
...visualizerPlugin('renderer')
|
||||
|
||||
318
package.json
@ -1,6 +1,6 @@
|
||||
{
|
||||
"name": "CherryStudio",
|
||||
"version": "1.7.4",
|
||||
"version": "1.7.9",
|
||||
"private": true,
|
||||
"description": "A powerful AI assistant for producer.",
|
||||
"main": "./out/main/index.js",
|
||||
@ -9,27 +9,13 @@
|
||||
"engines": {
|
||||
"node": ">=22.0.0"
|
||||
},
|
||||
"workspaces": {
|
||||
"packages": [
|
||||
"local",
|
||||
"packages/*"
|
||||
],
|
||||
"installConfig": {
|
||||
"hoistingLimits": [
|
||||
"packages/database",
|
||||
"packages/mcp-trace/trace-core",
|
||||
"packages/mcp-trace/trace-node",
|
||||
"packages/mcp-trace/trace-web",
|
||||
"packages/extension-table-plus"
|
||||
]
|
||||
}
|
||||
},
|
||||
"scripts": {
|
||||
"start": "electron-vite preview",
|
||||
"dev": "dotenv electron-vite dev",
|
||||
"dev:watch": "dotenv electron-vite dev -- -w",
|
||||
"debug": "electron-vite -- --inspect --sourcemap --remote-debugging-port=9222",
|
||||
"build": "npm run typecheck && electron-vite build",
|
||||
"build:check": "yarn lint && yarn test",
|
||||
"build:check": "pnpm lint && pnpm test",
|
||||
"build:unpack": "dotenv npm run build && electron-builder --dir",
|
||||
"build:win": "dotenv npm run build && electron-builder --win --x64 --arm64",
|
||||
"build:win:x64": "dotenv npm run build && electron-builder --win --x64",
|
||||
@ -41,108 +27,116 @@
|
||||
"build:linux:arm64": "dotenv npm run build && electron-builder --linux --arm64",
|
||||
"build:linux:x64": "dotenv npm run build && electron-builder --linux --x64",
|
||||
"release": "node scripts/version.js",
|
||||
"publish": "yarn build:check && yarn release patch push",
|
||||
"publish": "pnpm build:check && pnpm release patch push",
|
||||
"pulish:artifacts": "cd packages/artifacts && npm publish && cd -",
|
||||
"agents:generate": "NODE_ENV='development' drizzle-kit generate --config src/main/services/agents/drizzle.config.ts",
|
||||
"agents:push": "NODE_ENV='development' drizzle-kit push --config src/main/services/agents/drizzle.config.ts",
|
||||
"agents:studio": "NODE_ENV='development' drizzle-kit studio --config src/main/services/agents/drizzle.config.ts",
|
||||
"agents:drop": "NODE_ENV='development' drizzle-kit drop --config src/main/services/agents/drizzle.config.ts",
|
||||
"generate:icons": "electron-icon-builder --input=./build/logo.png --output=build",
|
||||
"analyze:renderer": "VISUALIZER_RENDERER=true yarn build",
|
||||
"analyze:main": "VISUALIZER_MAIN=true yarn build",
|
||||
"analyze:renderer": "VISUALIZER_RENDERER=true pnpm build",
|
||||
"analyze:main": "VISUALIZER_MAIN=true pnpm build",
|
||||
"typecheck": "concurrently -n \"node,web\" -c \"cyan,magenta\" \"npm run typecheck:node\" \"npm run typecheck:web\"",
|
||||
"typecheck:node": "tsgo --noEmit -p tsconfig.node.json --composite false",
|
||||
"typecheck:web": "tsgo --noEmit -p tsconfig.web.json --composite false",
|
||||
"check:i18n": "dotenv -e .env -- tsx scripts/check-i18n.ts",
|
||||
"sync:i18n": "dotenv -e .env -- tsx scripts/sync-i18n.ts",
|
||||
"update:i18n": "dotenv -e .env -- tsx scripts/update-i18n.ts",
|
||||
"auto:i18n": "dotenv -e .env -- tsx scripts/auto-translate-i18n.ts",
|
||||
"i18n:check": "dotenv -e .env -- tsx scripts/check-i18n.ts",
|
||||
"i18n:sync": "dotenv -e .env -- tsx scripts/sync-i18n.ts",
|
||||
"i18n:translate": "dotenv -e .env -- tsx scripts/auto-translate-i18n.ts",
|
||||
"i18n:all": "pnpm i18n:check && pnpm i18n:sync && pnpm i18n:translate",
|
||||
"update:languages": "tsx scripts/update-languages.ts",
|
||||
"update:upgrade-config": "tsx scripts/update-app-upgrade-config.ts",
|
||||
"test": "vitest run --silent",
|
||||
"test:main": "vitest run --project main",
|
||||
"test:renderer": "vitest run --project renderer",
|
||||
"test:aicore": "vitest run --project aiCore",
|
||||
"test:update": "yarn test:renderer --update",
|
||||
"test:update": "pnpm test:renderer --update",
|
||||
"test:coverage": "vitest run --coverage --silent",
|
||||
"test:ui": "vitest --ui",
|
||||
"test:watch": "vitest",
|
||||
"test:e2e": "yarn playwright test",
|
||||
"test:e2e": "pnpm playwright test",
|
||||
"test:lint": "oxlint --deny-warnings && eslint . --ext .js,.jsx,.cjs,.mjs,.ts,.tsx,.cts,.mts --cache",
|
||||
"test:scripts": "vitest scripts",
|
||||
"lint": "oxlint --fix && eslint . --ext .js,.jsx,.cjs,.mjs,.ts,.tsx,.cts,.mts --fix --cache && yarn typecheck && yarn check:i18n && yarn format:check",
|
||||
"lint": "oxlint --fix && eslint . --ext .js,.jsx,.cjs,.mjs,.ts,.tsx,.cts,.mts --fix --cache && pnpm typecheck && pnpm i18n:check && pnpm format:check",
|
||||
"format": "biome format --write && biome lint --write",
|
||||
"format:check": "biome format && biome lint",
|
||||
"prepare": "git config blame.ignoreRevsFile .git-blame-ignore-revs && husky",
|
||||
"claude": "dotenv -e .env -- claude",
|
||||
"release:aicore:alpha": "yarn workspace @cherrystudio/ai-core version prerelease --preid alpha --immediate && yarn workspace @cherrystudio/ai-core build && yarn workspace @cherrystudio/ai-core npm publish --tag alpha --access public",
|
||||
"release:aicore:beta": "yarn workspace @cherrystudio/ai-core version prerelease --preid beta --immediate && yarn workspace @cherrystudio/ai-core build && yarn workspace @cherrystudio/ai-core npm publish --tag beta --access public",
|
||||
"release:aicore": "yarn workspace @cherrystudio/ai-core version patch --immediate && yarn workspace @cherrystudio/ai-core build && yarn workspace @cherrystudio/ai-core npm publish --access public",
|
||||
"release:ai-sdk-provider": "yarn workspace @cherrystudio/ai-sdk-provider version patch --immediate && yarn workspace @cherrystudio/ai-sdk-provider build && yarn workspace @cherrystudio/ai-sdk-provider npm publish --access public"
|
||||
"release:aicore:alpha": "pnpm --filter @cherrystudio/ai-core version prerelease --preid alpha && pnpm --filter @cherrystudio/ai-core build && pnpm --filter @cherrystudio/ai-core publish --tag alpha --access public",
|
||||
"release:aicore:beta": "pnpm --filter @cherrystudio/ai-core version prerelease --preid beta && pnpm --filter @cherrystudio/ai-core build && pnpm --filter @cherrystudio/ai-core publish --tag beta --access public",
|
||||
"release:aicore": "pnpm --filter @cherrystudio/ai-core version patch && pnpm --filter @cherrystudio/ai-core build && pnpm --filter @cherrystudio/ai-core publish --access public",
|
||||
"release:ai-sdk-provider": "pnpm --filter @cherrystudio/ai-sdk-provider version patch && pnpm --filter @cherrystudio/ai-sdk-provider build && pnpm --filter @cherrystudio/ai-sdk-provider publish --access public"
|
||||
},
|
||||
"dependencies": {
|
||||
"@anthropic-ai/claude-agent-sdk": "patch:@anthropic-ai/claude-agent-sdk@npm%3A0.1.62#~/.yarn/patches/@anthropic-ai-claude-agent-sdk-npm-0.1.62-23ae56f8c8.patch",
|
||||
"@anthropic-ai/claude-agent-sdk": "0.1.62",
|
||||
"@libsql/client": "0.14.0",
|
||||
"@libsql/win32-x64-msvc": "^0.4.7",
|
||||
"@napi-rs/system-ocr": "patch:@napi-rs/system-ocr@npm%3A1.0.2#~/.yarn/patches/@napi-rs-system-ocr-npm-1.0.2-59e7a78e8b.patch",
|
||||
"@paymoapp/electron-shutdown-handler": "^1.1.2",
|
||||
"@strongtz/win32-arm64-msvc": "^0.4.7",
|
||||
"emoji-picker-element-data": "^1",
|
||||
"express": "^5.1.0",
|
||||
"font-list": "^2.0.0",
|
||||
"graceful-fs": "^4.2.11",
|
||||
"gray-matter": "^4.0.3",
|
||||
"js-yaml": "^4.1.0",
|
||||
"@napi-rs/system-ocr": "1.0.2",
|
||||
"@paymoapp/electron-shutdown-handler": "1.1.2",
|
||||
"express": "5.1.0",
|
||||
"font-list": "2.0.0",
|
||||
"graceful-fs": "4.2.11",
|
||||
"gray-matter": "4.0.3",
|
||||
"jsdom": "26.1.0",
|
||||
"node-stream-zip": "^1.15.0",
|
||||
"officeparser": "^4.2.0",
|
||||
"os-proxy-config": "^1.1.2",
|
||||
"qrcode.react": "^4.2.0",
|
||||
"selection-hook": "^1.0.12",
|
||||
"sharp": "^0.34.3",
|
||||
"socket.io": "^4.8.1",
|
||||
"swagger-jsdoc": "^6.2.8",
|
||||
"swagger-ui-express": "^5.0.1",
|
||||
"tesseract.js": "patch:tesseract.js@npm%3A6.0.1#~/.yarn/patches/tesseract.js-npm-6.0.1-2562a7e46d.patch",
|
||||
"node-stream-zip": "1.15.0",
|
||||
"officeparser": "4.2.0",
|
||||
"os-proxy-config": "1.1.2",
|
||||
"selection-hook": "1.0.12",
|
||||
"sharp": "0.34.3",
|
||||
"swagger-jsdoc": "6.2.8",
|
||||
"swagger-ui-express": "5.0.1",
|
||||
"tesseract.js": "6.0.1",
|
||||
"turndown": "7.2.0"
|
||||
},
|
||||
"devDependencies": {
|
||||
"js-yaml": "4.1.0",
|
||||
"bonjour-service": "1.3.0",
|
||||
"emoji-picker-element-data": "1",
|
||||
"@agentic/exa": "^7.3.3",
|
||||
"@agentic/searxng": "^7.3.3",
|
||||
"@agentic/tavily": "^7.3.3",
|
||||
"@ai-sdk/amazon-bedrock": "^3.0.61",
|
||||
"@ai-sdk/anthropic": "^2.0.49",
|
||||
"@ai-sdk/azure": "2.0.87",
|
||||
"@ai-sdk/cerebras": "^1.0.31",
|
||||
"@ai-sdk/gateway": "^2.0.15",
|
||||
"@ai-sdk/google": "patch:@ai-sdk/google@npm%3A2.0.43#~/.yarn/patches/@ai-sdk-google-npm-2.0.43-689ed559b3.patch",
|
||||
"@ai-sdk/google-vertex": "^3.0.79",
|
||||
"@ai-sdk/google": "2.0.49",
|
||||
"@ai-sdk/google-vertex": "^3.0.94",
|
||||
"@ai-sdk/huggingface": "^0.0.10",
|
||||
"@ai-sdk/mistral": "^2.0.24",
|
||||
"@ai-sdk/openai": "patch:@ai-sdk/openai@npm%3A2.0.85#~/.yarn/patches/@ai-sdk-openai-npm-2.0.85-27483d1d6a.patch",
|
||||
"@ai-sdk/openai": "2.0.85",
|
||||
"@ai-sdk/perplexity": "^2.0.20",
|
||||
"@ai-sdk/provider": "2.0.0",
|
||||
"@ai-sdk/provider-utils": "3.0.17",
|
||||
"@ai-sdk/test-server": "^0.0.1",
|
||||
"@ai-sdk/xai": "2.0.36",
|
||||
"@ant-design/cssinjs": "1.23.0",
|
||||
"@ant-design/icons": "5.6.1",
|
||||
"@ant-design/v5-patch-for-react-19": "^1.0.3",
|
||||
"@anthropic-ai/sdk": "^0.41.0",
|
||||
"@anthropic-ai/vertex-sdk": "patch:@anthropic-ai/vertex-sdk@npm%3A0.11.4#~/.yarn/patches/@anthropic-ai-vertex-sdk-npm-0.11.4-c19cb41edb.patch",
|
||||
"@anthropic-ai/vertex-sdk": "0.11.4",
|
||||
"@aws-sdk/client-bedrock": "^3.910.0",
|
||||
"@aws-sdk/client-bedrock-runtime": "^3.910.0",
|
||||
"@aws-sdk/client-s3": "^3.910.0",
|
||||
"@biomejs/biome": "2.2.4",
|
||||
"@cherrystudio/ai-core": "workspace:^1.0.9",
|
||||
"@cherrystudio/embedjs": "^0.1.31",
|
||||
"@cherrystudio/embedjs-libsql": "^0.1.31",
|
||||
"@cherrystudio/embedjs-loader-csv": "^0.1.31",
|
||||
"@cherrystudio/embedjs-loader-image": "^0.1.31",
|
||||
"@cherrystudio/embedjs-loader-markdown": "^0.1.31",
|
||||
"@cherrystudio/embedjs-loader-msoffice": "^0.1.31",
|
||||
"@cherrystudio/embedjs-loader-pdf": "^0.1.31",
|
||||
"@cherrystudio/embedjs-loader-sitemap": "^0.1.31",
|
||||
"@cherrystudio/embedjs-loader-web": "^0.1.31",
|
||||
"@cherrystudio/embedjs-loader-xml": "^0.1.31",
|
||||
"@cherrystudio/embedjs-ollama": "^0.1.31",
|
||||
"@cherrystudio/embedjs-openai": "^0.1.31",
|
||||
"@cherrystudio/embedjs": "0.1.31",
|
||||
"@cherrystudio/embedjs-interfaces": "0.1.31",
|
||||
"@cherrystudio/embedjs-libsql": "0.1.31",
|
||||
"@cherrystudio/embedjs-loader-csv": "0.1.31",
|
||||
"@cherrystudio/embedjs-loader-image": "0.1.31",
|
||||
"@cherrystudio/embedjs-loader-markdown": "0.1.31",
|
||||
"@cherrystudio/embedjs-loader-msoffice": "0.1.31",
|
||||
"@cherrystudio/embedjs-loader-pdf": "0.1.31",
|
||||
"@cherrystudio/embedjs-loader-sitemap": "0.1.31",
|
||||
"@cherrystudio/embedjs-loader-web": "0.1.31",
|
||||
"@cherrystudio/embedjs-loader-xml": "0.1.31",
|
||||
"@cherrystudio/embedjs-ollama": "0.1.31",
|
||||
"@cherrystudio/embedjs-openai": "0.1.31",
|
||||
"@cherrystudio/embedjs-utils": "0.1.31",
|
||||
"@cherrystudio/extension-table-plus": "workspace:^",
|
||||
"@cherrystudio/openai": "^6.12.0",
|
||||
"@cherrystudio/openai": "6.15.0",
|
||||
"@codemirror/lang-json": "6.0.1",
|
||||
"@codemirror/lint": "6.8.5",
|
||||
"@codemirror/view": "6.38.1",
|
||||
"@dnd-kit/core": "^6.3.1",
|
||||
"@dnd-kit/modifiers": "^9.0.0",
|
||||
"@dnd-kit/sortable": "^10.0.0",
|
||||
@ -155,18 +149,21 @@
|
||||
"@emotion/is-prop-valid": "^1.3.1",
|
||||
"@eslint-react/eslint-plugin": "^1.36.1",
|
||||
"@eslint/js": "^9.22.0",
|
||||
"@google/genai": "patch:@google/genai@npm%3A1.0.1#~/.yarn/patches/@google-genai-npm-1.0.1-e26f0f9af7.patch",
|
||||
"@floating-ui/dom": "1.7.3",
|
||||
"@google/genai": "1.0.1",
|
||||
"@hello-pangea/dnd": "^18.0.1",
|
||||
"@kangfenmao/keyv-storage": "^0.1.0",
|
||||
"@kangfenmao/keyv-storage": "^0.1.3",
|
||||
"@langchain/community": "^1.0.0",
|
||||
"@langchain/core": "patch:@langchain/core@npm%3A1.0.2#~/.yarn/patches/@langchain-core-npm-1.0.2-183ef83fe4.patch",
|
||||
"@langchain/openai": "patch:@langchain/openai@npm%3A1.0.0#~/.yarn/patches/@langchain-openai-npm-1.0.0-474d0ad9d4.patch",
|
||||
"@langchain/core": "1.0.2",
|
||||
"@langchain/openai": "1.0.0",
|
||||
"@langchain/textsplitters": "0.1.0",
|
||||
"@mistralai/mistralai": "^1.7.5",
|
||||
"@modelcontextprotocol/sdk": "^1.23.0",
|
||||
"@modelcontextprotocol/sdk": "1.23.0",
|
||||
"@mozilla/readability": "^0.6.0",
|
||||
"@notionhq/client": "^2.2.15",
|
||||
"@openrouter/ai-sdk-provider": "^1.2.8",
|
||||
"@opentelemetry/api": "^1.9.0",
|
||||
"@opentelemetry/context-async-hooks": "2.0.1",
|
||||
"@opentelemetry/core": "2.0.0",
|
||||
"@opentelemetry/exporter-trace-otlp-http": "^0.200.0",
|
||||
"@opentelemetry/sdk-trace-base": "^2.0.0",
|
||||
@ -177,6 +174,7 @@
|
||||
"@radix-ui/react-context-menu": "^2.2.16",
|
||||
"@reduxjs/toolkit": "^2.2.5",
|
||||
"@shikijs/markdown-it": "^3.12.0",
|
||||
"@swc/core": "^1.15.8",
|
||||
"@swc/plugin-styled-components": "^8.0.4",
|
||||
"@tailwindcss/vite": "^4.1.13",
|
||||
"@tanstack/react-query": "^5.85.5",
|
||||
@ -185,21 +183,25 @@
|
||||
"@testing-library/jest-dom": "^6.6.3",
|
||||
"@testing-library/react": "^16.3.0",
|
||||
"@testing-library/user-event": "^14.6.1",
|
||||
"@tiptap/extension-collaboration": "^3.2.0",
|
||||
"@tiptap/extension-drag-handle": "patch:@tiptap/extension-drag-handle@npm%3A3.2.0#~/.yarn/patches/@tiptap-extension-drag-handle-npm-3.2.0-5a9ebff7c9.patch",
|
||||
"@tiptap/extension-drag-handle-react": "^3.2.0",
|
||||
"@tiptap/extension-image": "^3.2.0",
|
||||
"@tiptap/extension-list": "^3.2.0",
|
||||
"@tiptap/extension-mathematics": "^3.2.0",
|
||||
"@tiptap/extension-mention": "^3.2.0",
|
||||
"@tiptap/extension-node-range": "^3.2.0",
|
||||
"@tiptap/extension-table-of-contents": "^3.2.0",
|
||||
"@tiptap/extension-typography": "^3.2.0",
|
||||
"@tiptap/extension-underline": "^3.2.0",
|
||||
"@tiptap/pm": "^3.2.0",
|
||||
"@tiptap/react": "^3.2.0",
|
||||
"@tiptap/starter-kit": "^3.2.0",
|
||||
"@tiptap/suggestion": "^3.2.0",
|
||||
"@tiptap/core": "3.2.0",
|
||||
"@tiptap/extension-code-block": "3.2.0",
|
||||
"@tiptap/extension-collaboration": "3.2.0",
|
||||
"@tiptap/extension-drag-handle": "3.2.0",
|
||||
"@tiptap/extension-drag-handle-react": "3.2.0",
|
||||
"@tiptap/extension-heading": "3.2.0",
|
||||
"@tiptap/extension-image": "3.2.0",
|
||||
"@tiptap/extension-link": "3.2.0",
|
||||
"@tiptap/extension-list": "3.2.0",
|
||||
"@tiptap/extension-mathematics": "3.2.0",
|
||||
"@tiptap/extension-mention": "3.2.0",
|
||||
"@tiptap/extension-node-range": "3.2.0",
|
||||
"@tiptap/extension-table-of-contents": "3.2.0",
|
||||
"@tiptap/extension-typography": "3.2.0",
|
||||
"@tiptap/extension-underline": "3.2.0",
|
||||
"@tiptap/pm": "3.2.0",
|
||||
"@tiptap/react": "3.2.0",
|
||||
"@tiptap/starter-kit": "3.2.0",
|
||||
"@tiptap/suggestion": "3.2.0",
|
||||
"@tiptap/y-tiptap": "^3.0.0",
|
||||
"@truto/turndown-plugin-gfm": "^1.0.2",
|
||||
"@tryfabric/martian": "^1.2.4",
|
||||
@ -210,14 +212,17 @@
|
||||
"@types/dotenv": "^8.2.3",
|
||||
"@types/express": "^5",
|
||||
"@types/fs-extra": "^11",
|
||||
"@types/hast": "^3.0.4",
|
||||
"@types/he": "^1",
|
||||
"@types/html-to-text": "^9",
|
||||
"@types/js-yaml": "^4.0.9",
|
||||
"@types/json-schema": "7.0.15",
|
||||
"@types/lodash": "^4.17.5",
|
||||
"@types/markdown-it": "^14",
|
||||
"@types/md5": "^2.3.5",
|
||||
"@types/mdast": "4.0.4",
|
||||
"@types/mime-types": "^3",
|
||||
"@types/node": "^22.17.1",
|
||||
"@types/node": "22.17.2",
|
||||
"@types/pako": "^1.0.2",
|
||||
"@types/react": "^19.2.7",
|
||||
"@types/react-dom": "^19.2.3",
|
||||
@ -228,9 +233,10 @@
|
||||
"@types/swagger-ui-express": "^4.1.8",
|
||||
"@types/tinycolor2": "^1",
|
||||
"@types/turndown": "^5.0.5",
|
||||
"@types/unist": "3.0.3",
|
||||
"@types/uuid": "^10.0.0",
|
||||
"@types/word-extractor": "^1",
|
||||
"@typescript/native-preview": "latest",
|
||||
"@typescript/native-preview": "7.0.0-dev.20250915.1",
|
||||
"@uiw/codemirror-extensions-langs": "^4.25.1",
|
||||
"@uiw/codemirror-themes-all": "^4.25.1",
|
||||
"@uiw/react-codemirror": "^4.25.1",
|
||||
@ -242,12 +248,15 @@
|
||||
"@viz-js/lang-dot": "^1.0.5",
|
||||
"@viz-js/viz": "^3.14.0",
|
||||
"@xyflow/react": "^12.4.4",
|
||||
"adm-zip": "0.4.16",
|
||||
"ai": "^5.0.98",
|
||||
"antd": "patch:antd@npm%3A5.27.0#~/.yarn/patches/antd-npm-5.27.0-aa91c36546.patch",
|
||||
"antd": "5.27.0",
|
||||
"archiver": "^7.0.1",
|
||||
"async-mutex": "^0.5.0",
|
||||
"axios": "^1.7.3",
|
||||
"browser-image-compression": "^2.0.2",
|
||||
"builder-util-runtime": "9.5.0",
|
||||
"chalk": "4.1.2",
|
||||
"chardet": "^2.1.0",
|
||||
"check-disk-space": "3.4.0",
|
||||
"cheerio": "^1.1.2",
|
||||
@ -256,8 +265,10 @@
|
||||
"cli-progress": "^3.12.0",
|
||||
"clsx": "^2.1.1",
|
||||
"code-inspector-plugin": "^0.20.14",
|
||||
"codemirror-lang-mermaid": "0.5.0",
|
||||
"color": "^5.0.0",
|
||||
"concurrently": "^9.2.1",
|
||||
"cors": "2.8.5",
|
||||
"country-flag-emoji-polyfill": "0.1.8",
|
||||
"dayjs": "^1.11.11",
|
||||
"dexie": "^4.0.8",
|
||||
@ -265,6 +276,7 @@
|
||||
"diff": "^8.0.2",
|
||||
"docx": "^9.0.2",
|
||||
"dompurify": "^3.2.6",
|
||||
"dotenv": "16.6.1",
|
||||
"dotenv-cli": "^7.4.2",
|
||||
"drizzle-kit": "^0.31.4",
|
||||
"drizzle-orm": "^0.44.5",
|
||||
@ -273,12 +285,12 @@
|
||||
"electron-devtools-installer": "^3.2.0",
|
||||
"electron-reload": "^2.0.0-alpha.1",
|
||||
"electron-store": "^8.2.0",
|
||||
"electron-updater": "patch:electron-updater@npm%3A6.7.0#~/.yarn/patches/electron-updater-npm-6.7.0-47b11bb0d4.patch",
|
||||
"electron-vite": "4.0.1",
|
||||
"electron-updater": "6.7.0",
|
||||
"electron-vite": "5.0.0",
|
||||
"electron-window-state": "^5.0.3",
|
||||
"emittery": "^1.0.3",
|
||||
"emoji-picker-element": "^1.22.1",
|
||||
"epub": "patch:epub@npm%3A1.3.0#~/.yarn/patches/epub-npm-1.3.0-8325494ffe.patch",
|
||||
"epub": "1.3.0",
|
||||
"eslint": "^9.22.0",
|
||||
"eslint-plugin-import-zod": "^1.2.0",
|
||||
"eslint-plugin-oxlint": "^1.15.0",
|
||||
@ -289,6 +301,7 @@
|
||||
"fast-diff": "^1.3.0",
|
||||
"fast-xml-parser": "^5.2.0",
|
||||
"fetch-socks": "1.3.2",
|
||||
"form-data": "4.0.4",
|
||||
"framer-motion": "^12.23.12",
|
||||
"franc-min": "^6.2.0",
|
||||
"fs-extra": "^11.2.0",
|
||||
@ -305,6 +318,10 @@
|
||||
"isbinaryfile": "5.0.4",
|
||||
"jaison": "^2.0.2",
|
||||
"jest-styled-components": "^7.2.0",
|
||||
"js-base64": "3.7.7",
|
||||
"json-schema": "0.4.0",
|
||||
"katex": "0.16.22",
|
||||
"ky": "1.8.1",
|
||||
"linguist-languages": "^8.1.0",
|
||||
"lint-staged": "^15.5.0",
|
||||
"lodash": "^4.17.21",
|
||||
@ -312,19 +329,27 @@
|
||||
"lucide-react": "^0.525.0",
|
||||
"macos-release": "^3.4.0",
|
||||
"markdown-it": "^14.1.0",
|
||||
"md5": "2.3.0",
|
||||
"mermaid": "^11.10.1",
|
||||
"mime": "^4.0.4",
|
||||
"mime-types": "^3.0.1",
|
||||
"motion": "^12.10.5",
|
||||
"nanoid": "3.3.11",
|
||||
"notion-helper": "^1.3.22",
|
||||
"npx-scope-finder": "^1.2.0",
|
||||
"ollama-ai-provider-v2": "patch:ollama-ai-provider-v2@npm%3A1.5.5#~/.yarn/patches/ollama-ai-provider-v2-npm-1.5.5-8bef249af9.patch",
|
||||
"ollama-ai-provider-v2": "1.5.5",
|
||||
"open": "^8.4.2",
|
||||
"oxlint": "^1.22.0",
|
||||
"oxlint-tsgolint": "^0.2.0",
|
||||
"p-queue": "^8.1.0",
|
||||
"pako": "1.0.11",
|
||||
"pdf-lib": "^1.17.1",
|
||||
"pdf-parse": "^1.1.1",
|
||||
"prosemirror-model": "1.25.2",
|
||||
"proxy-agent": "^6.5.0",
|
||||
"rc-input": "1.8.0",
|
||||
"rc-select": "14.16.6",
|
||||
"rc-virtual-list": "3.18.6",
|
||||
"react": "^19.2.0",
|
||||
"react-dom": "^19.2.0",
|
||||
"react-error-boundary": "^6.0.0",
|
||||
@ -351,8 +376,11 @@
|
||||
"remark-gfm": "^4.0.1",
|
||||
"remark-github-blockquote-alert": "^2.0.0",
|
||||
"remark-math": "^6.0.0",
|
||||
"remark-parse": "11.0.0",
|
||||
"remark-stringify": "11.0.0",
|
||||
"remove-markdown": "^0.6.2",
|
||||
"rollup-plugin-visualizer": "^5.12.0",
|
||||
"semver": "7.7.1",
|
||||
"shiki": "^3.12.0",
|
||||
"strict-url-sanitise": "^0.0.1",
|
||||
"string-width": "^7.2.0",
|
||||
@ -367,11 +395,12 @@
|
||||
"tsx": "^4.20.3",
|
||||
"turndown-plugin-gfm": "^1.0.2",
|
||||
"tw-animate-css": "^1.3.8",
|
||||
"typescript": "~5.8.2",
|
||||
"typescript": "~5.8.3",
|
||||
"undici": "6.21.2",
|
||||
"unified": "^11.0.5",
|
||||
"unist-util-visit": "5.0.0",
|
||||
"uuid": "^13.0.0",
|
||||
"vite": "npm:rolldown-vite@7.1.5",
|
||||
"vite": "npm:rolldown-vite@7.3.0",
|
||||
"vitest": "^3.2.4",
|
||||
"webdav": "^5.8.0",
|
||||
"winston": "^3.17.0",
|
||||
@ -384,41 +413,66 @@
|
||||
"zipread": "^1.3.3",
|
||||
"zod": "^4.1.5"
|
||||
},
|
||||
"resolutions": {
|
||||
"@smithy/types": "4.7.1",
|
||||
"@codemirror/language": "6.11.3",
|
||||
"@codemirror/lint": "6.8.5",
|
||||
"@codemirror/view": "6.38.1",
|
||||
"@langchain/core@npm:^0.3.26": "patch:@langchain/core@npm%3A1.0.2#~/.yarn/patches/@langchain-core-npm-1.0.2-183ef83fe4.patch",
|
||||
"atomically@npm:^1.7.0": "patch:atomically@npm%3A1.7.0#~/.yarn/patches/atomically-npm-1.7.0-e742e5293b.patch",
|
||||
"esbuild": "^0.25.0",
|
||||
"file-stream-rotator@npm:^0.6.1": "patch:file-stream-rotator@npm%3A0.6.1#~/.yarn/patches/file-stream-rotator-npm-0.6.1-eab45fb13d.patch",
|
||||
"libsql@npm:^0.4.4": "patch:libsql@npm%3A0.4.7#~/.yarn/patches/libsql-npm-0.4.7-444e260fb1.patch",
|
||||
"node-abi": "4.24.0",
|
||||
"openai@npm:^4.77.0": "npm:@cherrystudio/openai@6.5.0",
|
||||
"openai@npm:^4.87.3": "npm:@cherrystudio/openai@6.5.0",
|
||||
"pdf-parse@npm:1.1.1": "patch:pdf-parse@npm%3A1.1.1#~/.yarn/patches/pdf-parse-npm-1.1.1-04a6109b2a.patch",
|
||||
"pkce-challenge@npm:^4.1.0": "patch:pkce-challenge@npm%3A4.1.0#~/.yarn/patches/pkce-challenge-npm-4.1.0-fbc51695a3.patch",
|
||||
"tar-fs": "^2.1.4",
|
||||
"undici": "6.21.2",
|
||||
"vite": "npm:rolldown-vite@7.1.5",
|
||||
"tesseract.js@npm:*": "patch:tesseract.js@npm%3A6.0.1#~/.yarn/patches/tesseract.js-npm-6.0.1-2562a7e46d.patch",
|
||||
"@ai-sdk/openai@npm:^2.0.52": "patch:@ai-sdk/openai@npm%3A2.0.52#~/.yarn/patches/@ai-sdk-openai-npm-2.0.52-b36d949c76.patch",
|
||||
"@img/sharp-darwin-arm64": "0.34.3",
|
||||
"@img/sharp-darwin-x64": "0.34.3",
|
||||
"@img/sharp-linux-arm": "0.34.3",
|
||||
"@img/sharp-linux-arm64": "0.34.3",
|
||||
"@img/sharp-linux-x64": "0.34.3",
|
||||
"@img/sharp-win32-x64": "0.34.3",
|
||||
"openai@npm:5.12.2": "npm:@cherrystudio/openai@6.5.0",
|
||||
"@langchain/openai@npm:>=0.1.0 <0.6.0": "patch:@langchain/openai@npm%3A1.0.0#~/.yarn/patches/@langchain-openai-npm-1.0.0-474d0ad9d4.patch",
|
||||
"@langchain/openai@npm:^0.3.16": "patch:@langchain/openai@npm%3A1.0.0#~/.yarn/patches/@langchain-openai-npm-1.0.0-474d0ad9d4.patch",
|
||||
"@langchain/openai@npm:>=0.2.0 <0.7.0": "patch:@langchain/openai@npm%3A1.0.0#~/.yarn/patches/@langchain-openai-npm-1.0.0-474d0ad9d4.patch",
|
||||
"@ai-sdk/openai@npm:^2.0.42": "patch:@ai-sdk/openai@npm%3A2.0.85#~/.yarn/patches/@ai-sdk-openai-npm-2.0.85-27483d1d6a.patch",
|
||||
"@ai-sdk/google@npm:^2.0.40": "patch:@ai-sdk/google@npm%3A2.0.40#~/.yarn/patches/@ai-sdk-google-npm-2.0.40-47e0eeee83.patch",
|
||||
"@ai-sdk/openai-compatible@npm:^1.0.27": "patch:@ai-sdk/openai-compatible@npm%3A1.0.27#~/.yarn/patches/@ai-sdk-openai-compatible-npm-1.0.27-06f74278cf.patch"
|
||||
"pnpm": {
|
||||
"overrides": {
|
||||
"@smithy/types": "4.7.1",
|
||||
"@codemirror/language": "6.11.3",
|
||||
"@codemirror/lint": "6.8.5",
|
||||
"@codemirror/view": "6.38.1",
|
||||
"esbuild": "^0.25.0",
|
||||
"node-abi": "4.24.0",
|
||||
"openai": "npm:@cherrystudio/openai@6.15.0",
|
||||
"tar-fs": "^2.1.4",
|
||||
"undici": "6.21.2",
|
||||
"vite": "npm:rolldown-vite@7.3.0",
|
||||
"@img/sharp-darwin-arm64": "0.34.3",
|
||||
"@img/sharp-darwin-x64": "0.34.3",
|
||||
"@img/sharp-linux-arm": "0.34.3",
|
||||
"@img/sharp-linux-arm64": "0.34.3",
|
||||
"@img/sharp-linux-x64": "0.34.3",
|
||||
"@img/sharp-win32-x64": "0.34.3",
|
||||
"@langchain/core": "1.0.2",
|
||||
"@ai-sdk/openai-compatible@1.0.27": "1.0.28"
|
||||
},
|
||||
"patchedDependencies": {
|
||||
"@anthropic-ai/claude-agent-sdk@0.1.62": "patches/@anthropic-ai-claude-agent-sdk-npm-0.1.62-23ae56f8c8.patch",
|
||||
"@napi-rs/system-ocr@1.0.2": "patches/@napi-rs-system-ocr-npm-1.0.2-59e7a78e8b.patch",
|
||||
"tesseract.js@6.0.1": "patches/tesseract.js-npm-6.0.1-2562a7e46d.patch",
|
||||
"@ai-sdk/google@2.0.49": "patches/@ai-sdk-google-npm-2.0.49-84720f41bd.patch",
|
||||
"@ai-sdk/openai@2.0.85": "patches/@ai-sdk-openai-npm-2.0.85-27483d1d6a.patch",
|
||||
"@anthropic-ai/vertex-sdk@0.11.4": "patches/@anthropic-ai-vertex-sdk-npm-0.11.4-c19cb41edb.patch",
|
||||
"@google/genai@1.0.1": "patches/@google-genai-npm-1.0.1-e26f0f9af7.patch",
|
||||
"@langchain/core@1.0.2": "patches/@langchain-core-npm-1.0.2-183ef83fe4.patch",
|
||||
"@langchain/openai@1.0.0": "patches/@langchain-openai-npm-1.0.0-474d0ad9d4.patch",
|
||||
"@tiptap/extension-drag-handle@3.2.0": "patches/@tiptap-extension-drag-handle-npm-3.2.0-5a9ebff7c9.patch",
|
||||
"antd@5.27.0": "patches/antd-npm-5.27.0-aa91c36546.patch",
|
||||
"electron-updater@6.7.0": "patches/electron-updater-npm-6.7.0-47b11bb0d4.patch",
|
||||
"epub@1.3.0": "patches/epub-npm-1.3.0-8325494ffe.patch",
|
||||
"ollama-ai-provider-v2@1.5.5": "patches/ollama-ai-provider-v2-npm-1.5.5-8bef249af9.patch",
|
||||
"atomically@1.7.0": "patches/atomically-npm-1.7.0-e742e5293b.patch",
|
||||
"file-stream-rotator@0.6.1": "patches/file-stream-rotator-npm-0.6.1-eab45fb13d.patch",
|
||||
"libsql@0.4.7": "patches/libsql-npm-0.4.7-444e260fb1.patch",
|
||||
"pdf-parse@1.1.1": "patches/pdf-parse-npm-1.1.1-04a6109b2a.patch",
|
||||
"@ai-sdk/openai-compatible@1.0.28": "patches/@ai-sdk-openai-compatible-npm-1.0.28-5705188855.patch"
|
||||
},
|
||||
"onlyBuiltDependencies": [
|
||||
"@kangfenmao/keyv-storage",
|
||||
"@paymoapp/electron-shutdown-handler",
|
||||
"@scarf/scarf",
|
||||
"@swc/core",
|
||||
"electron",
|
||||
"electron-winstaller",
|
||||
"esbuild",
|
||||
"msw",
|
||||
"protobufjs",
|
||||
"registry-js",
|
||||
"selection-hook",
|
||||
"sharp",
|
||||
"tesseract.js",
|
||||
"zipfile"
|
||||
]
|
||||
},
|
||||
"packageManager": "yarn@4.9.1",
|
||||
"packageManager": "pnpm@10.27.0",
|
||||
"lint-staged": {
|
||||
"*.{js,jsx,ts,tsx,cjs,mjs,cts,mts}": [
|
||||
"biome format --write --no-errors-on-unmatched",
|
||||
|
||||
@ -8,7 +8,7 @@ It exposes the CherryIN OpenAI-compatible entrypoints and dynamically routes Ant
|
||||
```bash
|
||||
npm install ai @cherrystudio/ai-sdk-provider @ai-sdk/anthropic @ai-sdk/google @ai-sdk/openai
|
||||
# or
|
||||
yarn add ai @cherrystudio/ai-sdk-provider @ai-sdk/anthropic @ai-sdk/google @ai-sdk/openai
|
||||
pnpm add ai @cherrystudio/ai-sdk-provider @ai-sdk/anthropic @ai-sdk/google @ai-sdk/openai
|
||||
```
|
||||
|
||||
> **Note**: This package requires peer dependencies `ai`, `@ai-sdk/anthropic`, `@ai-sdk/google`, and `@ai-sdk/openai` to be installed.
|
||||
|
||||
@ -41,7 +41,7 @@
|
||||
"ai": "^5.0.26"
|
||||
},
|
||||
"dependencies": {
|
||||
"@ai-sdk/openai-compatible": "^1.0.28",
|
||||
"@ai-sdk/openai-compatible": "1.0.28",
|
||||
"@ai-sdk/provider": "^2.0.0",
|
||||
"@ai-sdk/provider-utils": "^3.0.17"
|
||||
},
|
||||
|
||||
@ -42,7 +42,7 @@
|
||||
"@ai-sdk/anthropic": "^2.0.49",
|
||||
"@ai-sdk/azure": "^2.0.87",
|
||||
"@ai-sdk/deepseek": "^1.0.31",
|
||||
"@ai-sdk/openai-compatible": "patch:@ai-sdk/openai-compatible@npm%3A1.0.27#~/.yarn/patches/@ai-sdk-openai-compatible-npm-1.0.27-06f74278cf.patch",
|
||||
"@ai-sdk/openai-compatible": "1.0.28",
|
||||
"@ai-sdk/provider": "^2.0.0",
|
||||
"@ai-sdk/provider-utils": "^3.0.17",
|
||||
"@ai-sdk/xai": "^2.0.36",
|
||||
|
||||
@@ -22,10 +22,10 @@ const TOOL_USE_TAG_CONFIG: TagConfig = {
}

/**
 * Default system prompt template (extracted from Cherry Studio)
 * Default system prompt template
 */
const DEFAULT_SYSTEM_PROMPT = `In this environment you have access to a set of tools you can use to answer the user's question. \\
You can use one tool per message, and will receive the result of that tool use in the user's response. You use tools step-by-step to accomplish a given task, with each tool use informed by the result of the previous tool use.
export const DEFAULT_SYSTEM_PROMPT = `In this environment you have access to a set of tools you can use to answer the user's question. \
You can use one or more tools per message, and will receive the result of that tool use in the user's response. You use tools step-by-step to accomplish a given task, with each tool use informed by the result of the previous tool use.

## Tool Use Formatting

@@ -74,10 +74,13 @@ Here are the rules you should always follow to solve your task:
4. Never re-do a tool call that you previously did with the exact same parameters.
5. For tool use, MAKE SURE use XML tag format as shown in the examples above. Do not use any other format.

## Response rules

Respond in the language of the user's query, unless the user instructions specify additional requirements for the language to be used.

# User Instructions
{{ USER_SYSTEM_PROMPT }}

Now Begin! If you solve the task correctly, you will receive a reward of $1,000,000.`
`

/**
 * Default tool use examples (extracted from Cherry Studio)
@@ -151,7 +154,8 @@ User: <tool_use_result>
<name>search</name>
<result>26 million (2019)</result>
</tool_use_result>
Assistant: The population of Shanghai is 26 million, while Guangzhou has a population of 15 million. Therefore, Shanghai has the highest population.`

A: The population of Shanghai is 26 million, while Guangzhou has a population of 15 million. Therefore, Shanghai has the highest population.`

/**
 * Build the available tools section (extracted from Cherry Studio)

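The `{{ USER_SYSTEM_PROMPT }}` token is a plain-text placeholder that the caller fills in before the template is sent upstream. A minimal sketch of that substitution (the helper name is hypothetical, not from this diff):

```ts
// Hypothetical helper: instantiate the template above with the user's own
// system prompt. A plain string replace suffices for a single placeholder.
function buildSystemPrompt(template: string, userSystemPrompt: string): string {
  return template.replace('{{ USER_SYSTEM_PROMPT }}', userSystemPrompt)
}

const prompt = buildSystemPrompt(DEFAULT_SYSTEM_PROMPT, 'Always answer in French.')
```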
@@ -68,8 +68,8 @@
  ],
  "devDependencies": {
    "@biomejs/biome": "2.2.4",
    "@tiptap/core": "^3.2.0",
    "@tiptap/pm": "^3.2.0",
    "@tiptap/core": "3.2.0",
    "@tiptap/pm": "3.2.0",
    "eslint": "^9.22.0",
    "eslint-plugin-react-hooks": "^5.2.0",
    "eslint-plugin-simple-import-sort": "^12.1.1",
@@ -89,5 +89,5 @@
    "build": "tsdown",
    "lint": "biome format ./src/ --write && eslint --fix ./src/"
  },
  "packageManager": "yarn@4.9.1"
  "packageManager": "pnpm@10.27.0"
}

@@ -233,6 +233,8 @@ export enum IpcChannel {
  Backup_ListS3Files = 'backup:listS3Files',
  Backup_DeleteS3File = 'backup:deleteS3File',
  Backup_CheckS3Connection = 'backup:checkS3Connection',
  Backup_CreateLanTransferBackup = 'backup:createLanTransferBackup',
  Backup_DeleteTempBackup = 'backup:deleteTempBackup',

  // zip
  Zip_Compress = 'zip:compress',
@@ -244,6 +246,7 @@ export enum IpcChannel {
  System_GetCpuName = 'system:getCpuName',
  System_CheckGitBash = 'system:checkGitBash',
  System_GetGitBashPath = 'system:getGitBashPath',
  System_GetGitBashPathInfo = 'system:getGitBashPathInfo',
  System_SetGitBashPath = 'system:setGitBashPath',

  // DevTools
@@ -315,6 +318,7 @@ export enum IpcChannel {
  Memory_DeleteUser = 'memory:delete-user',
  Memory_DeleteAllMemoriesForUser = 'memory:delete-all-memories-for-user',
  Memory_GetUsersList = 'memory:get-users-list',
  Memory_MigrateMemoryDb = 'memory:migrate-memory-db',

  // TRACE
  TRACE_SAVE_DATA = 'trace:saveData',
@@ -360,6 +364,7 @@ export enum IpcChannel {
  OCR_ListProviders = 'ocr:list-providers',

  // OVMS
  Ovms_IsSupported = 'ovms:is-supported',
  Ovms_AddModel = 'ovms:add-model',
  Ovms_StopAddModel = 'ovms:stop-addmodel',
  Ovms_GetModels = 'ovms:get-models',
@@ -380,10 +385,14 @@ export enum IpcChannel {
  ClaudeCodePlugin_ReadContent = 'claudeCodePlugin:read-content',
  ClaudeCodePlugin_WriteContent = 'claudeCodePlugin:write-content',

  // WebSocket
  WebSocket_Start = 'webSocket:start',
  WebSocket_Stop = 'webSocket:stop',
  WebSocket_Status = 'webSocket:status',
  WebSocket_SendFile = 'webSocket:send-file',
  WebSocket_GetAllCandidates = 'webSocket:get-all-candidates'
  // Local Transfer
  LocalTransfer_ListServices = 'local-transfer:list',
  LocalTransfer_StartScan = 'local-transfer:start-scan',
  LocalTransfer_StopScan = 'local-transfer:stop-scan',
  LocalTransfer_ServicesUpdated = 'local-transfer:services-updated',
  LocalTransfer_Connect = 'local-transfer:connect',
  LocalTransfer_Disconnect = 'local-transfer:disconnect',
  LocalTransfer_ClientEvent = 'local-transfer:client-event',
  LocalTransfer_SendFile = 'local-transfer:send-file',
  LocalTransfer_CancelTransfer = 'local-transfer:cancel-transfer'
}

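Each enum member names an `ipcMain.handle` channel registered in the main process, so a renderer reaches it through `ipcRenderer.invoke`. A minimal sketch (Cherry Studio actually routes this through a preload bridge, and the argument shape for the channel is assumed here for illustration):

```ts
// Sketch only: raw invoke of one of the new Local Transfer channels.
// The transferId argument is an assumption about the handler's signature.
import { ipcRenderer } from 'electron'
import { IpcChannel } from '@shared/IpcChannel'

async function cancelTransfer(transferId: string): Promise<void> {
  await ipcRenderer.invoke(IpcChannel.LocalTransfer_CancelTransfer, transferId)
}
```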
138 packages/shared/__tests__/utils.test.ts (new file)
@@ -0,0 +1,138 @@
import { describe, expect, it } from 'vitest'

import { isBase64ImageDataUrl, isDataUrl, parseDataUrl } from '../utils'

describe('parseDataUrl', () => {
  it('parses a standard base64 image data URL', () => {
    const result = parseDataUrl('data:image/png;base64,iVBORw0KGgo=')
    expect(result).toEqual({
      mediaType: 'image/png',
      isBase64: true,
      data: 'iVBORw0KGgo='
    })
  })

  it('parses a base64 data URL with additional parameters', () => {
    const result = parseDataUrl('data:image/jpeg;name=foo;base64,/9j/4AAQ')
    expect(result).toEqual({
      mediaType: 'image/jpeg',
      isBase64: true,
      data: '/9j/4AAQ'
    })
  })

  it('parses a plain text data URL (non-base64)', () => {
    const result = parseDataUrl('data:text/plain,Hello%20World')
    expect(result).toEqual({
      mediaType: 'text/plain',
      isBase64: false,
      data: 'Hello%20World'
    })
  })

  it('parses a data URL with empty media type', () => {
    const result = parseDataUrl('data:;base64,SGVsbG8=')
    expect(result).toEqual({
      mediaType: undefined,
      isBase64: true,
      data: 'SGVsbG8='
    })
  })

  it('returns null for non-data URLs', () => {
    const result = parseDataUrl('https://example.com/image.png')
    expect(result).toBeNull()
  })

  it('returns null for malformed data URL without comma', () => {
    const result = parseDataUrl('data:image/png;base64')
    expect(result).toBeNull()
  })

  it('handles empty string', () => {
    const result = parseDataUrl('')
    expect(result).toBeNull()
  })

  it('handles large base64 data without performance issues', () => {
    // Simulate a 4K image base64 string (about 1MB)
    const largeData = 'A'.repeat(1024 * 1024)
    const dataUrl = `data:image/png;base64,${largeData}`

    const start = performance.now()
    const result = parseDataUrl(dataUrl)
    const duration = performance.now() - start

    expect(result).not.toBeNull()
    expect(result?.mediaType).toBe('image/png')
    expect(result?.isBase64).toBe(true)
    expect(result?.data).toBe(largeData)
    // Should complete in under 10ms (string operations are fast)
    expect(duration).toBeLessThan(10)
  })

  it('parses SVG data URL', () => {
    const result = parseDataUrl('data:image/svg+xml;base64,PHN2Zz4=')
    expect(result).toEqual({
      mediaType: 'image/svg+xml',
      isBase64: true,
      data: 'PHN2Zz4='
    })
  })

  it('parses JSON data URL', () => {
    const result = parseDataUrl('data:application/json,{"key":"value"}')
    expect(result).toEqual({
      mediaType: 'application/json',
      isBase64: false,
      data: '{"key":"value"}'
    })
  })
})

describe('isDataUrl', () => {
  it('returns true for valid data URLs', () => {
    expect(isDataUrl('data:image/png;base64,ABC')).toBe(true)
    expect(isDataUrl('data:text/plain,hello')).toBe(true)
    expect(isDataUrl('data:,simple')).toBe(true)
  })

  it('returns false for non-data URLs', () => {
    expect(isDataUrl('https://example.com')).toBe(false)
    expect(isDataUrl('file:///path/to/file')).toBe(false)
    expect(isDataUrl('')).toBe(false)
  })

  it('returns false for malformed data URLs', () => {
    expect(isDataUrl('data:')).toBe(false)
    expect(isDataUrl('data:image/png')).toBe(false)
  })
})

describe('isBase64ImageDataUrl', () => {
  it('returns true for base64 image data URLs', () => {
    expect(isBase64ImageDataUrl('data:image/png;base64,ABC')).toBe(true)
    expect(isBase64ImageDataUrl('data:image/jpeg;base64,/9j/')).toBe(true)
    expect(isBase64ImageDataUrl('data:image/gif;base64,R0lG')).toBe(true)
    expect(isBase64ImageDataUrl('data:image/webp;base64,UklG')).toBe(true)
  })

  it('returns false for non-base64 image data URLs', () => {
    expect(isBase64ImageDataUrl('data:image/svg+xml,<svg></svg>')).toBe(false)
  })

  it('returns false for non-image data URLs', () => {
    expect(isBase64ImageDataUrl('data:text/plain;base64,SGVsbG8=')).toBe(false)
    expect(isBase64ImageDataUrl('data:application/json,{}')).toBe(false)
  })

  it('returns false for regular URLs', () => {
    expect(isBase64ImageDataUrl('https://example.com/image.png')).toBe(false)
    expect(isBase64ImageDataUrl('file:///image.png')).toBe(false)
  })

  it('returns false for malformed data URLs', () => {
    expect(isBase64ImageDataUrl('data:image/png')).toBe(false)
    expect(isBase64ImageDataUrl('')).toBe(false)
  })
})
@@ -488,3 +488,11 @@ export const MACOS_TERMINALS_WITH_COMMANDS: TerminalConfigWithCommand[] = [

// resources/scripts should be maintained manually
export const HOME_CHERRY_DIR = '.cherrystudio'

// Git Bash path configuration types
export type GitBashPathSource = 'manual' | 'auto'

export interface GitBashPathInfo {
  path: string | null
  source: GitBashPathSource | null
}

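A renderer can fetch this shape over the matching `System_GetGitBashPathInfo` channel and branch on `source`. A minimal sketch (the raw invoke stands in for Cherry Studio's preload bridge, and the type is re-declared locally because its real import path is not shown in this hunk):

```ts
import { ipcRenderer } from 'electron'
import { IpcChannel } from '@shared/IpcChannel'

// Re-declared from the constants diff above to keep the sketch self-contained.
type GitBashPathInfo = { path: string | null; source: 'manual' | 'auto' | null }

async function describeGitBash(): Promise<string> {
  const info: GitBashPathInfo = await ipcRenderer.invoke(IpcChannel.System_GetGitBashPathInfo)
  if (!info.path) return 'Git Bash is not configured'
  return info.source === 'manual' ? `manually set: ${info.path}` : `auto-discovered: ${info.path}`
}
```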
@@ -4,7 +4,7 @@
 *
 * ⚠️⚠️⚠️⚠️⚠️⚠️⚠️⚠️⚠️⚠️⚠️⚠️⚠️⚠️⚠️⚠️⚠️⚠️⚠️⚠️⚠️⚠️⚠️⚠️⚠️⚠️⚠️⚠️⚠️⚠️⚠️
 * THIS FILE IS AUTOMATICALLY GENERATED BY A SCRIPT. DO NOT EDIT IT MANUALLY!
 * Run `yarn update:languages` to update this file.
 * Run `pnpm update:languages` to update this file.
 * ⚠️⚠️⚠️⚠️⚠️⚠️⚠️⚠️⚠️⚠️⚠️⚠️⚠️⚠️⚠️⚠️⚠️⚠️⚠️⚠️⚠️⚠️⚠️⚠️⚠️⚠️⚠️⚠️⚠️⚠️⚠️
 *
 */

@@ -52,3 +52,196 @@ export interface WebSocketCandidatesResponse {
  interface: string
  priority: number
}

export type LocalTransferPeer = {
  id: string
  name: string
  host?: string
  fqdn?: string
  port?: number
  type?: string
  protocol?: 'tcp' | 'udp'
  addresses: string[]
  txt?: Record<string, string>
  updatedAt: number
}

export type LocalTransferState = {
  services: LocalTransferPeer[]
  isScanning: boolean
  lastScanStartedAt?: number
  lastUpdatedAt: number
  lastError?: string
}

export type LanHandshakeRequestMessage = {
  type: 'handshake'
  deviceName: string
  version: string
  platform?: string
  appVersion?: string
}

export type LanHandshakeAckMessage = {
  type: 'handshake_ack'
  accepted: boolean
  message?: string
}

export type LocalTransferConnectPayload = {
  peerId: string
  metadata?: Record<string, string>
  timeoutMs?: number
}

export type LanClientEvent =
  | {
      type: 'ping_sent'
      payload: string
      timestamp: number
      peerId?: string
      peerName?: string
    }
  | {
      type: 'pong'
      payload?: string
      received?: boolean
      timestamp: number
      peerId?: string
      peerName?: string
    }
  | {
      type: 'socket_closed'
      reason?: string
      timestamp: number
      peerId?: string
      peerName?: string
    }
  | {
      type: 'error'
      message: string
      timestamp: number
      peerId?: string
      peerName?: string
    }
  | {
      type: 'file_transfer_progress'
      transferId: string
      fileName: string
      bytesSent: number
      totalBytes: number
      chunkIndex: number
      totalChunks: number
      progress: number // 0-100
      speed: number // bytes/sec
      timestamp: number
      peerId?: string
      peerName?: string
    }
  | {
      type: 'file_transfer_complete'
      transferId: string
      fileName: string
      success: boolean
      filePath?: string
      error?: string
      timestamp: number
      peerId?: string
      peerName?: string
    }

// =============================================================================
// LAN File Transfer Protocol Types
// =============================================================================

// Constants for file transfer
export const LAN_TRANSFER_TCP_PORT = 53317
export const LAN_TRANSFER_CHUNK_SIZE = 512 * 1024 // 512KB
export const LAN_TRANSFER_MAX_FILE_SIZE = 500 * 1024 * 1024 // 500MB
export const LAN_TRANSFER_COMPLETE_TIMEOUT_MS = 60_000 // 60s - wait for file_complete after file_end
export const LAN_TRANSFER_GLOBAL_TIMEOUT_MS = 10 * 60 * 1000 // 10 minutes - global transfer timeout

// Binary protocol constants (v1)
export const LAN_TRANSFER_PROTOCOL_VERSION = '1'
export const LAN_BINARY_FRAME_MAGIC = 0x4353 // "CS" as uint16
export const LAN_BINARY_TYPE_FILE_CHUNK = 0x01

// Messages from Electron (Client/Sender) to Mobile (Server/Receiver)

/** Request to start file transfer */
export type LanFileStartMessage = {
  type: 'file_start'
  transferId: string
  fileName: string
  fileSize: number
  mimeType: string // 'application/zip'
  checksum: string // SHA-256 of entire file
  totalChunks: number
  chunkSize: number
}

/**
 * File chunk data (JSON format)
 * @deprecated Use binary frame format in protocol v1. This type is kept for reference only.
 */
export type LanFileChunkMessage = {
  type: 'file_chunk'
  transferId: string
  chunkIndex: number
  data: string // Base64 encoded
  chunkChecksum: string // SHA-256 of this chunk
}

/** Notification that all chunks have been sent */
export type LanFileEndMessage = {
  type: 'file_end'
  transferId: string
}

/** Request to cancel file transfer */
export type LanFileCancelMessage = {
  type: 'file_cancel'
  transferId: string
  reason?: string
}

// Messages from Mobile (Server/Receiver) to Electron (Client/Sender)

/** Acknowledgment of file transfer request */
export type LanFileStartAckMessage = {
  type: 'file_start_ack'
  transferId: string
  accepted: boolean
  message?: string // Rejection reason
}

/**
 * Acknowledgment of file chunk received
 * @deprecated Protocol v1 uses streaming mode without per-chunk acknowledgment.
 * This type is kept for backward compatibility reference only.
 */
export type LanFileChunkAckMessage = {
  type: 'file_chunk_ack'
  transferId: string
  chunkIndex: number
  received: boolean
  message?: string
}

/** Final result of file transfer */
export type LanFileCompleteMessage = {
  type: 'file_complete'
  transferId: string
  success: boolean
  filePath?: string // Path where file was saved on mobile
  error?: string
  // Enhanced error diagnostics
  errorCode?: 'CHECKSUM_MISMATCH' | 'INCOMPLETE_TRANSFER' | 'DISK_ERROR' | 'CANCELLED'
  receivedChunks?: number
  receivedBytes?: number
}

/** Payload for sending a file via IPC */
export type LanFileSendPayload = {
  filePath: string
}

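Putting the constants and message shapes together, building the opening `file_start` frame is a small calculation over the file size. A minimal sketch under stated assumptions: the hex encoding of the checksum is assumed (the protocol above only says SHA-256), and `application/zip` follows the comment on `LanFileStartMessage`.

```ts
import { createHash, randomUUID } from 'node:crypto'
import { readFileSync, statSync } from 'node:fs'
import { basename } from 'node:path'
import {
  LAN_TRANSFER_CHUNK_SIZE,
  LAN_TRANSFER_MAX_FILE_SIZE,
  type LanFileStartMessage
} from '@shared/config/types'

// Sketch: derive the file_start frame for a backup archive on disk.
function buildFileStart(filePath: string): LanFileStartMessage {
  const fileSize = statSync(filePath).size
  if (fileSize > LAN_TRANSFER_MAX_FILE_SIZE) {
    throw new Error('File exceeds LAN_TRANSFER_MAX_FILE_SIZE')
  }
  return {
    type: 'file_start',
    transferId: randomUUID(),
    fileName: basename(filePath),
    fileSize,
    mimeType: 'application/zip', // per the comment on LanFileStartMessage
    checksum: createHash('sha256').update(readFileSync(filePath)).digest('hex'),
    totalChunks: Math.ceil(fileSize / LAN_TRANSFER_CHUNK_SIZE),
    chunkSize: LAN_TRANSFER_CHUNK_SIZE
  }
}
```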
@@ -35,3 +35,134 @@ export const defaultAppHeaders = () => {
//     return value
//   }
// }

/**
 * Extracts the trailing API version segment from a URL path.
 *
 * This function extracts API version patterns (e.g., `v1`, `v2beta`) from the end of a URL.
 * Only versions at the end of the path are extracted, not versions in the middle.
 * The returned version string does not include leading or trailing slashes.
 *
 * @param {string} url - The URL string to parse.
 * @returns {string | undefined} The trailing API version found (e.g., 'v1', 'v2beta'), or undefined if none found.
 *
 * @example
 * getTrailingApiVersion('https://api.example.com/v1') // 'v1'
 * getTrailingApiVersion('https://api.example.com/v2beta/') // 'v2beta'
 * getTrailingApiVersion('https://api.example.com/v1/chat') // undefined (version not at end)
 * getTrailingApiVersion('https://gateway.ai.cloudflare.com/v1/xxx/v1beta') // 'v1beta'
 * getTrailingApiVersion('https://api.example.com') // undefined
 */
export function getTrailingApiVersion(url: string): string | undefined {
  const match = url.match(TRAILING_VERSION_REGEX)

  if (match) {
    // Extract version without leading slash and trailing slash
    return match[0].replace(/^\//, '').replace(/\/$/, '')
  }

  return undefined
}

/**
 * Matches an API version at the end of a URL (with optional trailing slash).
 * Used to detect and extract versions only from the trailing position.
 */
const TRAILING_VERSION_REGEX = /\/v\d+(?:alpha|beta)?\/?$/i

/**
 * Removes the trailing API version segment from a URL path.
 *
 * This function removes API version patterns (e.g., `/v1`, `/v2beta`) from the end of a URL.
 * Only versions at the end of the path are removed, not versions in the middle.
 *
 * @param {string} url - The URL string to process.
 * @returns {string} The URL with the trailing API version removed, or the original URL if no trailing version found.
 *
 * @example
 * withoutTrailingApiVersion('https://api.example.com/v1') // 'https://api.example.com'
 * withoutTrailingApiVersion('https://api.example.com/v2beta/') // 'https://api.example.com'
 * withoutTrailingApiVersion('https://api.example.com/v1/chat') // 'https://api.example.com/v1/chat' (no change)
 * withoutTrailingApiVersion('https://api.example.com') // 'https://api.example.com'
 */
export function withoutTrailingApiVersion(url: string): string {
  return url.replace(TRAILING_VERSION_REGEX, '')
}

export interface DataUrlParts {
  /** The media type (e.g., 'image/png', 'text/plain') */
  mediaType?: string
  /** Whether the data is base64 encoded */
  isBase64: boolean
  /** The data portion (everything after the comma). This is the raw string, not decoded. */
  data: string
}

/**
 * Parses a data URL into its component parts without using regex on the data portion.
 * This is memory-safe for large data URLs (e.g., 4K images) as it uses indexOf instead of regex.
 *
 * Data URL format: data:[<mediatype>][;base64],<data>
 *
 * @param url - The data URL string to parse
 * @returns DataUrlParts if valid, null if invalid
 *
 * @example
 * parseDataUrl('data:image/png;base64,iVBORw0KGgo...')
 * // { mediaType: 'image/png', isBase64: true, data: 'iVBORw0KGgo...' }
 *
 * parseDataUrl('data:text/plain,Hello')
 * // { mediaType: 'text/plain', isBase64: false, data: 'Hello' }
 *
 * parseDataUrl('invalid-url')
 * // null
 */
export function parseDataUrl(url: string): DataUrlParts | null {
  if (!url.startsWith('data:')) {
    return null
  }

  const commaIndex = url.indexOf(',')
  if (commaIndex === -1) {
    return null
  }

  const header = url.slice(5, commaIndex)

  const isBase64 = header.includes(';base64')

  const semicolonIndex = header.indexOf(';')
  const mediaType = (semicolonIndex === -1 ? header : header.slice(0, semicolonIndex)).trim() || undefined

  const data = url.slice(commaIndex + 1)

  return { mediaType, isBase64, data }
}

/**
 * Checks if a string is a data URL.
 *
 * @param url - The string to check
 * @returns true if the string is a valid data URL
 */
export function isDataUrl(url: string): boolean {
  return url.startsWith('data:') && url.includes(',')
}

/**
 * Checks if a data URL contains base64-encoded image data.
 *
 * @param url - The data URL to check
 * @returns true if the URL is a base64-encoded image data URL
 */
export function isBase64ImageDataUrl(url: string): boolean {
  if (!url.startsWith('data:image/')) {
    return false
  }
  const commaIndex = url.indexOf(',')
  if (commaIndex === -1) {
    return false
  }
  const header = url.slice(5, commaIndex)
  return header.includes(';base64')
}

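Because `parseDataUrl` returns the data portion raw (still base64- or percent-encoded), decoding stays with the caller. A minimal sketch of turning a parsed data URL into bytes, not part of the diff:

```ts
// Sketch: decode the raw `data` field returned by parseDataUrl into bytes.
// Buffer is the Node-side choice; a browser context would use atob instead.
function dataUrlToBytes(url: string): Uint8Array | null {
  const parts = parseDataUrl(url)
  if (!parts) return null
  return parts.isBase64
    ? Uint8Array.from(Buffer.from(parts.data, 'base64'))
    : new TextEncoder().encode(decodeURIComponent(parts.data))
}
```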
@@ -1,5 +1,5 @@
diff --git a/dist/index.js b/dist/index.js
index 51ce7e423934fb717cb90245cdfcdb3dae6780e6..0f7f7009e2f41a79a8669d38c8a44867bbff5e1f 100644
index d004b415c5841a1969705823614f395265ea5a8a..6b1e0dad4610b0424393ecc12e9114723bbe316b 100644
--- a/dist/index.js
+++ b/dist/index.js
@@ -474,7 +474,7 @@ function convertToGoogleGenerativeAIMessages(prompt, options) {
@@ -12,7 +12,7 @@ index 51ce7e423934fb717cb90245cdfcdb3dae6780e6..0f7f7009e2f41a79a8669d38c8a44867

// src/google-generative-ai-options.ts
diff --git a/dist/index.mjs b/dist/index.mjs
index f4b77e35c0cbfece85a3ef0d4f4e67aa6dde6271..8d2fecf8155a226006a0bde72b00b6036d4014b6 100644
index 1780dd2391b7f42224a0b8048c723d2f81222c44..1f12ed14399d6902107ce9b435d7d8e6cc61e06b 100644
--- a/dist/index.mjs
+++ b/dist/index.mjs
@@ -480,7 +480,7 @@ function convertToGoogleGenerativeAIMessages(prompt, options) {
@@ -24,3 +24,14 @@ index f4b77e35c0cbfece85a3ef0d4f4e67aa6dde6271..8d2fecf8155a226006a0bde72b00b603
}

// src/google-generative-ai-options.ts
@@ -1909,8 +1909,7 @@ function createGoogleGenerativeAI(options = {}) {
}
var google = createGoogleGenerativeAI();
export {
- VERSION,
createGoogleGenerativeAI,
- google
+ google, VERSION
};
//# sourceMappingURL=index.mjs.map
\ No newline at end of file
266 patches/@ai-sdk-openai-compatible-npm-1.0.28-5705188855.patch (new file)
@@ -0,0 +1,266 @@
diff --git a/dist/index.d.ts b/dist/index.d.ts
index 48e2f6263c6ee4c75d7e5c28733e64f6ebe92200..00d0729c4a3cbf9a48e8e1e962c7e2b256b75eba 100644
--- a/dist/index.d.ts
+++ b/dist/index.d.ts
@@ -7,6 +7,7 @@ declare const openaiCompatibleProviderOptions: z.ZodObject<{
user: z.ZodOptional<z.ZodString>;
reasoningEffort: z.ZodOptional<z.ZodString>;
textVerbosity: z.ZodOptional<z.ZodString>;
+ sendReasoning: z.ZodOptional<z.ZodBoolean>;
}, z.core.$strip>;
type OpenAICompatibleProviderOptions = z.infer<typeof openaiCompatibleProviderOptions>;

diff --git a/dist/index.js b/dist/index.js
index da237bb35b7fa8e24b37cd861ee73dfc51cdfc72..b3060fbaf010e30b64df55302807828e5bfe0f9a 100644
--- a/dist/index.js
+++ b/dist/index.js
@@ -41,7 +41,7 @@ function getOpenAIMetadata(message) {
var _a, _b;
return (_b = (_a = message == null ? void 0 : message.providerOptions) == null ? void 0 : _a.openaiCompatible) != null ? _b : {};
}
-function convertToOpenAICompatibleChatMessages(prompt) {
+function convertToOpenAICompatibleChatMessages({prompt, options}) {
const messages = [];
for (const { role, content, ...message } of prompt) {
const metadata = getOpenAIMetadata({ ...message });
@@ -91,6 +91,7 @@ function convertToOpenAICompatibleChatMessages(prompt) {
}
case "assistant": {
let text = "";
+ let reasoning_text = "";
const toolCalls = [];
for (const part of content) {
const partMetadata = getOpenAIMetadata(part);
@@ -99,6 +100,12 @@ function convertToOpenAICompatibleChatMessages(prompt) {
text += part.text;
break;
}
+ case "reasoning": {
+ if (options.sendReasoning) {
+ reasoning_text += part.text;
+ }
+ break;
+ }
case "tool-call": {
toolCalls.push({
id: part.toolCallId,
@@ -116,6 +123,7 @@ function convertToOpenAICompatibleChatMessages(prompt) {
messages.push({
role: "assistant",
content: text,
+ reasoning_content: reasoning_text || undefined,
tool_calls: toolCalls.length > 0 ? toolCalls : void 0,
...metadata
});
@@ -200,7 +208,8 @@ var openaiCompatibleProviderOptions = import_v4.z.object({
/**
* Controls the verbosity of the generated text. Defaults to `medium`.
*/
- textVerbosity: import_v4.z.string().optional()
+ textVerbosity: import_v4.z.string().optional(),
+ sendReasoning: import_v4.z.boolean().optional()
});

// src/openai-compatible-error.ts
@@ -378,7 +387,7 @@ var OpenAICompatibleChatLanguageModel = class {
reasoning_effort: compatibleOptions.reasoningEffort,
verbosity: compatibleOptions.textVerbosity,
// messages:
- messages: convertToOpenAICompatibleChatMessages(prompt),
+ messages: convertToOpenAICompatibleChatMessages({prompt, options: compatibleOptions}),
// tools:
tools: openaiTools,
tool_choice: openaiToolChoice
@@ -421,6 +430,17 @@ var OpenAICompatibleChatLanguageModel = class {
text: reasoning
});
}
+ if (choice.message.images) {
+ for (const image of choice.message.images) {
+ const match1 = image.image_url.url.match(/^data:([^;]+)/)
+ const match2 = image.image_url.url.match(/^data:[^;]*;base64,(.+)$/);
+ content.push({
+ type: 'file',
+ mediaType: match1 ? (match1[1] ?? 'image/jpeg') : 'image/jpeg',
+ data: match2 ? match2[1] : image.image_url.url,
+ });
+ }
+ }
if (choice.message.tool_calls != null) {
for (const toolCall of choice.message.tool_calls) {
content.push({
@@ -598,6 +618,17 @@ var OpenAICompatibleChatLanguageModel = class {
delta: delta.content
});
}
+ if (delta.images) {
+ for (const image of delta.images) {
+ const match1 = image.image_url.url.match(/^data:([^;]+)/)
+ const match2 = image.image_url.url.match(/^data:[^;]*;base64,(.+)$/);
+ controller.enqueue({
+ type: 'file',
+ mediaType: match1 ? (match1[1] ?? 'image/jpeg') : 'image/jpeg',
+ data: match2 ? match2[1] : image.image_url.url,
+ });
+ }
+ }
if (delta.tool_calls != null) {
for (const toolCallDelta of delta.tool_calls) {
const index = toolCallDelta.index;
@@ -765,6 +796,14 @@ var OpenAICompatibleChatResponseSchema = import_v43.z.object({
arguments: import_v43.z.string()
})
})
+ ).nullish(),
+ images: import_v43.z.array(
+ import_v43.z.object({
+ type: import_v43.z.literal('image_url'),
+ image_url: import_v43.z.object({
+ url: import_v43.z.string(),
+ })
+ })
).nullish()
}),
finish_reason: import_v43.z.string().nullish()
@@ -795,6 +834,14 @@ var createOpenAICompatibleChatChunkSchema = (errorSchema) => import_v43.z.union(
arguments: import_v43.z.string().nullish()
})
})
+ ).nullish(),
+ images: import_v43.z.array(
+ import_v43.z.object({
+ type: import_v43.z.literal('image_url'),
+ image_url: import_v43.z.object({
+ url: import_v43.z.string(),
+ })
+ })
).nullish()
}).nullish(),
finish_reason: import_v43.z.string().nullish()
diff --git a/dist/index.mjs b/dist/index.mjs
index a809a7aa0e148bfd43e01dd7b018568b151c8ad5..565b605eeacd9830b2b0e817e58ad0c5700264de 100644
--- a/dist/index.mjs
+++ b/dist/index.mjs
@@ -23,7 +23,7 @@ function getOpenAIMetadata(message) {
var _a, _b;
return (_b = (_a = message == null ? void 0 : message.providerOptions) == null ? void 0 : _a.openaiCompatible) != null ? _b : {};
}
-function convertToOpenAICompatibleChatMessages(prompt) {
+function convertToOpenAICompatibleChatMessages({prompt, options}) {
const messages = [];
for (const { role, content, ...message } of prompt) {
const metadata = getOpenAIMetadata({ ...message });
@@ -73,6 +73,7 @@ function convertToOpenAICompatibleChatMessages(prompt) {
}
case "assistant": {
let text = "";
+ let reasoning_text = "";
const toolCalls = [];
for (const part of content) {
const partMetadata = getOpenAIMetadata(part);
@@ -81,6 +82,12 @@ function convertToOpenAICompatibleChatMessages(prompt) {
text += part.text;
break;
}
+ case "reasoning": {
+ if (options.sendReasoning) {
+ reasoning_text += part.text;
+ }
+ break;
+ }
case "tool-call": {
toolCalls.push({
id: part.toolCallId,
@@ -98,6 +105,7 @@ function convertToOpenAICompatibleChatMessages(prompt) {
messages.push({
role: "assistant",
content: text,
+ reasoning_content: reasoning_text || undefined,
tool_calls: toolCalls.length > 0 ? toolCalls : void 0,
...metadata
});
@@ -182,7 +190,8 @@ var openaiCompatibleProviderOptions = z.object({
/**
* Controls the verbosity of the generated text. Defaults to `medium`.
*/
- textVerbosity: z.string().optional()
+ textVerbosity: z.string().optional(),
+ sendReasoning: z.boolean().optional()
});

// src/openai-compatible-error.ts
@@ -362,7 +371,7 @@ var OpenAICompatibleChatLanguageModel = class {
reasoning_effort: compatibleOptions.reasoningEffort,
verbosity: compatibleOptions.textVerbosity,
// messages:
- messages: convertToOpenAICompatibleChatMessages(prompt),
+ messages: convertToOpenAICompatibleChatMessages({prompt, options: compatibleOptions}),
// tools:
tools: openaiTools,
tool_choice: openaiToolChoice
@@ -405,6 +414,17 @@ var OpenAICompatibleChatLanguageModel = class {
text: reasoning
});
}
+ if (choice.message.images) {
+ for (const image of choice.message.images) {
+ const match1 = image.image_url.url.match(/^data:([^;]+)/)
+ const match2 = image.image_url.url.match(/^data:[^;]*;base64,(.+)$/);
+ content.push({
+ type: 'file',
+ mediaType: match1 ? (match1[1] ?? 'image/jpeg') : 'image/jpeg',
+ data: match2 ? match2[1] : image.image_url.url,
+ });
+ }
+ }
if (choice.message.tool_calls != null) {
for (const toolCall of choice.message.tool_calls) {
content.push({
@@ -582,6 +602,17 @@ var OpenAICompatibleChatLanguageModel = class {
delta: delta.content
});
}
+ if (delta.images) {
+ for (const image of delta.images) {
+ const match1 = image.image_url.url.match(/^data:([^;]+)/)
+ const match2 = image.image_url.url.match(/^data:[^;]*;base64,(.+)$/);
+ controller.enqueue({
+ type: 'file',
+ mediaType: match1 ? (match1[1] ?? 'image/jpeg') : 'image/jpeg',
+ data: match2 ? match2[1] : image.image_url.url,
+ });
+ }
+ }
if (delta.tool_calls != null) {
for (const toolCallDelta of delta.tool_calls) {
const index = toolCallDelta.index;
@@ -749,6 +780,14 @@ var OpenAICompatibleChatResponseSchema = z3.object({
arguments: z3.string()
})
})
+ ).nullish(),
+ images: z3.array(
+ z3.object({
+ type: z3.literal('image_url'),
+ image_url: z3.object({
+ url: z3.string(),
+ })
+ })
).nullish()
}),
finish_reason: z3.string().nullish()
@@ -779,6 +818,14 @@ var createOpenAICompatibleChatChunkSchema = (errorSchema) => z3.union([
arguments: z3.string().nullish()
})
})
+ ).nullish(),
+ images: z3.array(
+ z3.object({
+ type: z3.literal('image_url'),
+ image_url: z3.object({
+ url: z3.string(),
+ })
+ })
).nullish()
}).nullish(),
finish_reason: z3.string().nullish()
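The patch threads a new `sendReasoning` flag through the OpenAI-compatible provider options so that prior reasoning parts are echoed back to the server as `reasoning_content` on assistant messages. A minimal sketch of switching it on per call; the model instance is assumed to be created elsewhere with the patched provider:

```ts
import { generateText, type LanguageModel } from 'ai'

// Assumption: an OpenAI-compatible model built with the patched provider.
declare const model: LanguageModel

const result = await generateText({
  model,
  prompt: 'Continue the analysis.',
  providerOptions: {
    // Forwarded as reasoning_content on assistant messages by the patch above.
    openaiCompatible: { sendReasoning: true }
  }
})
```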
@@ -7,7 +7,7 @@ index 8dd9b498050dbecd8dd6b901acf1aa8ca38a49af..ed644349c9d38fe2a66b2fb44214f7c1
type OllamaChatModelId = "athene-v2" | "athene-v2:72b" | "aya-expanse" | "aya-expanse:8b" | "aya-expanse:32b" | "codegemma" | "codegemma:2b" | "codegemma:7b" | "codellama" | "codellama:7b" | "codellama:13b" | "codellama:34b" | "codellama:70b" | "codellama:code" | "codellama:python" | "command-r" | "command-r:35b" | "command-r-plus" | "command-r-plus:104b" | "command-r7b" | "command-r7b:7b" | "deepseek-r1" | "deepseek-r1:1.5b" | "deepseek-r1:7b" | "deepseek-r1:8b" | "deepseek-r1:14b" | "deepseek-r1:32b" | "deepseek-r1:70b" | "deepseek-r1:671b" | "deepseek-coder-v2" | "deepseek-coder-v2:16b" | "deepseek-coder-v2:236b" | "deepseek-v3" | "deepseek-v3:671b" | "devstral" | "devstral:24b" | "dolphin3" | "dolphin3:8b" | "exaone3.5" | "exaone3.5:2.4b" | "exaone3.5:7.8b" | "exaone3.5:32b" | "falcon2" | "falcon2:11b" | "falcon3" | "falcon3:1b" | "falcon3:3b" | "falcon3:7b" | "falcon3:10b" | "firefunction-v2" | "firefunction-v2:70b" | "gemma" | "gemma:2b" | "gemma:7b" | "gemma2" | "gemma2:2b" | "gemma2:9b" | "gemma2:27b" | "gemma3" | "gemma3:1b" | "gemma3:4b" | "gemma3:12b" | "gemma3:27b" | "granite3-dense" | "granite3-dense:2b" | "granite3-dense:8b" | "granite3-guardian" | "granite3-guardian:2b" | "granite3-guardian:8b" | "granite3-moe" | "granite3-moe:1b" | "granite3-moe:3b" | "granite3.1-dense" | "granite3.1-dense:2b" | "granite3.1-dense:8b" | "granite3.1-moe" | "granite3.1-moe:1b" | "granite3.1-moe:3b" | "llama2" | "llama2:7b" | "llama2:13b" | "llama2:70b" | "llama3" | "llama3:8b" | "llama3:70b" | "llama3-chatqa" | "llama3-chatqa:8b" | "llama3-chatqa:70b" | "llama3-gradient" | "llama3-gradient:8b" | "llama3-gradient:70b" | "llama3.1" | "llama3.1:8b" | "llama3.1:70b" | "llama3.1:405b" | "llama3.2" | "llama3.2:1b" | "llama3.2:3b" | "llama3.2-vision" | "llama3.2-vision:11b" | "llama3.2-vision:90b" | "llama3.3" | "llama3.3:70b" | "llama4" | "llama4:16x17b" | "llama4:128x17b" | "llama-guard3" | "llama-guard3:1b" | "llama-guard3:8b" | "llava" | "llava:7b" | "llava:13b" | "llava:34b" | "llava-llama3" | "llava-llama3:8b" | "llava-phi3" | "llava-phi3:3.8b" | "marco-o1" | "marco-o1:7b" | "mistral" | "mistral:7b" | "mistral-large" | "mistral-large:123b" | "mistral-nemo" | "mistral-nemo:12b" | "mistral-small" | "mistral-small:22b" | "mixtral" | "mixtral:8x7b" | "mixtral:8x22b" | "moondream" | "moondream:1.8b" | "openhermes" | "openhermes:v2.5" | "nemotron" | "nemotron:70b" | "nemotron-mini" | "nemotron-mini:4b" | "olmo" | "olmo:7b" | "olmo:13b" | "opencoder" | "opencoder:1.5b" | "opencoder:8b" | "phi3" | "phi3:3.8b" | "phi3:14b" | "phi3.5" | "phi3.5:3.8b" | "phi4" | "phi4:14b" | "qwen" | "qwen:7b" | "qwen:14b" | "qwen:32b" | "qwen:72b" | "qwen:110b" | "qwen2" | "qwen2:0.5b" | "qwen2:1.5b" | "qwen2:7b" | "qwen2:72b" | "qwen2.5" | "qwen2.5:0.5b" | "qwen2.5:1.5b" | "qwen2.5:3b" | "qwen2.5:7b" | "qwen2.5:14b" | "qwen2.5:32b" | "qwen2.5:72b" | "qwen2.5-coder" | "qwen2.5-coder:0.5b" | "qwen2.5-coder:1.5b" | "qwen2.5-coder:3b" | "qwen2.5-coder:7b" | "qwen2.5-coder:14b" | "qwen2.5-coder:32b" | "qwen3" | "qwen3:0.6b" | "qwen3:1.7b" | "qwen3:4b" | "qwen3:8b" | "qwen3:14b" | "qwen3:30b" | "qwen3:32b" | "qwen3:235b" | "qwq" | "qwq:32b" | "sailor2" | "sailor2:1b" | "sailor2:8b" | "sailor2:20b" | "shieldgemma" | "shieldgemma:2b" | "shieldgemma:9b" | "shieldgemma:27b" | "smallthinker" | "smallthinker:3b" | "smollm" | "smollm:135m" | "smollm:360m" | "smollm:1.7b" | "tinyllama" | "tinyllama:1.1b" | "tulu3" | "tulu3:8b" | "tulu3:70b" | (string & {});
declare const ollamaProviderOptions: z.ZodObject<{
- think: z.ZodOptional<z.ZodBoolean>;
+ think: z.ZodOptional<z.ZodUnion<[z.ZodBoolean, z.ZodEnum<['low', 'medium', 'high']>]>>;
+ think: z.ZodOptional<z.ZodUnion<[z.ZodBoolean, z.ZodLiteral<"low">, z.ZodLiteral<"medium">, z.ZodLiteral<"high">]>>;
options: z.ZodOptional<z.ZodObject<{
num_ctx: z.ZodOptional<z.ZodNumber>;
repeat_last_n: z.ZodOptional<z.ZodNumber>;
@@ -29,7 +29,7 @@ index 8dd9b498050dbecd8dd6b901acf1aa8ca38a49af..ed644349c9d38fe2a66b2fb44214f7c1

declare const ollamaCompletionProviderOptions: z.ZodObject<{
- think: z.ZodOptional<z.ZodBoolean>;
+ think: z.ZodOptional<z.ZodUnion<[z.ZodBoolean, z.ZodEnum<['low', 'medium', 'high']>]>>;
+ think: z.ZodOptional<z.ZodUnion<[z.ZodBoolean, z.ZodLiteral<"low">, z.ZodLiteral<"medium">, z.ZodLiteral<"high">]>>;
user: z.ZodOptional<z.ZodString>;
suffix: z.ZodOptional<z.ZodString>;
echo: z.ZodOptional<z.ZodBoolean>;
@@ -42,7 +42,7 @@ index 35b5142ce8476ce2549ed7c2ec48e7d8c46c90d9..2ef64dc9a4c2be043e6af608241a6a83
// src/completion/ollama-completion-language-model.ts
var ollamaCompletionProviderOptions = import_v42.z.object({
- think: import_v42.z.boolean().optional(),
+ think: import_v42.z.union([import_v42.z.boolean(), import_v42.z.enum(['low', 'medium', 'high'])]).optional(),
+ think: import_v42.z.union([import_v42.z.boolean(), import_v42.z.literal('low'), import_v42.z.literal('medium'), import_v42.z.literal('high')]).optional(),
user: import_v42.z.string().optional(),
suffix: import_v42.z.string().optional(),
echo: import_v42.z.boolean().optional()
@@ -64,7 +64,7 @@ index 35b5142ce8476ce2549ed7c2ec48e7d8c46c90d9..2ef64dc9a4c2be043e6af608241a6a83
* Only supported by certain models like DeepSeek R1 and Qwen 3.
*/
- think: import_v44.z.boolean().optional(),
+ think: import_v44.z.union([import_v44.z.boolean(), import_v44.z.enum(['low', 'medium', 'high'])]).optional(),
+ think: import_v44.z.union([import_v44.z.boolean(), import_v44.z.literal('low'), import_v44.z.literal('medium'), import_v44.z.literal('high')]).optional(),
options: import_v44.z.object({
num_ctx: import_v44.z.number().optional(),
repeat_last_n: import_v44.z.number().optional(),
@@ -97,7 +97,7 @@ index e2a634a78d80ac9542f2cc4f96cf2291094b10cf..67b23efce3c1cf4f026693d3ff924698
// src/completion/ollama-completion-language-model.ts
var ollamaCompletionProviderOptions = z2.object({
- think: z2.boolean().optional(),
+ think: z2.union([z2.boolean(), z2.enum(['low', 'medium', 'high'])]).optional(),
+ think: z2.union([z2.boolean(), z2.literal('low'), z2.literal('medium'), z2.literal('high')]).optional(),
user: z2.string().optional(),
suffix: z2.string().optional(),
echo: z2.boolean().optional()
@@ -119,7 +119,7 @@ index e2a634a78d80ac9542f2cc4f96cf2291094b10cf..67b23efce3c1cf4f026693d3ff924698
* Only supported by certain models like DeepSeek R1 and Qwen 3.
*/
- think: z4.boolean().optional(),
+ think: z4.union([z4.boolean(), z4.enum(['low', 'medium', 'high'])]).optional(),
+ think: z4.union([z4.boolean(), z4.literal('low'), z4.literal('medium'), z4.literal('high')]).optional(),
options: z4.object({
num_ctx: z4.number().optional(),
repeat_last_n: z4.number().optional(),
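With the widened schema, `think` accepts either a boolean or an effort level. A minimal sketch of passing it through provider options; note the `ollama` provider-options key follows the provider's usual naming convention and is an assumption here, since this hunk only shows the schema change:

```ts
import { generateText, type LanguageModel } from 'ai'

// Assumption: a model instance from the patched Ollama provider.
declare const ollamaModel: LanguageModel

const { text } = await generateText({
  model: ollamaModel,
  prompt: 'Prove that the square root of 2 is irrational.',
  providerOptions: {
    ollama: { think: 'high' } // boolean | 'low' | 'medium' | 'high' after the patch
  }
})
```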
25417 pnpm-lock.yaml (new file; diff suppressed because it is too large)

2 pnpm-workspace.yaml (new file)
@@ -0,0 +1,2 @@
packages:
  - 'packages/*'
@@ -6,8 +6,8 @@ const { downloadWithPowerShell } = require('./download')

// Base URL for downloading OVMS binaries
const OVMS_RELEASE_BASE_URL =
  'https://storage.openvinotoolkit.org/repositories/openvino_model_server/packages/2025.3.0/ovms_windows_python_on.zip'
const OVMS_EX_URL = 'https://gitcode.com/gcw_ggDjjkY3/kjfile/releases/download/download/ovms_25.3_ex.zip'
  'https://storage.openvinotoolkit.org/repositories/openvino_model_server/packages/2025.4.1/ovms_windows_python_on.zip'
const OVMS_EX_URL = 'https://gitcode.com/gcw_ggDjjkY3/kjfile/releases/download/download/ovms_25.4_ex.zip'

/**
 * error code:

@@ -50,7 +50,7 @@ Usage Instructions:
- pt-pt (Portuguese)

Run Command:
yarn auto:i18n
pnpm i18n:translate

Performance Optimization Recommendations:
- For stable API services: MAX_CONCURRENT_TRANSLATIONS=8, TRANSLATION_DELAY_MS=50
@@ -152,7 +152,8 @@ const languageMap = {
  'es-es': 'Spanish',
  'fr-fr': 'French',
  'pt-pt': 'Portuguese',
  'de-de': 'German'
  'de-de': 'German',
  'ro-ro': 'Romanian'
}

const PROMPT = `

@@ -2,14 +2,14 @@ const { Arch } = require('electron-builder')
const { downloadNpmPackage } = require('./utils')

// if you want to add new prebuild binaries packages with different architectures, you can add them here
// please add to allX64 and allArm64 from yarn.lock
// please add to allX64 and allArm64 from pnpm-lock.yaml
const allArm64 = {
  '@img/sharp-darwin-arm64': '0.34.3',
  '@img/sharp-win32-arm64': '0.34.3',
  '@img/sharp-linux-arm64': '0.34.3',

  '@img/sharp-libvips-darwin-arm64': '1.2.0',
  '@img/sharp-libvips-linux-arm64': '1.2.0',
  '@img/sharp-libvips-darwin-arm64': '1.2.4',
  '@img/sharp-libvips-linux-arm64': '1.2.4',

  '@libsql/darwin-arm64': '0.4.7',
  '@libsql/linux-arm64-gnu': '0.4.7',
@@ -24,8 +24,8 @@ const allX64 = {
  '@img/sharp-linux-x64': '0.34.3',
  '@img/sharp-win32-x64': '0.34.3',

  '@img/sharp-libvips-darwin-x64': '1.2.0',
  '@img/sharp-libvips-linux-x64': '1.2.0',
  '@img/sharp-libvips-darwin-x64': '1.2.4',
  '@img/sharp-libvips-linux-x64': '1.2.4',

  '@libsql/darwin-x64': '0.4.7',
  '@libsql/linux-x64-gnu': '0.4.7',

@@ -145,7 +145,7 @@ export function main() {
    console.log('i18n 检查已通过')
  } catch (e) {
    console.error(e)
    throw new Error(`检查未通过。尝试运行 yarn sync:i18n 以解决问题。`)
    throw new Error(`检查未通过。尝试运行 pnpm i18n:sync 以解决问题。`)
  }
}

@@ -57,7 +57,7 @@ function generateLanguagesFileContent(languages: Record<string, LanguageData>):
 *
 * ⚠️⚠️⚠️⚠️⚠️⚠️⚠️⚠️⚠️⚠️⚠️⚠️⚠️⚠️⚠️⚠️⚠️⚠️⚠️⚠️⚠️⚠️⚠️⚠️⚠️⚠️⚠️⚠️⚠️⚠️⚠️
 * THIS FILE IS AUTOMATICALLY GENERATED BY A SCRIPT. DO NOT EDIT IT MANUALLY!
 * Run \`yarn update:languages\` to update this file.
 * Run \`pnpm update:languages\` to update this file.
 * ⚠️⚠️⚠️⚠️⚠️⚠️⚠️⚠️⚠️⚠️⚠️⚠️⚠️⚠️⚠️⚠️⚠️⚠️⚠️⚠️⚠️⚠️⚠️⚠️⚠️⚠️⚠️⚠️⚠️⚠️⚠️
 *
 */
@@ -81,7 +81,7 @@ export const languages: Record<string, LanguageData> = ${languagesObjectString};
async function format(filePath: string): Promise<void> {
  console.log('🎨 Formatting file with Biome...')
  try {
    await execAsync(`yarn biome format --write ${filePath}`)
    await execAsync(`pnpm biome format --write ${filePath}`)
    console.log('✅ Biome formatting complete.')
  } catch (e: any) {
    console.error('❌ Biome formatting failed:', e.stdout || e.stderr)
@@ -96,7 +96,7 @@ async function format(filePath: string): Promise<void> {
async function checkTypeScript(filePath: string): Promise<void> {
  console.log('🧐 Checking file with TypeScript compiler...')
  try {
    await execAsync(`yarn tsc --noEmit --skipLibCheck ${filePath}`)
    await execAsync(`pnpm tsc --noEmit --skipLibCheck ${filePath}`)
    console.log('✅ TypeScript check passed.')
  } catch (e: any) {
    console.error('❌ TypeScript check failed:', e.stdout || e.stderr)

@@ -18,7 +18,7 @@ if (!['patch', 'minor', 'major'].includes(versionType)) {
}

// Update the version
exec(`yarn version ${versionType} --immediate`)
exec(`pnpm version ${versionType}`)

// Read the updated package.json to get the new version number
const updatedPackageJson = JSON.parse(fs.readFileSync('package.json', 'utf8'))

@@ -19,8 +19,10 @@ import { agentService } from './services/agents'
import { apiServerService } from './services/ApiServerService'
import { appMenuService } from './services/AppMenuService'
import { configManager } from './services/ConfigManager'
import { nodeTraceService } from './services/NodeTraceService'
import { lanTransferClientService } from './services/lanTransfer'
import mcpService from './services/MCPService'
import { localTransferService } from './services/LocalTransferService'
import { nodeTraceService } from './services/NodeTraceService'
import powerMonitorService from './services/PowerMonitorService'
import {
  CHERRY_STUDIO_PROTOCOL,
@@ -35,6 +37,7 @@ import { versionService } from './services/VersionService'
import { windowService } from './services/WindowService'
import { initWebviewHotkeys } from './services/WebviewService'
import { runAsyncFunction } from './utils'
import { isOvmsSupported } from './services/OvmsManager'

const logger = loggerService.withContext('MainEntry')

@@ -155,7 +158,8 @@ if (!app.requestSingleInstanceLock()) {

    registerShortcuts(mainWindow)

    registerIpc(mainWindow, app)
    await registerIpc(mainWindow, app)
    localTransferService.startDiscovery({ resetList: true })

    replaceDevtoolsFont(mainWindow)

@@ -237,16 +241,29 @@ if (!app.requestSingleInstanceLock()) {
    if (selectionService) {
      selectionService.quit()
    }

    lanTransferClientService.dispose()
    localTransferService.dispose()
  })

  app.on('will-quit', async () => {
    // Simple resource cleanup; do not block the quit flow
    if (isOvmsSupported) {
      const { ovmsManager } = await import('./services/OvmsManager')
      if (ovmsManager) {
        await ovmsManager.stopOvms()
      } else {
        logger.warn('Unexpected behavior: undefined ovmsManager, but OVMS should be supported.')
      }
    }

    try {
      await mcpService.cleanup()
      await apiServerService.stop()
    } catch (error) {
      logger.warn('Error cleaning up MCP service:', error as Error)
    }

    // finish the logger
    logger.finish()
  })

156 src/main/ipc.ts
@@ -6,11 +6,19 @@ import { loggerService } from '@logger'
import { isLinux, isMac, isPortable, isWin } from '@main/constant'
import { generateSignature } from '@main/integration/cherryai'
import anthropicService from '@main/services/AnthropicService'
import { findGitBash, getBinaryPath, isBinaryExists, runInstallScript, validateGitBashPath } from '@main/utils/process'
import {
  autoDiscoverGitBash,
  getBinaryPath,
  getGitBashPathInfo,
  isBinaryExists,
  runInstallScript,
  validateGitBashPath
} from '@main/utils/process'
import { handleZoomFactor } from '@main/utils/zoom'
import type { SpanEntity, TokenUsage } from '@mcp-trace/trace-core'
import type { UpgradeChannel } from '@shared/config/constant'
import { MIN_WINDOW_HEIGHT, MIN_WINDOW_WIDTH } from '@shared/config/constant'
import type { LocalTransferConnectPayload } from '@shared/config/types'
import { IpcChannel } from '@shared/IpcChannel'
import type { PluginError } from '@types'
import type {
@@ -42,6 +50,8 @@ import { ExportService } from './services/ExportService'
import { fileStorage as fileManager } from './services/FileStorage'
import FileService from './services/FileSystemService'
import KnowledgeService from './services/KnowledgeService'
import { lanTransferClientService } from './services/lanTransfer'
import { localTransferService } from './services/LocalTransferService'
import mcpService from './services/MCPService'
import MemoryService from './services/memory/MemoryService'
import { openTraceWindow, setTraceWindowTitle } from './services/NodeTraceService'
@@ -49,7 +59,7 @@ import NotificationService from './services/NotificationService'
import * as NutstoreService from './services/NutstoreService'
import ObsidianVaultService from './services/ObsidianVaultService'
import { ocrService } from './services/ocr/OcrService'
import OvmsManager from './services/OvmsManager'
import { isOvmsSupported } from './services/OvmsManager'
import powerMonitorService from './services/PowerMonitorService'
import { proxyManager } from './services/ProxyManager'
import { pythonService } from './services/PythonService'
@@ -73,7 +83,6 @@ import {
import storeSyncService from './services/StoreSyncService'
import { themeService } from './services/ThemeService'
import VertexAIService from './services/VertexAIService'
import WebSocketService from './services/WebSocketService'
import { setOpenLinkExternal } from './services/WebviewService'
import { windowService } from './services/WindowService'
import { calculateDirectorySize, getResourcePath } from './utils'
@@ -88,6 +97,7 @@ import {
  untildify
} from './utils/file'
import { updateAppDataConfig } from './utils/init'
import { getCpuName, getDeviceType, getHostname } from './utils/system'
import { compress, decompress } from './utils/zip'

const logger = loggerService.withContext('IPC')
@@ -98,7 +108,6 @@ const obsidianVaultService = new ObsidianVaultService()
const vertexAIService = VertexAIService.getInstance()
const memoryService = MemoryService.getInstance()
const dxtService = new DxtService()
const ovmsManager = new OvmsManager()
const pluginService = PluginService.getInstance()

function normalizeError(error: unknown): Error {
@@ -112,7 +121,7 @@ function extractPluginError(error: unknown): PluginError | null {
  return null
}

export function registerIpc(mainWindow: BrowserWindow, app: Electron.App) {
export async function registerIpc(mainWindow: BrowserWindow, app: Electron.App) {
  const appUpdater = new AppUpdater()
  const notificationService = new NotificationService()

@@ -490,18 +499,17 @@ export function registerIpc(mainWindow: BrowserWindow, app: Electron.App) {
  ipcMain.handle(IpcChannel.Zip_Decompress, (_, text: Buffer) => decompress(text))

  // system
  ipcMain.handle(IpcChannel.System_GetDeviceType, () => (isMac ? 'mac' : isWin ? 'windows' : 'linux'))
  ipcMain.handle(IpcChannel.System_GetHostname, () => require('os').hostname())
  ipcMain.handle(IpcChannel.System_GetCpuName, () => require('os').cpus()[0].model)
  ipcMain.handle(IpcChannel.System_GetDeviceType, getDeviceType)
  ipcMain.handle(IpcChannel.System_GetHostname, getHostname)
  ipcMain.handle(IpcChannel.System_GetCpuName, getCpuName)
  ipcMain.handle(IpcChannel.System_CheckGitBash, () => {
    if (!isWin) {
      return true // Non-Windows systems don't need Git Bash
    }

    try {
      const customPath = configManager.get(ConfigKeys.GitBashPath) as string | undefined
      const bashPath = findGitBash(customPath)

      // Use autoDiscoverGitBash to handle auto-discovery and persistence
      const bashPath = autoDiscoverGitBash()
      if (bashPath) {
        logger.info('Git Bash is available', { path: bashPath })
        return true
@@ -524,13 +532,22 @@ export function registerIpc(mainWindow: BrowserWindow, app: Electron.App) {
    return customPath ?? null
  })

  // Returns { path, source } where source is 'manual' | 'auto' | null
  ipcMain.handle(IpcChannel.System_GetGitBashPathInfo, () => {
    return getGitBashPathInfo()
  })

  ipcMain.handle(IpcChannel.System_SetGitBashPath, (_, newPath: string | null) => {
    if (!isWin) {
      return false
    }

    if (!newPath) {
      // Clear manual setting and re-run auto-discovery
      configManager.set(ConfigKeys.GitBashPath, null)
      configManager.set(ConfigKeys.GitBashPathSource, null)
      // Re-run auto-discovery to restore auto-discovered path if available
      autoDiscoverGitBash()
      return true
    }

@@ -539,7 +556,9 @@ export function registerIpc(mainWindow: BrowserWindow, app: Electron.App) {
      return false
    }

    // Set path with 'manual' source
    configManager.set(ConfigKeys.GitBashPath, validated)
    configManager.set(ConfigKeys.GitBashPathSource, 'manual')
    return true
  })

@@ -566,6 +585,8 @@ export function registerIpc(mainWindow: BrowserWindow, app: Electron.App) {
  ipcMain.handle(IpcChannel.Backup_ListS3Files, backupManager.listS3Files.bind(backupManager))
  ipcMain.handle(IpcChannel.Backup_DeleteS3File, backupManager.deleteS3File.bind(backupManager))
  ipcMain.handle(IpcChannel.Backup_CheckS3Connection, backupManager.checkS3Connection.bind(backupManager))
  ipcMain.handle(IpcChannel.Backup_CreateLanTransferBackup, backupManager.createLanTransferBackup.bind(backupManager))
  ipcMain.handle(IpcChannel.Backup_DeleteTempBackup, backupManager.deleteTempBackup.bind(backupManager))

  // file
  ipcMain.handle(IpcChannel.File_Open, fileManager.open.bind(fileManager))
@@ -665,36 +686,19 @@ export function registerIpc(mainWindow: BrowserWindow, app: Electron.App) {
  ipcMain.handle(IpcChannel.KnowledgeBase_Check_Quota, KnowledgeService.checkQuota.bind(KnowledgeService))

  // memory
  ipcMain.handle(IpcChannel.Memory_Add, async (_, messages, config) => {
    return await memoryService.add(messages, config)
  })
  ipcMain.handle(IpcChannel.Memory_Search, async (_, query, config) => {
    return await memoryService.search(query, config)
  })
  ipcMain.handle(IpcChannel.Memory_List, async (_, config) => {
    return await memoryService.list(config)
  })
  ipcMain.handle(IpcChannel.Memory_Delete, async (_, id) => {
    return await memoryService.delete(id)
  })
  ipcMain.handle(IpcChannel.Memory_Update, async (_, id, memory, metadata) => {
    return await memoryService.update(id, memory, metadata)
  })
  ipcMain.handle(IpcChannel.Memory_Get, async (_, memoryId) => {
    return await memoryService.get(memoryId)
  })
  ipcMain.handle(IpcChannel.Memory_SetConfig, async (_, config) => {
    memoryService.setConfig(config)
  })
  ipcMain.handle(IpcChannel.Memory_DeleteUser, async (_, userId) => {
    return await memoryService.deleteUser(userId)
  })
  ipcMain.handle(IpcChannel.Memory_DeleteAllMemoriesForUser, async (_, userId) => {
    return await memoryService.deleteAllMemoriesForUser(userId)
  })
  ipcMain.handle(IpcChannel.Memory_GetUsersList, async () => {
    return await memoryService.getUsersList()
  })
  ipcMain.handle(IpcChannel.Memory_Add, (_, messages, config) => memoryService.add(messages, config))
  ipcMain.handle(IpcChannel.Memory_Search, (_, query, config) => memoryService.search(query, config))
  ipcMain.handle(IpcChannel.Memory_List, (_, config) => memoryService.list(config))
  ipcMain.handle(IpcChannel.Memory_Delete, (_, id) => memoryService.delete(id))
  ipcMain.handle(IpcChannel.Memory_Update, (_, id, memory, metadata) => memoryService.update(id, memory, metadata))
  ipcMain.handle(IpcChannel.Memory_Get, (_, memoryId) => memoryService.get(memoryId))
  ipcMain.handle(IpcChannel.Memory_SetConfig, (_, config) => memoryService.setConfig(config))
  ipcMain.handle(IpcChannel.Memory_DeleteUser, (_, userId) => memoryService.deleteUser(userId))
  ipcMain.handle(IpcChannel.Memory_DeleteAllMemoriesForUser, (_, userId) =>
    memoryService.deleteAllMemoriesForUser(userId)
  )
  ipcMain.handle(IpcChannel.Memory_GetUsersList, () => memoryService.getUsersList())
  ipcMain.handle(IpcChannel.Memory_MigrateMemoryDb, () => memoryService.migrateMemoryDb())

  // window
  ipcMain.handle(IpcChannel.Windows_SetMinimumSize, (_, width: number, height: number) => {
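The memory-handler rewrite drops the `async (…) => { return await … }` wrappers because `ipcMain.handle` already awaits a returned promise, so both forms resolve the renderer's `invoke()` with the same value. A minimal sketch of the equivalence (channel names and the service object here are hypothetical, not from the diff):

```ts
import { ipcMain } from 'electron'

// Stand-in for any promise-returning service method.
const demoService = { get: async (id: string) => ({ id }) }

// Both registrations behave identically: ipcMain.handle awaits whatever the
// listener returns, whether that is a promise or a plain value.
ipcMain.handle('demo:verbose', async (_event, id: string) => {
  return await demoService.get(id)
})
ipcMain.handle('demo:concise', (_event, id: string) => demoService.get(id))
```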
@@ -854,8 +858,8 @@ export function registerIpc(mainWindow: BrowserWindow, app: Electron.App) {
  )

  // search window
  ipcMain.handle(IpcChannel.SearchWindow_Open, async (_, uid: string) => {
    await searchService.openSearchWindow(uid)
  ipcMain.handle(IpcChannel.SearchWindow_Open, async (_, uid: string, show?: boolean) => {
    await searchService.openSearchWindow(uid, show)
  })
  ipcMain.handle(IpcChannel.SearchWindow_Close, async (_, uid: string) => {
    await searchService.closeSearchWindow(uid)
@@ -971,15 +975,36 @@ export function registerIpc(mainWindow: BrowserWindow, app: Electron.App) {
  ipcMain.handle(IpcChannel.OCR_ListProviders, () => ocrService.listProviderIds())

  // OVMS
  ipcMain.handle(IpcChannel.Ovms_AddModel, (_, modelName: string, modelId: string, modelSource: string, task: string) =>
    ovmsManager.addModel(modelName, modelId, modelSource, task)
  )
  ipcMain.handle(IpcChannel.Ovms_StopAddModel, () => ovmsManager.stopAddModel())
  ipcMain.handle(IpcChannel.Ovms_GetModels, () => ovmsManager.getModels())
  ipcMain.handle(IpcChannel.Ovms_IsRunning, () => ovmsManager.initializeOvms())
  ipcMain.handle(IpcChannel.Ovms_GetStatus, () => ovmsManager.getOvmsStatus())
  ipcMain.handle(IpcChannel.Ovms_RunOVMS, () => ovmsManager.runOvms())
  ipcMain.handle(IpcChannel.Ovms_StopOVMS, () => ovmsManager.stopOvms())
  ipcMain.handle(IpcChannel.Ovms_IsSupported, () => isOvmsSupported)
  if (isOvmsSupported) {
    const { ovmsManager } = await import('./services/OvmsManager')
    if (ovmsManager) {
      ipcMain.handle(
        IpcChannel.Ovms_AddModel,
        (_, modelName: string, modelId: string, modelSource: string, task: string) =>
          ovmsManager.addModel(modelName, modelId, modelSource, task)
      )
      ipcMain.handle(IpcChannel.Ovms_StopAddModel, () => ovmsManager.stopAddModel())
      ipcMain.handle(IpcChannel.Ovms_GetModels, () => ovmsManager.getModels())
      ipcMain.handle(IpcChannel.Ovms_IsRunning, () => ovmsManager.initializeOvms())
      ipcMain.handle(IpcChannel.Ovms_GetStatus, () => ovmsManager.getOvmsStatus())
      ipcMain.handle(IpcChannel.Ovms_RunOVMS, () => ovmsManager.runOvms())
      ipcMain.handle(IpcChannel.Ovms_StopOVMS, () => ovmsManager.stopOvms())
    } else {
      logger.error('Unexpected behavior: undefined ovmsManager, but OVMS should be supported.')
    }
  } else {
    const fallback = () => {
      throw new Error('OVMS is only supported on Windows with intel CPU.')
    }
    ipcMain.handle(IpcChannel.Ovms_AddModel, fallback)
    ipcMain.handle(IpcChannel.Ovms_StopAddModel, fallback)
    ipcMain.handle(IpcChannel.Ovms_GetModels, fallback)
    ipcMain.handle(IpcChannel.Ovms_IsRunning, fallback)
    ipcMain.handle(IpcChannel.Ovms_GetStatus, fallback)
    ipcMain.handle(IpcChannel.Ovms_RunOVMS, fallback)
    ipcMain.handle(IpcChannel.Ovms_StopOVMS, fallback)
  }

  // CherryAI
  ipcMain.handle(IpcChannel.Cherryai_GetSignature, (_, params) => generateSignature(params))
@ -1036,12 +1061,18 @@ export function registerIpc(mainWindow: BrowserWindow, app: Electron.App) {
|
||||
} catch (error) {
|
||||
const pluginError = extractPluginError(error)
|
||||
if (pluginError) {
|
||||
logger.error('Failed to list installed plugins', { agentId, error: pluginError })
|
||||
logger.error('Failed to list installed plugins', {
|
||||
agentId,
|
||||
error: pluginError
|
||||
})
|
||||
return { success: false, error: pluginError }
|
||||
}
|
||||
|
||||
const err = normalizeError(error)
|
||||
logger.error('Failed to list installed plugins', { agentId, error: err })
|
||||
logger.error('Failed to list installed plugins', {
|
||||
agentId,
|
||||
error: err
|
||||
})
|
||||
return {
|
||||
success: false,
|
||||
error: {
|
||||
@ -1097,12 +1128,17 @@ export function registerIpc(mainWindow: BrowserWindow, app: Electron.App) {
|
||||
}
|
||||
})
|
||||
|
||||
// WebSocket
|
||||
ipcMain.handle(IpcChannel.WebSocket_Start, WebSocketService.start)
|
||||
ipcMain.handle(IpcChannel.WebSocket_Stop, WebSocketService.stop)
|
||||
ipcMain.handle(IpcChannel.WebSocket_Status, WebSocketService.getStatus)
|
||||
ipcMain.handle(IpcChannel.WebSocket_SendFile, WebSocketService.sendFile)
|
||||
ipcMain.handle(IpcChannel.WebSocket_GetAllCandidates, WebSocketService.getAllCandidates)
|
||||
ipcMain.handle(IpcChannel.LocalTransfer_ListServices, () => localTransferService.getState())
|
||||
ipcMain.handle(IpcChannel.LocalTransfer_StartScan, () => localTransferService.startDiscovery({ resetList: true }))
|
||||
ipcMain.handle(IpcChannel.LocalTransfer_StopScan, () => localTransferService.stopDiscovery())
|
||||
ipcMain.handle(IpcChannel.LocalTransfer_Connect, (_, payload: LocalTransferConnectPayload) =>
|
||||
lanTransferClientService.connectAndHandshake(payload)
|
||||
)
|
||||
ipcMain.handle(IpcChannel.LocalTransfer_Disconnect, () => lanTransferClientService.disconnect())
|
||||
ipcMain.handle(IpcChannel.LocalTransfer_SendFile, (_, payload: { filePath: string }) =>
|
||||
lanTransferClientService.sendFile(payload.filePath)
|
||||
)
|
||||
ipcMain.handle(IpcChannel.LocalTransfer_CancelTransfer, () => lanTransferClientService.cancelTransfer())
|
||||
|
||||
ipcMain.handle(IpcChannel.APP_CrashRenderProcess, () => {
|
||||
mainWindow.webContents.forcefullyCrashRenderer()
|
||||
|
||||
@@ -1,5 +1,14 @@
import { describe, expect, it, vi } from 'vitest'

vi.mock('node:fs', () => ({
  default: {
    existsSync: vi.fn(() => false),
    mkdirSync: vi.fn()
  },
  existsSync: vi.fn(() => false),
  mkdirSync: vi.fn()
}))

vi.mock('electron', () => {
  const sendCommand = vi.fn(async (command: string, params?: { expression?: string }) => {
    if (command === 'Runtime.evaluate') {
@@ -21,24 +30,31 @@ vi.mock('electron', () => {
    sendCommand
  }

  const webContents = {
  const createWebContents = () => ({
    debugger: debuggerObj,
    setUserAgent: vi.fn(),
    getURL: vi.fn(() => 'https://example.com/'),
    getTitle: vi.fn(async () => 'Example Title'),
    loadURL: vi.fn(async () => {}),
    once: vi.fn(),
    removeListener: vi.fn(),
    on: vi.fn()
  }

  const loadURL = vi.fn(async () => {})
    on: vi.fn(),
    isDestroyed: vi.fn(() => false),
    canGoBack: vi.fn(() => false),
    canGoForward: vi.fn(() => false),
    goBack: vi.fn(),
    goForward: vi.fn(),
    reload: vi.fn(),
    executeJavaScript: vi.fn(async () => null),
    setWindowOpenHandler: vi.fn()
  })

  const windows: any[] = []
  const views: any[] = []

  class MockBrowserWindow {
    private destroyed = false
    public webContents = webContents
    public loadURL = loadURL
    public webContents = createWebContents()
    public isDestroyed = vi.fn(() => this.destroyed)
    public close = vi.fn(() => {
      this.destroyed = true
@@ -47,31 +63,58 @@ vi.mock('electron', () => {
      this.destroyed = true
    })
    public on = vi.fn()
    public setBrowserView = vi.fn()
    public addBrowserView = vi.fn()
    public removeBrowserView = vi.fn()
    public getContentSize = vi.fn(() => [1200, 800])
    public show = vi.fn()

    constructor() {
      windows.push(this)
    }
  }

  class MockBrowserView {
    public webContents = createWebContents()
    public setBounds = vi.fn()
    public setAutoResize = vi.fn()
    public destroy = vi.fn()

    constructor() {
      views.push(this)
    }
  }

  const app = {
    isReady: vi.fn(() => true),
    whenReady: vi.fn(async () => {}),
    on: vi.fn()
    on: vi.fn(),
    getPath: vi.fn((key: string) => {
      if (key === 'userData') return '/mock/userData'
      if (key === 'temp') return '/tmp'
      return '/mock/unknown'
    }),
    getAppPath: vi.fn(() => '/mock/app'),
    setPath: vi.fn()
  }

  const nativeTheme = {
    on: vi.fn(),
    shouldUseDarkColors: false
  }

  return {
    BrowserWindow: MockBrowserWindow as any,
    BrowserView: MockBrowserView as any,
    app,
    nativeTheme,
    __mockDebugger: debuggerObj,
    __mockSendCommand: sendCommand,
    __mockLoadURL: loadURL,
    __mockWindows: windows
    __mockWindows: windows,
    __mockViews: views
  }
})

import * as electron from 'electron'
const { __mockWindows } = electron as typeof electron & { __mockWindows: any[] }

import { CdpBrowserController } from '../browser'

describe('CdpBrowserController', () => {
@@ -81,54 +124,249 @@ describe('CdpBrowserController', () => {
    expect(result).toBe('ok')
  })

  it('opens a URL (hidden) and returns current page info', async () => {
  it('opens a URL in normal mode and returns current page info', async () => {
    const controller = new CdpBrowserController()
    const result = await controller.open('https://foo.bar/', 5000, false)
    expect(result.currentUrl).toBe('https://example.com/')
    expect(result.title).toBe('Example Title')
  })

  it('opens a URL (visible) when show=true', async () => {
  it('opens a URL in private mode', async () => {
    const controller = new CdpBrowserController()
    const result = await controller.open('https://foo.bar/', 5000, true, 'session-a')
    const result = await controller.open('https://foo.bar/', 5000, true)
    expect(result.currentUrl).toBe('https://example.com/')
    expect(result.title).toBe('Example Title')
  })

  it('reuses session for execute and supports multiline', async () => {
    const controller = new CdpBrowserController()
    await controller.open('https://foo.bar/', 5000, false, 'session-b')
    const result = await controller.execute('const a=1; const b=2; a+b;', 5000, 'session-b')
    await controller.open('https://foo.bar/', 5000, false)
    const result = await controller.execute('const a=1; const b=2; a+b;', 5000, false)
    expect(result).toBe('ok')
  })

  it('evicts least recently used session when exceeding maxSessions', async () => {
    const controller = new CdpBrowserController({ maxSessions: 2, idleTimeoutMs: 1000 * 60 })
    await controller.open('https://foo.bar/', 5000, false, 's1')
    await controller.open('https://foo.bar/', 5000, false, 's2')
    await controller.open('https://foo.bar/', 5000, false, 's3')
    const destroyedCount = __mockWindows.filter(
      (w: any) => w.destroy.mock.calls.length > 0 || w.close.mock.calls.length > 0
    ).length
    expect(destroyedCount).toBeGreaterThanOrEqual(1)
  it('normal and private modes are isolated', async () => {
    const controller = new CdpBrowserController()
    await controller.open('https://foo.bar/', 5000, false)
    await controller.open('https://foo.bar/', 5000, true)
    const normalResult = await controller.execute('1+1', 5000, false)
    const privateResult = await controller.execute('1+1', 5000, true)
    expect(normalResult).toBe('ok')
    expect(privateResult).toBe('ok')
  })

  it('fetches URL and returns html format', async () => {
  it('fetches URL and returns html format with tabId', async () => {
    const controller = new CdpBrowserController()
    const result = await controller.fetch('https://example.com/', 'html')
    expect(result).toBe('<html><body><h1>Test</h1><p>Content</p></body></html>')
    expect(result.tabId).toBeDefined()
    expect(result.content).toBe('<html><body><h1>Test</h1><p>Content</p></body></html>')
  })

  it('fetches URL and returns txt format', async () => {
  it('fetches URL and returns txt format with tabId', async () => {
    const controller = new CdpBrowserController()
    const result = await controller.fetch('https://example.com/', 'txt')
    expect(result).toBe('Test\nContent')
    expect(result.tabId).toBeDefined()
    expect(result.content).toBe('Test\nContent')
  })

  it('fetches URL and returns markdown format (default)', async () => {
  it('fetches URL and returns markdown format (default) with tabId', async () => {
    const controller = new CdpBrowserController()
    const result = await controller.fetch('https://example.com/')
    expect(typeof result).toBe('string')
    expect(result).toContain('Test')
    expect(result.tabId).toBeDefined()
    expect(typeof result.content).toBe('string')
    expect(result.content).toContain('Test')
  })

  it('fetches URL in private mode with tabId', async () => {
    const controller = new CdpBrowserController()
    const result = await controller.fetch('https://example.com/', 'html', 10000, true)
    expect(result.tabId).toBeDefined()
    expect(result.content).toBe('<html><body><h1>Test</h1><p>Content</p></body></html>')
  })

  describe('Multi-tab support', () => {
    it('creates new tab with newTab parameter', async () => {
      const controller = new CdpBrowserController()
      const result1 = await controller.open('https://site1.com/', 5000, false, true)
      const result2 = await controller.open('https://site2.com/', 5000, false, true)

      expect(result1.tabId).toBeDefined()
      expect(result2.tabId).toBeDefined()
      expect(result1.tabId).not.toBe(result2.tabId)
    })

    it('reuses same tab without newTab parameter', async () => {
      const controller = new CdpBrowserController()
      const result1 = await controller.open('https://site1.com/', 5000, false)
      const result2 = await controller.open('https://site2.com/', 5000, false)

      expect(result1.tabId).toBe(result2.tabId)
    })

    it('fetches in new tab with newTab parameter', async () => {
      const controller = new CdpBrowserController()
      await controller.open('https://example.com/', 5000, false)
      const tabs = await controller.listTabs(false)
      const initialTabCount = tabs.length

      await controller.fetch('https://other.com/', 'html', 10000, false, true)
      const tabsAfter = await controller.listTabs(false)

      expect(tabsAfter.length).toBe(initialTabCount + 1)
    })
  })

  describe('Tab management', () => {
    it('lists tabs in a window', async () => {
      const controller = new CdpBrowserController()
      await controller.open('https://example.com/', 5000, false)

      const tabs = await controller.listTabs(false)
      expect(tabs.length).toBeGreaterThan(0)
      expect(tabs[0].tabId).toBeDefined()
    })

    it('lists tabs separately for normal and private modes', async () => {
      const controller = new CdpBrowserController()
      await controller.open('https://example.com/', 5000, false)
      await controller.open('https://example.com/', 5000, true)

      const normalTabs = await controller.listTabs(false)
      const privateTabs = await controller.listTabs(true)

      expect(normalTabs.length).toBe(1)
      expect(privateTabs.length).toBe(1)
      expect(normalTabs[0].tabId).not.toBe(privateTabs[0].tabId)
    })

    it('closes specific tab', async () => {
      const controller = new CdpBrowserController()
      const result1 = await controller.open('https://site1.com/', 5000, false, true)
      await controller.open('https://site2.com/', 5000, false, true)

      const tabsBefore = await controller.listTabs(false)
      expect(tabsBefore.length).toBe(2)

      await controller.closeTab(false, result1.tabId)

      const tabsAfter = await controller.listTabs(false)
      expect(tabsAfter.length).toBe(1)
      expect(tabsAfter.find((t) => t.tabId === result1.tabId)).toBeUndefined()
    })

    it('switches active tab', async () => {
      const controller = new CdpBrowserController()
      const result1 = await controller.open('https://site1.com/', 5000, false, true)
      const result2 = await controller.open('https://site2.com/', 5000, false, true)

      await controller.switchTab(false, result1.tabId)
      await controller.switchTab(false, result2.tabId)
    })

    it('throws error when switching to non-existent tab', async () => {
      const controller = new CdpBrowserController()
      await controller.open('https://example.com/', 5000, false)

      await expect(controller.switchTab(false, 'non-existent-tab')).rejects.toThrow('Tab non-existent-tab not found')
    })
  })

  describe('Reset behavior', () => {
    it('resets specific tab only', async () => {
      const controller = new CdpBrowserController()
      const result1 = await controller.open('https://site1.com/', 5000, false, true)
      await controller.open('https://site2.com/', 5000, false, true)

      await controller.reset(false, result1.tabId)

      const tabs = await controller.listTabs(false)
      expect(tabs.length).toBe(1)
    })

    it('resets specific window only', async () => {
      const controller = new CdpBrowserController()
      await controller.open('https://example.com/', 5000, false)
      await controller.open('https://example.com/', 5000, true)

      await controller.reset(false)

      const normalTabs = await controller.listTabs(false)
      const privateTabs = await controller.listTabs(true)

      expect(normalTabs.length).toBe(0)
      expect(privateTabs.length).toBe(1)
    })

    it('resets all windows', async () => {
      const controller = new CdpBrowserController()
      await controller.open('https://example.com/', 5000, false)
      await controller.open('https://example.com/', 5000, true)

      await controller.reset()

      const normalTabs = await controller.listTabs(false)
      const privateTabs = await controller.listTabs(true)

      expect(normalTabs.length).toBe(0)
      expect(privateTabs.length).toBe(0)
    })
  })

  describe('showWindow parameter', () => {
    it('passes showWindow parameter through open', async () => {
      const controller = new CdpBrowserController()
      const result = await controller.open('https://example.com/', 5000, false, false, true)
      expect(result.currentUrl).toBe('https://example.com/')
      expect(result.tabId).toBeDefined()
    })

    it('passes showWindow parameter through fetch', async () => {
      const controller = new CdpBrowserController()
      const result = await controller.fetch('https://example.com/', 'html', 10000, false, false, true)
      expect(result.tabId).toBeDefined()
      expect(result.content).toBe('<html><body><h1>Test</h1><p>Content</p></body></html>')
    })

    it('passes showWindow parameter through createTab', async () => {
      const controller = new CdpBrowserController()
      const { tabId, view } = await controller.createTab(false, true)
      expect(tabId).toBeDefined()
      expect(view).toBeDefined()
    })

    it('shows existing window when showWindow=true on subsequent calls', async () => {
      const controller = new CdpBrowserController()
      // First call creates window
      await controller.open('https://example.com/', 5000, false, false, false)
      // Second call with showWindow=true should show existing window
      const result = await controller.open('https://example.com/', 5000, false, false, true)
      expect(result.currentUrl).toBe('https://example.com/')
    })
  })

  describe('Window limits and eviction', () => {
    it('respects maxWindows limit', async () => {
      const controller = new CdpBrowserController({ maxWindows: 1 })
      await controller.open('https://example.com/', 5000, false)
      await controller.open('https://example.com/', 5000, true)

      const normalTabs = await controller.listTabs(false)
      const privateTabs = await controller.listTabs(true)

      expect(privateTabs.length).toBe(1)
      expect(normalTabs.length).toBe(0)
    })

    it('cleans up idle windows on next access', async () => {
      const controller = new CdpBrowserController({ idleTimeoutMs: 1 })
      await controller.open('https://example.com/', 5000, false)

      await new Promise((r) => setTimeout(r, 10))

      await controller.open('https://example.com/', 5000, true)

      const normalTabs = await controller.listTabs(false)
      expect(normalTabs.length).toBe(0)
    })
  })
})
177 src/main/mcpServers/browser/README.md Normal file
@@ -0,0 +1,177 @@
# Browser MCP Server

A Model Context Protocol (MCP) server for controlling browser windows via Chrome DevTools Protocol (CDP).

## Features

### ✨ User Data Persistence
- **Normal mode (default)**: Cookies, localStorage, and sessionStorage persist across browser restarts
- **Private mode**: Ephemeral browsing - no data persists (like incognito mode)

### 🔄 Window Management
- Two browsing modes: normal (persistent) and private (ephemeral)
- Lazy idle timeout cleanup (cleaned on next window access)
- Maximum window limits to prevent resource exhaustion

> **Note**: Normal mode uses a global `persist:default` partition shared by all clients. This means login sessions and stored data are accessible to any code using the MCP server.
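
A minimal sketch of what that partitioning means at the Electron level (illustrative only, not the controller's actual wiring): the `persist:` prefix is what opts a partition into on-disk storage, so a partition named plain `private` lives only in memory.

```typescript
import { BrowserView, session } from 'electron'

// 'persist:'-prefixed partitions are written to disk and survive restarts;
// unprefixed partitions (like 'private') are in-memory and die with the app.
const normalSession = session.fromPartition('persist:default')
const privateSession = session.fromPartition('private')

// A view bound to a partition inherits its cookies, localStorage, and cache.
const view = new BrowserView({ webPreferences: { partition: 'persist:default' } })
```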

## Architecture

### How It Works
```
Normal Mode (BrowserWindow)
├─ Persistent Storage (partition: persist:default) ← Global, shared across all clients
└─ Tabs (BrowserView) ← created via newTab or automatically

Private Mode (BrowserWindow)
├─ Ephemeral Storage (partition: private) ← No disk persistence
└─ Tabs (BrowserView) ← created via newTab or automatically
```

- **One Window Per Mode**: Normal and private modes each have their own window
- **Multi-Tab Support**: Use `newTab: true` for parallel URL requests
- **Storage Isolation**: Normal and private modes have completely separate storage

## Available Tools

### `open`
Open a URL in a browser window. Optionally return page content.
```json
{
  "url": "https://example.com",
  "format": "markdown",
  "timeout": 10000,
  "privateMode": false,
  "newTab": false,
  "showWindow": false
}
```
- `format`: If set (`html`, `txt`, `markdown`, `json`), returns page content in that format along with tabId. If not set, just opens the page and returns navigation info.
- `newTab`: Set to `true` to open in a new tab (required for parallel requests)
- `showWindow`: Set to `true` to display the browser window (useful for debugging)
- Returns (without format): `{ currentUrl, title, tabId }`
- Returns (with format): `{ tabId, content }` where content is in the specified format

### `execute`
Execute JavaScript code in the page context.
```json
{
  "code": "document.title",
  "timeout": 5000,
  "privateMode": false,
  "tabId": "optional-tab-id"
}
```
- `tabId`: Target a specific tab (from `open` response)

### `reset`
Reset browser windows and tabs.
```json
{
  "privateMode": false,
  "tabId": "optional-tab-id"
}
```
- Omit all parameters to close all windows
- Set `privateMode` to close a specific window
- Set both `privateMode` and `tabId` to close a specific tab only
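
Concretely, the three scopes map onto controller calls like these (mirroring how the test suite exercises `reset`; `someTabId` is a placeholder for a tabId returned by `open`):

```typescript
await controller.reset()                 // omit everything: close all windows
await controller.reset(true)             // privateMode only: close the private window
await controller.reset(false, someTabId) // privateMode + tabId: close one tab
```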

## Usage Examples

### Basic Navigation
```typescript
// Open a URL in normal mode (data persists)
await controller.open('https://example.com')
```

### Fetch Page Content
```typescript
// Open URL and get content as markdown
await open({ url: 'https://example.com', format: 'markdown' })

// Open URL and get raw HTML
await open({ url: 'https://example.com', format: 'html' })
```

### Multi-Tab / Parallel Requests
```typescript
// Open multiple URLs in parallel using newTab
const [page1, page2] = await Promise.all([
  controller.open('https://site1.com', 10000, false, true), // newTab: true
  controller.open('https://site2.com', 10000, false, true) // newTab: true
])

// Execute on specific tab
await controller.execute('document.title', 5000, false, page1.tabId)

// Close specific tab when done
await controller.reset(false, page1.tabId)
```

### Private Browsing
```typescript
// Open a URL in private mode (no data persistence)
await controller.open('https://example.com', 10000, true)

// Cookies and localStorage won't persist after reset
```

### Data Persistence (Normal Mode)
```typescript
// Set data
await controller.open('https://example.com', 10000, false)
await controller.execute('localStorage.setItem("key", "value")', 5000, false)

// Close window
await controller.reset(false)

// Reopen - data persists!
await controller.open('https://example.com', 10000, false)
const value = await controller.execute('localStorage.getItem("key")', 5000, false)
// Returns: "value"
```

### No Persistence (Private Mode)
```typescript
// Set data in private mode
await controller.open('https://example.com', 10000, true)
await controller.execute('localStorage.setItem("key", "value")', 5000, true)

// Close private window
await controller.reset(true)

// Reopen - data is gone!
await controller.open('https://example.com', 10000, true)
const value = await controller.execute('localStorage.getItem("key")', 5000, true)
// Returns: null
```

## Configuration

```typescript
const controller = new CdpBrowserController({
  maxWindows: 5, // Maximum concurrent windows
  idleTimeoutMs: 5 * 60 * 1000 // 5 minutes idle timeout (lazy cleanup)
})
```

> **Note on Idle Timeout**: Idle windows are cleaned up lazily when the next window is created or accessed, not on a background timer.
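
In other words, the cleanup cost is paid on the next call rather than by a background timer. A rough sketch of that pattern, using hypothetical names (`ManagedWindow`, `sweepIdle`) that are not the controller's real internals:

```typescript
interface ManagedWindow {
  lastAccess: number
  destroy(): void // closes the BrowserWindow and all of its tabs
}

// Called at the start of every window access, not from a timer.
function sweepIdle(windows: Map<string, ManagedWindow>, idleTimeoutMs: number): void {
  const now = Date.now()
  for (const [key, win] of windows) {
    if (now - win.lastAccess > idleTimeoutMs) {
      win.destroy()
      windows.delete(key)
    }
  }
}
```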

## Best Practices

1. **Use Normal Mode for Authentication**: When you need to stay logged in across sessions
2. **Use Private Mode for Sensitive Operations**: When you don't want data to persist
3. **Use `newTab: true` for Parallel Requests**: Avoid race conditions when fetching multiple URLs
4. **Resource Cleanup**: Call `reset()` when done, or `reset(privateMode, tabId)` to close specific tabs
5. **Error Handling**: All tool handlers return error responses on failure
6. **Timeout Configuration**: Adjust timeouts based on page complexity

## Technical Details

- **CDP Version**: 1.3
- **User Agent**: Mozilla/5.0 (Macintosh; Intel Mac OS X 10.15; rv:145.0) Gecko/20100101 Firefox/145.0
- **Storage**:
  - Normal mode: `persist:default` (disk-persisted, global)
  - Private mode: `private` (memory only)
- **Window Size**: 1200x800 (default)
- **Visibility**: Windows hidden by default (use `showWindow: true` to display)

3 src/main/mcpServers/browser/constants.ts Normal file
@@ -0,0 +1,3 @@
export const TAB_BAR_HEIGHT = 92 // Height for Chrome-style tab bar (42px) + address bar (50px)
export const SESSION_KEY_DEFAULT = 'default'
export const SESSION_KEY_PRIVATE = 'private'
File diff suppressed because it is too large
567 src/main/mcpServers/browser/tabbar-html.ts Normal file
@@ -0,0 +1,567 @@
export const TAB_BAR_HTML = `<!DOCTYPE html>
<html>
<head>
<meta charset="UTF-8">
<style>
* { margin: 0; padding: 0; box-sizing: border-box; }
html, body {
  height: 100%;
  overflow: hidden;
  font-family: -apple-system, BlinkMacSystemFont, 'Segoe UI', Roboto, sans-serif;
  font-size: 12px;
  user-select: none;
}

/* Light theme (default) */
:root {
  --bg-tabrow: #dee1e6;
  --bg-toolbar: #fff;
  --bg-tab-hover: rgba(0,0,0,0.04);
  --bg-tab-active: #fff;
  --bg-url: #f1f3f4;
  --bg-url-focus: #fff;
  --bg-btn-hover: rgba(0,0,0,0.08);
  --bg-favicon: #9aa0a6;
  --color-text: #5f6368;
  --color-text-active: #202124;
  --color-separator: #c4c7cc;
  --shadow-url-focus: 0 1px 6px rgba(32,33,36,0.28);
  --window-close-hover: #e81123;
}

/* Dark theme */
body.theme-dark {
  --bg-tabrow: #202124;
  --bg-toolbar: #292a2d;
  --bg-tab-hover: rgba(255,255,255,0.06);
  --bg-tab-active: #292a2d;
  --bg-url: #35363a;
  --bg-url-focus: #202124;
  --bg-btn-hover: rgba(255,255,255,0.1);
  --bg-favicon: #5f6368;
  --color-text: #9aa0a6;
  --color-text-active: #e8eaed;
  --color-separator: #3c3d41;
  --shadow-url-focus: 0 1px 6px rgba(0,0,0,0.5);
  --window-close-hover: #e81123;
}

body {
  background: var(--bg-tabrow);
  display: flex;
  flex-direction: column;
  position: relative;
}
body.platform-mac { --traffic-light-width: 70px; --window-controls-width: 0px; }
body.platform-win, body.platform-linux { --traffic-light-width: 0px; --window-controls-width: 138px; }

/* Chrome-style tab row */
#tab-row {
  display: flex;
  align-items: flex-end;
  padding: 8px 8px 0 8px;
  padding-left: calc(8px + var(--traffic-light-width, 0px));
  padding-right: calc(8px + var(--window-controls-width, 0px));
  height: 42px;
  flex-shrink: 0;
  -webkit-app-region: drag;
  background: var(--bg-tabrow);
  position: relative;
  z-index: 1;
}

#tabs-container {
  display: flex;
  align-items: flex-end;
  height: 34px;
  flex: 1;
  min-width: 0;
  overflow: hidden;
}

/* New tab button - inside tabs container, right after last tab */
#new-tab-btn {
  width: 28px;
  height: 28px;
  display: flex;
  align-items: center;
  justify-content: center;
  border-radius: 50%;
  cursor: pointer;
  margin-left: 4px;
  margin-bottom: 3px;
  -webkit-app-region: no-drag;
  flex-shrink: 0;
}
#new-tab-btn:hover { background: var(--bg-btn-hover); }
#new-tab-btn svg { width: 18px; height: 18px; fill: var(--color-text); }

/* Chrome-style tabs - shrink instead of scroll */
.tab {
  position: relative;
  display: flex;
  align-items: center;
  height: 34px;
  min-width: 36px;
  max-width: 240px;
  flex: 1 1 240px;
  padding: 0 6px;
  background: transparent;
  cursor: pointer;
  -webkit-app-region: no-drag;
  border-radius: 8px 8px 0 0;
  transition: background 0.1s;
}
/* When tab is narrow, hide title, show favicon by default, show close on hover */
.tab.narrow .tab-title { display: none; }
.tab.narrow { justify-content: center; padding: 0; }
.tab.narrow .tab-favicon { margin-right: 0; }
.tab.narrow .tab-close { position: absolute; margin-left: 0; }
/* On narrow tab hover, hide favicon and show close button */
.tab.narrow:hover .tab-favicon { display: none; }
.tab.narrow:hover .tab-close { opacity: 1; }
/* Separator line using pseudo-element */
.tab::after {
  content: '';
  position: absolute;
  right: 0;
  top: 8px;
  bottom: 8px;
  width: 1px;
  background: var(--color-separator);
  pointer-events: none;
}
/* Hide separator for last tab */
.tab:last-of-type::after { display: none; }
/* Hide separator when tab is hovered (right side) */
.tab:hover::after { display: none; }
/* Hide separator on tab before hovered tab (left side of hovered) - managed by JS .before-hover class */
.tab.before-hover::after { display: none; }
/* Hide separator for active tab and its neighbors */
.tab.active::after { display: none; }
/* Hide separator on tab before active (left side of active) - managed by JS .before-active class */
.tab.before-active::after { display: none; }

.tab:hover { background: var(--bg-tab-hover); }
.tab.active {
  background: var(--bg-tab-active);
  z-index: 1;
}

/* Tab favicon placeholder */
.tab-favicon {
  width: 16px;
  height: 16px;
  margin-right: 8px;
  border-radius: 2px;
  background: var(--bg-favicon);
  flex-shrink: 0;
  display: flex;
  align-items: center;
  justify-content: center;
}
.tab-favicon svg { width: 12px; height: 12px; fill: #fff; }
body.theme-dark .tab-favicon svg { fill: #9aa0a6; }

.tab-title {
  flex: 1;
  min-width: 0;
  overflow: hidden;
  text-overflow: ellipsis;
  white-space: nowrap;
  color: var(--color-text);
  font-size: 12px;
  font-weight: 400;
}
.tab.active .tab-title { color: var(--color-text-active); }

.tab-close {
  width: 24px;
  height: 24px;
  border-radius: 50%;
  display: flex;
  align-items: center;
  justify-content: center;
  margin-left: 4px;
  opacity: 0;
  transition: opacity 0.1s, background 0.1s;
  flex-shrink: 0;
}
.tab:hover .tab-close { opacity: 1; }
.tab-close:hover { background: var(--bg-btn-hover); }
.tab-close svg { width: 16px; height: 16px; fill: var(--color-text); }
.tab-close:hover svg { fill: var(--color-text-active); }

/* Chrome-style address bar */
#address-bar {
  display: flex;
  align-items: center;
  padding: 6px 16px 8px 8px;
  gap: 4px;
  background: var(--bg-toolbar);
  -webkit-app-region: drag;
}
.nav-btn {
  width: 32px;
  height: 32px;
  display: flex;
  align-items: center;
  justify-content: center;
  border-radius: 50%;
  cursor: pointer;
  background: transparent;
  border: none;
  flex-shrink: 0;
  -webkit-app-region: no-drag;
}
.nav-btn:hover { background: var(--bg-btn-hover); }
.nav-btn:disabled { opacity: 0.3; cursor: default; }
.nav-btn:disabled:hover { background: transparent; }
.nav-btn svg { width: 20px; height: 20px; fill: var(--color-text); }

#url-container {
  flex: 1;
  display: flex;
  align-items: center;
  background: var(--bg-url);
  border-radius: 24px;
  padding: 0 16px;
  height: 36px;
  -webkit-app-region: no-drag;
  transition: background 0.2s, box-shadow 0.2s;
}
#url-container:focus-within {
  background: var(--bg-url-focus);
  box-shadow: var(--shadow-url-focus);
}
#url-input {
  flex: 1;
  background: transparent;
  border: none;
  outline: none;
  color: var(--color-text-active);
  font-size: 14px;
  font-family: inherit;
}
#url-input::placeholder { color: var(--color-text); }
#url-input::-webkit-input-placeholder { color: var(--color-text); }

/* Window controls for Windows/Linux - use inline-flex inside tab-row instead of fixed position */
#window-controls {
  display: none;
  height: 42px;
  margin-left: auto;
  margin-right: calc(-8px - var(--window-controls-width, 0px));
  margin-top: -8px;
  -webkit-app-region: no-drag;
}
body.platform-win #window-controls,
body.platform-linux #window-controls { display: flex; }
.window-control-btn {
  width: 46px;
  height: 42px;
  display: flex;
  align-items: center;
  justify-content: center;
  background: transparent;
  border: none;
  cursor: pointer;
  transition: background 0.1s;
  -webkit-app-region: no-drag;
}
.window-control-btn:hover { background: var(--bg-btn-hover); }
.window-control-btn.close:hover { background: var(--window-close-hover); }
.window-control-btn svg { width: 10px; height: 10px; color: var(--color-text); fill: var(--color-text); stroke: var(--color-text); }
.window-control-btn:hover svg { color: var(--color-text-active); fill: var(--color-text-active); stroke: var(--color-text-active); }
.window-control-btn.close:hover svg { color: #fff; fill: #fff; stroke: #fff; }
</style>
</head>
<body>
  <div id="tab-row">
    <div id="tabs-container">
      <div id="new-tab-btn" title="New tab">
        <svg viewBox="0 0 24 24"><path d="M19 13h-6v6h-2v-6H5v-2h6V5h2v6h6v2z"/></svg>
      </div>
    </div>
    <!-- Window controls for Windows/Linux - inside tab-row to avoid drag region issues -->
    <div id="window-controls">
      <button class="window-control-btn" id="minimize-btn" title="Minimize">
        <svg viewBox="0 0 10 1"><rect width="10" height="1"/></svg>
      </button>
      <button class="window-control-btn" id="maximize-btn" title="Maximize">
        <svg viewBox="0 0 10 10"><rect x="0.5" y="0.5" width="9" height="9" fill="none" stroke="currentColor" stroke-width="1.2"/></svg>
      </button>
      <button class="window-control-btn close" id="close-btn" title="Close">
        <svg viewBox="0 0 10 10"><path d="M0 0L10 10M10 0L0 10" stroke="currentColor" stroke-width="1.2"/></svg>
      </button>
    </div>
  </div>
  <div id="address-bar">
    <button class="nav-btn" id="back-btn" title="Back" disabled>
      <svg viewBox="0 0 24 24"><path d="M20 11H7.83l5.59-5.59L12 4l-8 8 8 8 1.41-1.41L7.83 13H20v-2z"/></svg>
    </button>
    <button class="nav-btn" id="forward-btn" title="Forward" disabled>
      <svg viewBox="0 0 24 24"><path d="M12 4l-1.41 1.41L16.17 11H4v2h12.17l-5.58 5.59L12 20l8-8z"/></svg>
    </button>
    <button class="nav-btn" id="refresh-btn" title="Refresh">
      <svg viewBox="0 0 24 24"><path d="M17.65 6.35A7.958 7.958 0 0012 4c-4.42 0-7.99 3.58-7.99 8s3.57 8 7.99 8c3.73 0 6.84-2.55 7.73-6h-2.08A5.99 5.99 0 0112 18c-3.31 0-6-2.69-6-6s2.69-6 6-6c1.66 0 3.14.69 4.22 1.78L13 11h7V4l-2.35 2.35z"/></svg>
    </button>
    <div id="url-container">
      <input type="text" id="url-input" placeholder="Search or enter URL" spellcheck="false" />
    </div>
  </div>
  <script>
    const tabsContainer = document.getElementById('tabs-container');
    const urlInput = document.getElementById('url-input');
    const backBtn = document.getElementById('back-btn');
    const forwardBtn = document.getElementById('forward-btn');
    const refreshBtn = document.getElementById('refresh-btn');

    window.currentUrl = '';
    window.canGoBack = false;
    window.canGoForward = false;

    // Helper function to update before-active class for separator hiding
    function updateBeforeActiveClass() {
      var tabs = tabsContainer.querySelectorAll('.tab');
      tabs.forEach(function(tab, index) {
        tab.classList.remove('before-active');
        if (index < tabs.length - 1 && tabs[index + 1].classList.contains('active')) {
          tab.classList.add('before-active');
        }
      });
    }

    // Helper function to update narrow class based on tab width
    function updateNarrowClass() {
      var tabs = tabsContainer.querySelectorAll('.tab');
      tabs.forEach(function(tab) {
        if (tab.offsetWidth < 72) {
          tab.classList.add('narrow');
        } else {
          tab.classList.remove('narrow');
        }
      });
    }

    var newTabBtnHtml = '<div id="new-tab-btn" title="New tab"><svg viewBox="0 0 24 24"><path d="M19 13h-6v6h-2v-6H5v-2h6V5h2v6h6v2z"/></svg></div>';

    // Track if we're in "closing mode" where tab widths should be fixed
    var closingModeTimeout = null;
    var isInClosingMode = false;

    function enterClosingMode() {
      isInClosingMode = true;
      // Clear any existing timeout
      if (closingModeTimeout) {
        clearTimeout(closingModeTimeout);
      }
      // Set timeout to exit closing mode after 1 second of no activity
      closingModeTimeout = setTimeout(function() {
        exitClosingMode();
      }, 1000);
    }

    function exitClosingMode() {
      isInClosingMode = false;
      if (closingModeTimeout) {
        clearTimeout(closingModeTimeout);
        closingModeTimeout = null;
      }
      // Remove fixed widths from tabs
      var tabs = tabsContainer.querySelectorAll('.tab');
      tabs.forEach(function(tab) {
        tab.style.flex = '';
        tab.style.width = '';
      });
    }

    // Exit closing mode when mouse leaves the tab row
    document.getElementById('tab-row').addEventListener('mouseleave', function() {
      if (isInClosingMode) {
        exitClosingMode();
      }
    });

    window.updateTabs = function(tabs, activeUrl, canGoBack, canGoForward) {
      // Capture current tab widths before update if in closing mode
      var previousWidths = {};
      if (isInClosingMode) {
        var existingTabs = tabsContainer.querySelectorAll('.tab');
        existingTabs.forEach(function(tab) {
          previousWidths[tab.dataset.id] = tab.offsetWidth;
        });
      }

      if (!tabs || tabs.length === 0) {
        // Window will be closed by main process when last tab is closed
        // Just clear the UI in case this is called before window closes
        tabsContainer.innerHTML = newTabBtnHtml;
        urlInput.value = '';
        document.getElementById('new-tab-btn').addEventListener('click', function() {
          sendAction({ type: 'new' });
        });
        return;
      }
      tabsContainer.innerHTML = tabs.map(function(tab) {
        var cls = 'tab' + (tab.isActive ? ' active' : '');
        var title = (tab.title || 'New Tab').replace(/&/g,'&amp;').replace(/</g,'&lt;').replace(/>/g,'&gt;').replace(/"/g,'&quot;');
        var url = (tab.url || '').replace(/&/g,'&amp;').replace(/</g,'&lt;').replace(/>/g,'&gt;').replace(/"/g,'&quot;');
        return '<div class="' + cls + '" data-id="' + tab.id + '" title="' + url + '">' +
          '<div class="tab-favicon"><svg viewBox="0 0 24 24"><path d="M12 2C6.48 2 2 6.48 2 12s4.48 10 10 10 10-4.48 10-10S17.52 2 12 2zm-1 17.93c-3.95-.49-7-3.85-7-7.93 0-.62.08-1.21.21-1.79L9 15v1c0 1.1.9 2 2 2v1.93zm6.9-2.54c-.26-.81-1-1.39-1.9-1.39h-1v-3c0-.55-.45-1-1-1H8v-2h2c.55 0 1-.45 1-1V7h2c1.1 0 2-.9 2-2v-.41c2.93 1.19 5 4.06 5 7.41 0 2.08-.8 3.97-2.1 5.39z"/></svg></div>' +
          '<span class="tab-title">' + title + '</span>' +
          '<div class="tab-close" data-id="' + tab.id + '">' +
          '<svg viewBox="0 0 24 24"><path d="M19 6.41L17.59 5 12 10.59 6.41 5 5 6.41 10.59 12 5 17.59 6.41 19 12 13.41 17.59 19 19 17.59 13.41 12z"/></svg>' +
          '</div>' +
          '</div>';
      }).join('') + newTabBtnHtml;

      // Re-attach event listener for new tab button
      document.getElementById('new-tab-btn').addEventListener('click', function() {
        sendAction({ type: 'new' });
      });

      // If in closing mode, fix the widths of remaining tabs
      if (isInClosingMode) {
        var newTabs = tabsContainer.querySelectorAll('.tab');
        newTabs.forEach(function(tab) {
          var prevWidth = previousWidths[tab.dataset.id];
          if (prevWidth) {
            tab.style.flex = '0 0 ' + prevWidth + 'px';
            tab.style.width = prevWidth + 'px';
          }
        });
      }

      // Update before-active class for proper separator hiding
      updateBeforeActiveClass();
      // Update narrow class based on tab width
      updateNarrowClass();

      if (activeUrl !== undefined) {
        window.currentUrl = activeUrl || '';
        if (document.activeElement !== urlInput) {
          urlInput.value = window.currentUrl;
        }
      }

      if (canGoBack !== undefined) {
        window.canGoBack = canGoBack;
        backBtn.disabled = !canGoBack;
      }
      if (canGoForward !== undefined) {
        window.canGoForward = canGoForward;
        forwardBtn.disabled = !canGoForward;
      }
    };

    function sendAction(action) {
      window.postMessage({ channel: 'tabbar-action', payload: action }, '*');
    }

    tabsContainer.addEventListener('click', function(e) {
      var closeBtn = e.target.closest('.tab-close');
      if (closeBtn) {
        e.stopPropagation();
        enterClosingMode();
        sendAction({ type: 'close', tabId: closeBtn.dataset.id });
        return;
      }
      var tab = e.target.closest('.tab');
      if (tab) {
        sendAction({ type: 'switch', tabId: tab.dataset.id });
      }
    });

    tabsContainer.addEventListener('auxclick', function(e) {
      if (e.button === 1) {
        var tab = e.target.closest('.tab');
        if (tab) {
          enterClosingMode();
          sendAction({ type: 'close', tabId: tab.dataset.id });
        }
      }
    });

    // Handle hover state for separator hiding (left side of hovered tab)
    tabsContainer.addEventListener('mouseover', function(e) {
      var tab = e.target.closest('.tab');
      // Clear all before-hover classes first
      tabsContainer.querySelectorAll('.before-hover').forEach(function(t) {
        t.classList.remove('before-hover');
      });
      if (tab) {
        var prev = tab.previousElementSibling;
        if (prev && prev.classList.contains('tab')) {
          prev.classList.add('before-hover');
        }
      }
    });

    tabsContainer.addEventListener('mouseleave', function() {
      tabsContainer.querySelectorAll('.before-hover').forEach(function(t) {
        t.classList.remove('before-hover');
      });
    });

    urlInput.addEventListener('keydown', function(e) {
      if (e.key === 'Enter') {
        var url = urlInput.value.trim();
        if (url) {
          sendAction({ type: 'navigate', url: url });
        }
      }
    });

    urlInput.addEventListener('focus', function() {
      urlInput.select();
    });

    backBtn.addEventListener('click', function() {
      if (window.canGoBack) {
        sendAction({ type: 'back' });
      }
    });

    forwardBtn.addEventListener('click', function() {
      if (window.canGoForward) {
        sendAction({ type: 'forward' });
      }
    });

    refreshBtn.addEventListener('click', function() {
      sendAction({ type: 'refresh' });
    });

    // Window controls for Windows/Linux
    document.getElementById('minimize-btn').addEventListener('click', function() {
      sendAction({ type: 'window-minimize' });
    });
    document.getElementById('maximize-btn').addEventListener('click', function() {
      sendAction({ type: 'window-maximize' });
    });
    document.getElementById('close-btn').addEventListener('click', function() {
      sendAction({ type: 'window-close' });
    });

    // Platform initialization - called from main process
    window.initPlatform = function(platform) {
      document.body.classList.add('platform-' + platform);
    };

    // Theme initialization - called from main process
    window.setTheme = function(isDark) {
      if (isDark) {
        document.body.classList.add('theme-dark');
      } else {
        document.body.classList.remove('theme-dark');
      }
    };

    // Update narrow class on window resize
    window.addEventListener('resize', function() {
      updateNarrowClass();
    });
  </script>
</body>
</html>`
@@ -1,36 +1,39 @@
import * as z from 'zod'

import type { CdpBrowserController } from '../controller'
import { logger } from '../types'
import { errorResponse, successResponse } from './utils'

export const ExecuteSchema = z.object({
  code: z
    .string()
    .describe(
      'JavaScript evaluated via Chrome DevTools Runtime.evaluate. Keep it short; prefer one-line with semicolons for multiple statements.'
    ),
  timeout: z.number().default(5000).describe('Timeout in milliseconds for code execution (default: 5000ms)'),
  sessionId: z.string().optional().describe('Session identifier to target a specific page (default: default)')
  code: z.string().describe('JavaScript code to run in page context'),
  timeout: z.number().default(5000).describe('Execution timeout in ms (default: 5000)'),
  privateMode: z.boolean().optional().describe('Target private session (default: false)'),
  tabId: z.string().optional().describe('Target specific tab by ID')
})

export const executeToolDefinition = {
  name: 'execute',
  description:
    'Run JavaScript in the current page via Runtime.evaluate. Prefer short, single-line snippets; use semicolons for multiple statements.',
    'Run JavaScript in the currently open page. Use after open to: click elements, fill forms, extract content (document.body.innerText), or interact with the page. The page must be opened first with open or fetch.',
  inputSchema: {
    type: 'object',
    properties: {
      code: {
        type: 'string',
        description: 'One-line JS to evaluate in page context'
        description:
          'JavaScript to evaluate. Examples: document.body.innerText (get text), document.querySelector("button").click() (click), document.title (get title)'
      },
      timeout: {
        type: 'number',
        description: 'Timeout in milliseconds (default 5000)'
        description: 'Execution timeout in ms (default: 5000)'
      },
      sessionId: {
      privateMode: {
        type: 'boolean',
        description: 'Target private session (default: false)'
      },
      tabId: {
        type: 'string',
        description: 'Session identifier; targets a specific page (default: default)'
        description: 'Target specific tab by ID (from open response)'
      }
    },
    required: ['code']
@@ -38,11 +41,12 @@ export const executeToolDefinition = {
}

export async function handleExecute(controller: CdpBrowserController, args: unknown) {
  const { code, timeout, sessionId } = ExecuteSchema.parse(args)
  const { code, timeout, privateMode, tabId } = ExecuteSchema.parse(args)
  try {
    const value = await controller.execute(code, timeout, sessionId ?? 'default')
    const value = await controller.execute(code, timeout, privateMode ?? false, tabId)
    return successResponse(typeof value === 'string' ? value : JSON.stringify(value))
  } catch (error) {
    logger.error('Execute failed', { error, code: code.slice(0, 100), privateMode, tabId })
    return errorResponse(error as Error)
  }
}

@@ -1,49 +0,0 @@
import * as z from 'zod'

import type { CdpBrowserController } from '../controller'
import { errorResponse, successResponse } from './utils'

export const FetchSchema = z.object({
  url: z.url().describe('URL to fetch'),
  format: z.enum(['html', 'txt', 'markdown', 'json']).default('markdown').describe('Output format (default: markdown)'),
  timeout: z.number().optional().describe('Timeout in milliseconds for navigation (default: 10000)'),
  sessionId: z.string().optional().describe('Session identifier (default: default)')
})

export const fetchToolDefinition = {
  name: 'fetch',
  description: 'Fetch a URL using the browser and return content in specified format (html, txt, markdown, json)',
  inputSchema: {
    type: 'object',
    properties: {
      url: {
        type: 'string',
        description: 'URL to fetch'
      },
      format: {
        type: 'string',
        enum: ['html', 'txt', 'markdown', 'json'],
        description: 'Output format (default: markdown)'
      },
      timeout: {
        type: 'number',
        description: 'Navigation timeout in milliseconds (default: 10000)'
      },
      sessionId: {
        type: 'string',
        description: 'Session identifier (default: default)'
      }
    },
    required: ['url']
  }
}

export async function handleFetch(controller: CdpBrowserController, args: unknown) {
  const { url, format, timeout, sessionId } = FetchSchema.parse(args)
  try {
    const content = await controller.fetch(url, format, timeout ?? 10000, sessionId ?? 'default')
    return successResponse(typeof content === 'string' ? content : JSON.stringify(content))
  } catch (error) {
    return errorResponse(error as Error)
  }
}
@@ -1,15 +1,13 @@
export { ExecuteSchema, executeToolDefinition, handleExecute } from './execute'
export { FetchSchema, fetchToolDefinition, handleFetch } from './fetch'
export { handleOpen, OpenSchema, openToolDefinition } from './open'
export { handleReset, resetToolDefinition } from './reset'

import type { CdpBrowserController } from '../controller'
import { executeToolDefinition, handleExecute } from './execute'
import { fetchToolDefinition, handleFetch } from './fetch'
import { handleOpen, openToolDefinition } from './open'
import { handleReset, resetToolDefinition } from './reset'

export const toolDefinitions = [openToolDefinition, executeToolDefinition, resetToolDefinition, fetchToolDefinition]
export const toolDefinitions = [openToolDefinition, executeToolDefinition, resetToolDefinition]

export const toolHandlers: Record<
  string,
@@ -20,6 +18,5 @@ export const toolHandlers: Record<
> = {
  open: handleOpen,
  execute: handleExecute,
  reset: handleReset,
  fetch: handleFetch
  reset: handleReset
}

@@ -1,39 +1,52 @@
import * as z from 'zod'

import type { CdpBrowserController } from '../controller'
import { successResponse } from './utils'
import { logger } from '../types'
import { errorResponse, successResponse } from './utils'

export const OpenSchema = z.object({
  url: z.url().describe('URL to open in the controlled Electron window'),
  timeout: z.number().optional().describe('Timeout in milliseconds for navigation (default: 10000)'),
  show: z.boolean().optional().describe('Whether to show the browser window (default: false)'),
  sessionId: z
    .string()
  url: z.url().describe('URL to navigate to'),
  format: z
    .enum(['html', 'txt', 'markdown', 'json'])
    .optional()
    .describe('Session identifier; separate sessions keep separate pages (default: default)')
    .describe('If set, return page content in this format. If not set, just open the page and return tabId.'),
  timeout: z.number().optional().describe('Navigation timeout in ms (default: 10000)'),
  privateMode: z.boolean().optional().describe('Use incognito mode, no data persisted (default: false)'),
  newTab: z.boolean().optional().describe('Open in new tab, required for parallel requests (default: false)'),
  showWindow: z.boolean().optional().default(true).describe('Show browser window (default: true)')
})

export const openToolDefinition = {
  name: 'open',
  description: 'Open a URL in a hidden Electron window controlled via Chrome DevTools Protocol',
  description:
    'Navigate to a URL in a browser window. If format is specified, returns { tabId, content } with page content in that format. Otherwise, returns { currentUrl, title, tabId } for subsequent operations with execute tool. Set newTab=true when opening multiple URLs in parallel.',
  inputSchema: {
    type: 'object',
    properties: {
      url: {
        type: 'string',
        description: 'URL to load'
        description: 'URL to navigate to'
      },
      format: {
        type: 'string',
        enum: ['html', 'txt', 'markdown', 'json'],
        description: 'If set, return page content in this format. If not set, just open the page and return tabId.'
      },
      timeout: {
        type: 'number',
        description: 'Navigation timeout in milliseconds (default 10000)'
        description: 'Navigation timeout in ms (default: 10000)'
      },
      show: {
      privateMode: {
        type: 'boolean',
        description: 'Whether to show the browser window (default false)'
        description: 'Use incognito mode, no data persisted (default: false)'
      },
      sessionId: {
        type: 'string',
        description: 'Session identifier; separate sessions keep separate pages (default: default)'
      newTab: {
        type: 'boolean',
        description: 'Open in new tab, required for parallel requests (default: false)'
      },
      showWindow: {
        type: 'boolean',
        description: 'Show browser window (default: true)'
      }
    },
    required: ['url']
@@ -41,7 +54,28 @@ export const openToolDefinition = {
}

export async function handleOpen(controller: CdpBrowserController, args: unknown) {
  const { url, timeout, show, sessionId } = OpenSchema.parse(args)
  const res = await controller.open(url, timeout ?? 10000, show ?? false, sessionId ?? 'default')
  return successResponse(JSON.stringify(res))
  try {
    const { url, format, timeout, privateMode, newTab, showWindow } = OpenSchema.parse(args)

    if (format) {
      const { tabId, content } = await controller.fetch(
        url,
        format,
        timeout ?? 10000,
        privateMode ?? false,
        newTab ?? false,
        showWindow
      )
      return successResponse(JSON.stringify({ tabId, content }))
    } else {
      const res = await controller.open(url, timeout ?? 10000, privateMode ?? false, newTab ?? false, showWindow)
      return successResponse(JSON.stringify(res))
    }
  } catch (error) {
    logger.error('Open failed', {
      error,
      url: args && typeof args === 'object' && 'url' in args ? args.url : undefined
    })
    return errorResponse(error instanceof Error ? error : String(error))
  }
}

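For orientation, a minimal sketch of a call against the reworked open tool; the argument names come from OpenSchema above, while the client-side wiring is assumed:

// Hypothetical invocation (sketch, not part of the diff): open and fetch in one step
await handleOpen(controller, {
  url: 'https://example.com',
  format: 'markdown', // omit format to get { currentUrl, title, tabId } instead
  newTab: true, // set when opening several URLs in parallel
  showWindow: false
})
// resolves to successResponse(JSON.stringify({ tabId, content }))
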
@@ -1,34 +1,43 @@
import * as z from 'zod'

import type { CdpBrowserController } from '../controller'
import { successResponse } from './utils'
import { logger } from '../types'
import { errorResponse, successResponse } from './utils'

/** Zod schema for validating reset tool arguments */
export const ResetSchema = z.object({
  sessionId: z.string().optional().describe('Session identifier to reset; omit to reset all sessions')
  privateMode: z.boolean().optional().describe('true=private window, false=normal window, omit=all windows'),
  tabId: z.string().optional().describe('Close specific tab only (requires privateMode)')
})

/** MCP tool definition for the reset tool */
export const resetToolDefinition = {
  name: 'reset',
  description: 'Reset the controlled window and detach debugger',
  description:
    'Close browser windows and clear state. Call when done browsing to free resources. Omit all parameters to close everything.',
  inputSchema: {
    type: 'object',
    properties: {
      sessionId: {
      privateMode: {
        type: 'boolean',
        description: 'true=reset private window only, false=reset normal window only, omit=reset all'
      },
      tabId: {
        type: 'string',
        description: 'Session identifier to reset; omit to reset all sessions'
        description: 'Close specific tab only (requires privateMode to be set)'
      }
    }
  }
}

/**
 * Handler for the reset MCP tool.
 * Closes browser window(s) and detaches debugger for the specified session or all sessions.
 */
export async function handleReset(controller: CdpBrowserController, args: unknown) {
  const { sessionId } = ResetSchema.parse(args)
  await controller.reset(sessionId)
  return successResponse('reset')
  try {
    const { privateMode, tabId } = ResetSchema.parse(args)
    await controller.reset(privateMode, tabId)
    return successResponse('reset')
  } catch (error) {
    logger.error('Reset failed', {
      error,
      privateMode: args && typeof args === 'object' && 'privateMode' in args ? args.privateMode : undefined
    })
    return errorResponse(error instanceof Error ? error : String(error))
  }
}

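A quick sketch of the new reset semantics; the argument shapes come from ResetSchema, and the controller behavior is assumed from the descriptions above:

// Hypothetical calls (sketch): close one private tab, then tear everything down
await handleReset(controller, { privateMode: true, tabId: 'tab-1' }) // tabId requires privateMode
await handleReset(controller, {}) // omit all parameters to close all windows
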
@@ -5,9 +5,10 @@ export function successResponse(text: string) {
  }
}

export function errorResponse(error: Error) {
export function errorResponse(error: Error | string) {
  const message = error instanceof Error ? error.message : error
  return {
    content: [{ type: 'text', text: error.message }],
    content: [{ type: 'text', text: message }],
    isError: true
  }
}

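With the widened signature, handlers can now pass either an Error or a plain string and get the same response shape back; a minimal sketch:

// Both forms yield { content: [{ type: 'text', text: '...' }], isError: true }
errorResponse(new Error('navigation timed out'))
errorResponse('navigation timed out')
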
@@ -1,4 +1,24 @@
import { loggerService } from '@logger'
import type { BrowserView, BrowserWindow } from 'electron'

export const logger = loggerService.withContext('MCPBrowserCDP')
export const userAgent = 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10.15; rv:145.0) Gecko/20100101 Firefox/145.0'
export const userAgent =
  'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/131.0.0.0 Safari/537.36'

export interface TabInfo {
  id: string
  view: BrowserView
  url: string
  title: string
  lastActive: number
}

export interface WindowInfo {
  windowKey: string
  privateMode: boolean
  window: BrowserWindow
  tabs: Map<string, TabInfo>
  activeTabId: string | null
  lastActive: number
  tabBarView?: BrowserView
}

@@ -36,7 +36,7 @@ export function createInMemoryMCPServer(
      return new FetchServer().server
    }
    case BuiltinMCPServerNames.filesystem: {
      return new FileSystemServer(args).server
      return new FileSystemServer(envs.WORKSPACE_ROOT).server
    }
    case BuiltinMCPServerNames.difyKnowledge: {
      const difyKey = envs.DIFY_KEY

@@ -1,652 +0,0 @@
// port https://github.com/modelcontextprotocol/servers/blob/main/src/filesystem/index.ts

import { loggerService } from '@logger'
import { Server } from '@modelcontextprotocol/sdk/server/index.js'
import { CallToolRequestSchema, ListToolsRequestSchema } from '@modelcontextprotocol/sdk/types.js'
import { createTwoFilesPatch } from 'diff'
import fs from 'fs/promises'
import { minimatch } from 'minimatch'
import os from 'os'
import path from 'path'
import * as z from 'zod'

const logger = loggerService.withContext('MCP:FileSystemServer')

// Normalize all paths consistently
function normalizePath(p: string): string {
  return path.normalize(p)
}

function expandHome(filepath: string): string {
  if (filepath.startsWith('~/') || filepath === '~') {
    return path.join(os.homedir(), filepath.slice(1))
  }
  return filepath
}

// Security utilities
async function validatePath(allowedDirectories: string[], requestedPath: string): Promise<string> {
  const expandedPath = expandHome(requestedPath)
  const absolute = path.isAbsolute(expandedPath)
    ? path.resolve(expandedPath)
    : path.resolve(process.cwd(), expandedPath)

  const normalizedRequested = normalizePath(absolute)

  // Check if path is within allowed directories
  const isAllowed = allowedDirectories.some((dir) => normalizedRequested.startsWith(dir))
  if (!isAllowed) {
    throw new Error(
      `Access denied - path outside allowed directories: ${absolute} not in ${allowedDirectories.join(', ')}`
    )
  }

  // Handle symlinks by checking their real path
  try {
    const realPath = await fs.realpath(absolute)
    const normalizedReal = normalizePath(realPath)
    const isRealPathAllowed = allowedDirectories.some((dir) => normalizedReal.startsWith(dir))
    if (!isRealPathAllowed) {
      throw new Error('Access denied - symlink target outside allowed directories')
    }
    return realPath
  } catch (error) {
    // For new files that don't exist yet, verify parent directory
    const parentDir = path.dirname(absolute)
    try {
      const realParentPath = await fs.realpath(parentDir)
      const normalizedParent = normalizePath(realParentPath)
      const isParentAllowed = allowedDirectories.some((dir) => normalizedParent.startsWith(dir))
      if (!isParentAllowed) {
        throw new Error('Access denied - parent directory outside allowed directories')
      }
      return absolute
    } catch {
      throw new Error(`Parent directory does not exist: ${parentDir}`)
    }
  }
}

// Schema definitions
const ReadFileArgsSchema = z.object({
  path: z.string()
})

const ReadMultipleFilesArgsSchema = z.object({
  paths: z.array(z.string())
})

const WriteFileArgsSchema = z.object({
  path: z.string(),
  content: z.string()
})

const EditOperation = z.object({
  oldText: z.string().describe('Text to search for - must match exactly'),
  newText: z.string().describe('Text to replace with')
})

const EditFileArgsSchema = z.object({
  path: z.string(),
  edits: z.array(EditOperation),
  dryRun: z.boolean().default(false).describe('Preview changes using git-style diff format')
})

const CreateDirectoryArgsSchema = z.object({
  path: z.string()
})

const ListDirectoryArgsSchema = z.object({
  path: z.string()
})

const DirectoryTreeArgsSchema = z.object({
  path: z.string()
})

const MoveFileArgsSchema = z.object({
  source: z.string(),
  destination: z.string()
})

const SearchFilesArgsSchema = z.object({
  path: z.string(),
  pattern: z.string(),
  excludePatterns: z.array(z.string()).optional().default([])
})

const GetFileInfoArgsSchema = z.object({
  path: z.string()
})

interface FileInfo {
  size: number
  created: Date
  modified: Date
  accessed: Date
  isDirectory: boolean
  isFile: boolean
  permissions: string
}

// Tool implementations
async function getFileStats(filePath: string): Promise<FileInfo> {
  const stats = await fs.stat(filePath)
  return {
    size: stats.size,
    created: stats.birthtime,
    modified: stats.mtime,
    accessed: stats.atime,
    isDirectory: stats.isDirectory(),
    isFile: stats.isFile(),
    permissions: stats.mode.toString(8).slice(-3)
  }
}

async function searchFiles(
  allowedDirectories: string[],
  rootPath: string,
  pattern: string,
  excludePatterns: string[] = []
): Promise<string[]> {
  const results: string[] = []

  async function search(currentPath: string) {
    const entries = await fs.readdir(currentPath, { withFileTypes: true })

    for (const entry of entries) {
      const fullPath = path.join(currentPath, entry.name)

      try {
        // Validate each path before processing
        await validatePath(allowedDirectories, fullPath)

        // Check if path matches any exclude pattern
        const relativePath = path.relative(rootPath, fullPath)
        const shouldExclude = excludePatterns.some((pattern) => {
          const globPattern = pattern.includes('*') ? pattern : `**/${pattern}/**`
          return minimatch(relativePath, globPattern, { dot: true })
        })

        if (shouldExclude) {
          continue
        }

        if (entry.name.toLowerCase().includes(pattern.toLowerCase())) {
          results.push(fullPath)
        }

        if (entry.isDirectory()) {
          await search(fullPath)
        }
      } catch (error) {
        // Skip invalid paths during search
      }
    }
  }

  await search(rootPath)
  return results
}

// file editing and diffing utilities
function normalizeLineEndings(text: string): string {
  return text.replace(/\r\n/g, '\n')
}

function createUnifiedDiff(originalContent: string, newContent: string, filepath: string = 'file'): string {
  // Ensure consistent line endings for diff
  const normalizedOriginal = normalizeLineEndings(originalContent)
  const normalizedNew = normalizeLineEndings(newContent)

  return createTwoFilesPatch(filepath, filepath, normalizedOriginal, normalizedNew, 'original', 'modified')
}

async function applyFileEdits(
  filePath: string,
  edits: Array<{ oldText: string; newText: string }>,
  dryRun = false
): Promise<string> {
  // Read file content and normalize line endings
  const content = normalizeLineEndings(await fs.readFile(filePath, 'utf-8'))

  // Apply edits sequentially
  let modifiedContent = content
  for (const edit of edits) {
    const normalizedOld = normalizeLineEndings(edit.oldText)
    const normalizedNew = normalizeLineEndings(edit.newText)

    // If exact match exists, use it
    if (modifiedContent.includes(normalizedOld)) {
      modifiedContent = modifiedContent.replace(normalizedOld, normalizedNew)
      continue
    }

    // Otherwise, try line-by-line matching with flexibility for whitespace
    const oldLines = normalizedOld.split('\n')
    const contentLines = modifiedContent.split('\n')
    let matchFound = false

    for (let i = 0; i <= contentLines.length - oldLines.length; i++) {
      const potentialMatch = contentLines.slice(i, i + oldLines.length)

      // Compare lines with normalized whitespace
      const isMatch = oldLines.every((oldLine, j) => {
        const contentLine = potentialMatch[j]
        return oldLine.trim() === contentLine.trim()
      })

      if (isMatch) {
        // Preserve original indentation of first line
        const originalIndent = contentLines[i].match(/^\s*/)?.[0] || ''
        const newLines = normalizedNew.split('\n').map((line, j) => {
          if (j === 0) return originalIndent + line.trimStart()
          // For subsequent lines, try to preserve relative indentation
          const oldIndent = oldLines[j]?.match(/^\s*/)?.[0] || ''
          const newIndent = line.match(/^\s*/)?.[0] || ''
          if (oldIndent && newIndent) {
            const relativeIndent = newIndent.length - oldIndent.length
            return originalIndent + ' '.repeat(Math.max(0, relativeIndent)) + line.trimStart()
          }
          return line
        })

        contentLines.splice(i, oldLines.length, ...newLines)
        modifiedContent = contentLines.join('\n')
        matchFound = true
        break
      }
    }

    if (!matchFound) {
      throw new Error(`Could not find exact match for edit:\n${edit.oldText}`)
    }
  }

  // Create unified diff
  const diff = createUnifiedDiff(content, modifiedContent, filePath)

  // Format diff with appropriate number of backticks
  let numBackticks = 3
  while (diff.includes('`'.repeat(numBackticks))) {
    numBackticks++
  }
  const formattedDiff = `${'`'.repeat(numBackticks)}diff\n${diff}${'`'.repeat(numBackticks)}\n\n`

  if (!dryRun) {
    await fs.writeFile(filePath, modifiedContent, 'utf-8')
  }

  return formattedDiff
}

class FileSystemServer {
  public server: Server
  private allowedDirectories: string[]
  constructor(allowedDirs: string[]) {
    if (!Array.isArray(allowedDirs) || allowedDirs.length === 0) {
      throw new Error('No allowed directories provided, please specify at least one directory in args')
    }

    this.allowedDirectories = allowedDirs.map((dir) => normalizePath(path.resolve(expandHome(dir))))

    // Validate that all directories exist and are accessible
    this.validateDirs().catch((error) => {
      logger.error('Error validating allowed directories:', error)
      throw new Error(`Error validating allowed directories: ${error}`)
    })

    this.server = new Server(
      {
        name: 'secure-filesystem-server',
        version: '0.2.0'
      },
      {
        capabilities: {
          tools: {}
        }
      }
    )
    this.initialize()
  }

  async validateDirs() {
    // Validate that all directories exist and are accessible
    await Promise.all(
      this.allowedDirectories.map(async (dir) => {
        try {
          const stats = await fs.stat(expandHome(dir))
          if (!stats.isDirectory()) {
            logger.error(`Error: ${dir} is not a directory`)
            throw new Error(`Error: ${dir} is not a directory`)
          }
        } catch (error: any) {
          logger.error(`Error accessing directory ${dir}:`, error)
          throw new Error(`Error accessing directory ${dir}:`, error)
        }
      })
    )
  }

  initialize() {
    // Tool handlers
    this.server.setRequestHandler(ListToolsRequestSchema, async () => {
      return {
        tools: [
          {
            name: 'read_file',
            description:
              'Read the complete contents of a file from the file system. ' +
              'Handles various text encodings and provides detailed error messages ' +
              'if the file cannot be read. Use this tool when you need to examine ' +
              'the contents of a single file. Only works within allowed directories.',
            inputSchema: z.toJSONSchema(ReadFileArgsSchema)
          },
          {
            name: 'read_multiple_files',
            description:
              'Read the contents of multiple files simultaneously. This is more ' +
              'efficient than reading files one by one when you need to analyze ' +
              "or compare multiple files. Each file's content is returned with its " +
              "path as a reference. Failed reads for individual files won't stop " +
              'the entire operation. Only works within allowed directories.',
            inputSchema: z.toJSONSchema(ReadMultipleFilesArgsSchema)
          },
          {
            name: 'write_file',
            description:
              'Create a new file or completely overwrite an existing file with new content. ' +
              'Use with caution as it will overwrite existing files without warning. ' +
              'Handles text content with proper encoding. Only works within allowed directories.',
            inputSchema: z.toJSONSchema(WriteFileArgsSchema)
          },
          {
            name: 'edit_file',
            description:
              'Make line-based edits to a text file. Each edit replaces exact line sequences ' +
              'with new content. Returns a git-style diff showing the changes made. ' +
              'Only works within allowed directories.',
            inputSchema: z.toJSONSchema(EditFileArgsSchema)
          },
          {
            name: 'create_directory',
            description:
              'Create a new directory or ensure a directory exists. Can create multiple ' +
              'nested directories in one operation. If the directory already exists, ' +
              'this operation will succeed silently. Perfect for setting up directory ' +
              'structures for projects or ensuring required paths exist. Only works within allowed directories.',
            inputSchema: z.toJSONSchema(CreateDirectoryArgsSchema)
          },
          {
            name: 'list_directory',
            description:
              'Get a detailed listing of all files and directories in a specified path. ' +
              'Results clearly distinguish between files and directories with [FILE] and [DIR] ' +
              'prefixes. This tool is essential for understanding directory structure and ' +
              'finding specific files within a directory. Only works within allowed directories.',
            inputSchema: z.toJSONSchema(ListDirectoryArgsSchema)
          },
          {
            name: 'directory_tree',
            description:
              'Get a recursive tree view of files and directories as a JSON structure. ' +
              "Each entry includes 'name', 'type' (file/directory), and 'children' for directories. " +
              'Files have no children array, while directories always have a children array (which may be empty). ' +
              'The output is formatted with 2-space indentation for readability. Only works within allowed directories.',
            inputSchema: z.toJSONSchema(DirectoryTreeArgsSchema)
          },
          {
            name: 'move_file',
            description:
              'Move or rename files and directories. Can move files between directories ' +
              'and rename them in a single operation. If the destination exists, the ' +
              'operation will fail. Works across different directories and can be used ' +
              'for simple renaming within the same directory. Both source and destination must be within allowed directories.',
            inputSchema: z.toJSONSchema(MoveFileArgsSchema)
          },
          {
            name: 'search_files',
            description:
              'Recursively search for files and directories matching a pattern. ' +
              'Searches through all subdirectories from the starting path. The search ' +
              'is case-insensitive and matches partial names. Returns full paths to all ' +
              "matching items. Great for finding files when you don't know their exact location. " +
              'Only searches within allowed directories.',
            inputSchema: z.toJSONSchema(SearchFilesArgsSchema)
          },
          {
            name: 'get_file_info',
            description:
              'Retrieve detailed metadata about a file or directory. Returns comprehensive ' +
              'information including size, creation time, last modified time, permissions, ' +
              'and type. This tool is perfect for understanding file characteristics ' +
              'without reading the actual content. Only works within allowed directories.',
            inputSchema: z.toJSONSchema(GetFileInfoArgsSchema)
          },
          {
            name: 'list_allowed_directories',
            description:
              'Returns the list of directories that this server is allowed to access. ' +
              'Use this to understand which directories are available before trying to access files.',
            inputSchema: {
              type: 'object',
              properties: {},
              required: []
            }
          }
        ]
      }
    })

    this.server.setRequestHandler(CallToolRequestSchema, async (request) => {
      try {
        const { name, arguments: args } = request.params

        switch (name) {
          case 'read_file': {
            const parsed = ReadFileArgsSchema.safeParse(args)
            if (!parsed.success) {
              throw new Error(`Invalid arguments for read_file: ${parsed.error}`)
            }
            const validPath = await validatePath(this.allowedDirectories, parsed.data.path)
            const content = await fs.readFile(validPath, 'utf-8')
            return {
              content: [{ type: 'text', text: content }]
            }
          }

          case 'read_multiple_files': {
            const parsed = ReadMultipleFilesArgsSchema.safeParse(args)
            if (!parsed.success) {
              throw new Error(`Invalid arguments for read_multiple_files: ${parsed.error}`)
            }
            const results = await Promise.all(
              parsed.data.paths.map(async (filePath: string) => {
                try {
                  const validPath = await validatePath(this.allowedDirectories, filePath)
                  const content = await fs.readFile(validPath, 'utf-8')
                  return `${filePath}:\n${content}\n`
                } catch (error) {
                  const errorMessage = error instanceof Error ? error.message : String(error)
                  return `${filePath}: Error - ${errorMessage}`
                }
              })
            )
            return {
              content: [{ type: 'text', text: results.join('\n---\n') }]
            }
          }

          case 'write_file': {
            const parsed = WriteFileArgsSchema.safeParse(args)
            if (!parsed.success) {
              throw new Error(`Invalid arguments for write_file: ${parsed.error}`)
            }
            const validPath = await validatePath(this.allowedDirectories, parsed.data.path)
            await fs.writeFile(validPath, parsed.data.content, 'utf-8')
            return {
              content: [{ type: 'text', text: `Successfully wrote to ${parsed.data.path}` }]
            }
          }

          case 'edit_file': {
            const parsed = EditFileArgsSchema.safeParse(args)
            if (!parsed.success) {
              throw new Error(`Invalid arguments for edit_file: ${parsed.error}`)
            }
            const validPath = await validatePath(this.allowedDirectories, parsed.data.path)
            const result = await applyFileEdits(validPath, parsed.data.edits, parsed.data.dryRun)
            return {
              content: [{ type: 'text', text: result }]
            }
          }

          case 'create_directory': {
            const parsed = CreateDirectoryArgsSchema.safeParse(args)
            if (!parsed.success) {
              throw new Error(`Invalid arguments for create_directory: ${parsed.error}`)
            }
            const validPath = await validatePath(this.allowedDirectories, parsed.data.path)
            await fs.mkdir(validPath, { recursive: true })
            return {
              content: [{ type: 'text', text: `Successfully created directory ${parsed.data.path}` }]
            }
          }

          case 'list_directory': {
            const parsed = ListDirectoryArgsSchema.safeParse(args)
            if (!parsed.success) {
              throw new Error(`Invalid arguments for list_directory: ${parsed.error}`)
            }
            const validPath = await validatePath(this.allowedDirectories, parsed.data.path)
            const entries = await fs.readdir(validPath, { withFileTypes: true })
            const formatted = entries
              .map((entry) => `${entry.isDirectory() ? '[DIR]' : '[FILE]'} ${entry.name}`)
              .join('\n')
            return {
              content: [{ type: 'text', text: formatted }]
            }
          }

          case 'directory_tree': {
            const parsed = DirectoryTreeArgsSchema.safeParse(args)
            if (!parsed.success) {
              throw new Error(`Invalid arguments for directory_tree: ${parsed.error}`)
            }

            interface TreeEntry {
              name: string
              type: 'file' | 'directory'
              children?: TreeEntry[]
            }

            async function buildTree(allowedDirectories: string[], currentPath: string): Promise<TreeEntry[]> {
              const validPath = await validatePath(allowedDirectories, currentPath)
              const entries = await fs.readdir(validPath, { withFileTypes: true })
              const result: TreeEntry[] = []

              for (const entry of entries) {
                const entryData: TreeEntry = {
                  name: entry.name,
                  type: entry.isDirectory() ? 'directory' : 'file'
                }

                if (entry.isDirectory()) {
                  const subPath = path.join(currentPath, entry.name)
                  entryData.children = await buildTree(allowedDirectories, subPath)
                }

                result.push(entryData)
              }

              return result
            }

            const treeData = await buildTree(this.allowedDirectories, parsed.data.path)
            return {
              content: [
                {
                  type: 'text',
                  text: JSON.stringify(treeData, null, 2)
                }
              ]
            }
          }

          case 'move_file': {
            const parsed = MoveFileArgsSchema.safeParse(args)
            if (!parsed.success) {
              throw new Error(`Invalid arguments for move_file: ${parsed.error}`)
            }
            const validSourcePath = await validatePath(this.allowedDirectories, parsed.data.source)
            const validDestPath = await validatePath(this.allowedDirectories, parsed.data.destination)
            await fs.rename(validSourcePath, validDestPath)
            return {
              content: [
                { type: 'text', text: `Successfully moved ${parsed.data.source} to ${parsed.data.destination}` }
              ]
            }
          }

          case 'search_files': {
            const parsed = SearchFilesArgsSchema.safeParse(args)
            if (!parsed.success) {
              throw new Error(`Invalid arguments for search_files: ${parsed.error}`)
            }
            const validPath = await validatePath(this.allowedDirectories, parsed.data.path)
            const results = await searchFiles(
              this.allowedDirectories,
              validPath,
              parsed.data.pattern,
              parsed.data.excludePatterns
            )
            return {
              content: [{ type: 'text', text: results.length > 0 ? results.join('\n') : 'No matches found' }]
            }
          }

          case 'get_file_info': {
            const parsed = GetFileInfoArgsSchema.safeParse(args)
            if (!parsed.success) {
              throw new Error(`Invalid arguments for get_file_info: ${parsed.error}`)
            }
            const validPath = await validatePath(this.allowedDirectories, parsed.data.path)
            const info = await getFileStats(validPath)
            return {
              content: [
                {
                  type: 'text',
                  text: Object.entries(info)
                    .map(([key, value]) => `${key}: ${value}`)
                    .join('\n')
                }
              ]
            }
          }

          case 'list_allowed_directories': {
            return {
              content: [
                {
                  type: 'text',
                  text: `Allowed directories:\n${this.allowedDirectories.join('\n')}`
                }
              ]
            }
          }

          default:
            throw new Error(`Unknown tool: ${name}`)
        }
      } catch (error) {
        const errorMessage = error instanceof Error ? error.message : String(error)
        return {
          content: [{ type: 'text', text: `Error: ${errorMessage}` }],
          isError: true
        }
      }
    })
  }
}

export default FileSystemServer
2 src/main/mcpServers/filesystem/index.ts Normal file
@@ -0,0 +1,2 @@
// Re-export FileSystemServer to maintain existing import pattern
export { default, FileSystemServer } from './server'
118 src/main/mcpServers/filesystem/server.ts Normal file
@@ -0,0 +1,118 @@
import { Server } from '@modelcontextprotocol/sdk/server/index.js'
import { CallToolRequestSchema, ListToolsRequestSchema } from '@modelcontextprotocol/sdk/types.js'
import { app } from 'electron'
import fs from 'fs/promises'
import path from 'path'

import {
  deleteToolDefinition,
  editToolDefinition,
  globToolDefinition,
  grepToolDefinition,
  handleDeleteTool,
  handleEditTool,
  handleGlobTool,
  handleGrepTool,
  handleLsTool,
  handleReadTool,
  handleWriteTool,
  lsToolDefinition,
  readToolDefinition,
  writeToolDefinition
} from './tools'
import { logger } from './types'

export class FileSystemServer {
  public server: Server
  private baseDir: string

  constructor(baseDir?: string) {
    if (baseDir && path.isAbsolute(baseDir)) {
      this.baseDir = baseDir
      logger.info(`Using provided baseDir for filesystem MCP: ${baseDir}`)
    } else {
      const userData = app.getPath('userData')
      this.baseDir = path.join(userData, 'Data', 'Workspace')
      logger.info(`Using default workspace for filesystem MCP baseDir: ${this.baseDir}`)
    }

    this.server = new Server(
      {
        name: 'filesystem-server',
        version: '2.0.0'
      },
      {
        capabilities: {
          tools: {}
        }
      }
    )

    this.initialize()
  }

  async initialize() {
    try {
      await fs.mkdir(this.baseDir, { recursive: true })
    } catch (error) {
      logger.error('Failed to create filesystem MCP baseDir', { error, baseDir: this.baseDir })
    }

    // Register tool list handler
    this.server.setRequestHandler(ListToolsRequestSchema, async () => {
      return {
        tools: [
          globToolDefinition,
          lsToolDefinition,
          grepToolDefinition,
          readToolDefinition,
          editToolDefinition,
          writeToolDefinition,
          deleteToolDefinition
        ]
      }
    })

    // Register tool call handler
    this.server.setRequestHandler(CallToolRequestSchema, async (request) => {
      try {
        const { name, arguments: args } = request.params

        switch (name) {
          case 'glob':
            return await handleGlobTool(args, this.baseDir)

          case 'ls':
            return await handleLsTool(args, this.baseDir)

          case 'grep':
            return await handleGrepTool(args, this.baseDir)

          case 'read':
            return await handleReadTool(args, this.baseDir)

          case 'edit':
            return await handleEditTool(args, this.baseDir)

          case 'write':
            return await handleWriteTool(args, this.baseDir)

          case 'delete':
            return await handleDeleteTool(args, this.baseDir)

          default:
            throw new Error(`Unknown tool: ${name}`)
        }
      } catch (error) {
        const errorMessage = error instanceof Error ? error.message : String(error)
        logger.error(`Tool execution error for ${request.params.name}:`, { error })
        return {
          content: [{ type: 'text', text: `Error: ${errorMessage}` }],
          isError: true
        }
      }
    })
  }
}

export default FileSystemServer
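For reference, a minimal sketch of constructing the rewritten server; the fallback behavior follows the constructor above, and the paths are illustrative:

// An absolute baseDir is used as-is; anything else falls back to <userData>/Data/Workspace
const scoped = new FileSystemServer('/tmp/agent-workspace')
const fallback = new FileSystemServer() // default workspace under Electron userData
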
93 src/main/mcpServers/filesystem/tools/delete.ts Normal file
@@ -0,0 +1,93 @@
import fs from 'fs/promises'
import path from 'path'
import * as z from 'zod'

import { logger, validatePath } from '../types'

// Schema definition
export const DeleteToolSchema = z.object({
  path: z.string().describe('The path to the file or directory to delete'),
  recursive: z.boolean().optional().describe('For directories, whether to delete recursively (default: false)')
})

// Tool definition with detailed description
export const deleteToolDefinition = {
  name: 'delete',
  description: `Deletes a file or directory from the filesystem.

CAUTION: This operation cannot be undone!

- For files: simply provide the path
- For empty directories: provide the path
- For non-empty directories: set recursive=true
- The path must be an absolute path, not a relative path
- Always verify the path before deleting to avoid data loss`,
  inputSchema: z.toJSONSchema(DeleteToolSchema)
}

// Handler implementation
export async function handleDeleteTool(args: unknown, baseDir: string) {
  const parsed = DeleteToolSchema.safeParse(args)
  if (!parsed.success) {
    throw new Error(`Invalid arguments for delete: ${parsed.error}`)
  }

  const targetPath = parsed.data.path
  const validPath = await validatePath(targetPath, baseDir)
  const recursive = parsed.data.recursive || false

  // Check if path exists and get stats
  let stats
  try {
    stats = await fs.stat(validPath)
  } catch (error: any) {
    if (error.code === 'ENOENT') {
      throw new Error(`Path not found: ${targetPath}`)
    }
    throw error
  }

  const isDirectory = stats.isDirectory()
  const relativePath = path.relative(baseDir, validPath)

  // Perform deletion
  try {
    if (isDirectory) {
      if (recursive) {
        // Delete directory recursively
        await fs.rm(validPath, { recursive: true, force: true })
      } else {
        // Try to delete empty directory
        await fs.rmdir(validPath)
      }
    } else {
      // Delete file
      await fs.unlink(validPath)
    }
  } catch (error: any) {
    if (error.code === 'ENOTEMPTY') {
      throw new Error(`Directory not empty: ${targetPath}. Use recursive=true to delete non-empty directories.`)
    }
    throw new Error(`Failed to delete: ${error.message}`)
  }

  // Log the operation
  logger.info('Path deleted', {
    path: validPath,
    type: isDirectory ? 'directory' : 'file',
    recursive: isDirectory ? recursive : undefined
  })

  // Format output
  const itemType = isDirectory ? 'Directory' : 'File'
  const recursiveNote = isDirectory && recursive ? ' (recursive)' : ''

  return {
    content: [
      {
        type: 'text',
        text: `${itemType} deleted${recursiveNote}: ${relativePath}`
      }
    ]
  }
}
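A sketch of typical delete calls; the argument shapes come from DeleteToolSchema, while baseDir and the paths are illustrative:

// Hypothetical calls: a single file, then a non-empty directory
await handleDeleteTool({ path: '/tmp/agent-workspace/old.txt' }, baseDir)
await handleDeleteTool({ path: '/tmp/agent-workspace/cache', recursive: true }, baseDir)
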
130 src/main/mcpServers/filesystem/tools/edit.ts Normal file
@@ -0,0 +1,130 @@
import fs from 'fs/promises'
import path from 'path'
import * as z from 'zod'

import { logger, replaceWithFuzzyMatch, validatePath } from '../types'

// Schema definition
export const EditToolSchema = z.object({
  file_path: z.string().describe('The path to the file to modify'),
  old_string: z.string().describe('The text to replace'),
  new_string: z.string().describe('The text to replace it with'),
  replace_all: z.boolean().optional().default(false).describe('Replace all occurrences of old_string (default false)')
})

// Tool definition with detailed description
export const editToolDefinition = {
  name: 'edit',
  description: `Performs exact string replacements in files.

- You must use the 'read' tool at least once before editing
- The file_path must be an absolute path, not a relative path
- Preserve exact indentation from read output (after the line number prefix)
- Never include line number prefixes in old_string or new_string
- ALWAYS prefer editing existing files over creating new ones
- The edit will FAIL if old_string is not found in the file
- The edit will FAIL if old_string appears multiple times (provide more context or use replace_all)
- The edit will FAIL if old_string equals new_string
- Use replace_all to rename variables or replace all occurrences`,
  inputSchema: z.toJSONSchema(EditToolSchema)
}

// Handler implementation
export async function handleEditTool(args: unknown, baseDir: string) {
  const parsed = EditToolSchema.safeParse(args)
  if (!parsed.success) {
    throw new Error(`Invalid arguments for edit: ${parsed.error}`)
  }

  const { file_path: filePath, old_string: oldString, new_string: newString, replace_all: replaceAll } = parsed.data

  // Validate path
  const validPath = await validatePath(filePath, baseDir)

  // Check if file exists
  try {
    const stats = await fs.stat(validPath)
    if (!stats.isFile()) {
      throw new Error(`Path is not a file: ${filePath}`)
    }
  } catch (error: any) {
    if (error.code === 'ENOENT') {
      // If old_string is empty, this is a create new file operation
      if (oldString === '') {
        // Create parent directory if needed
        const parentDir = path.dirname(validPath)
        await fs.mkdir(parentDir, { recursive: true })

        // Write the new content
        await fs.writeFile(validPath, newString, 'utf-8')

        logger.info('File created', { path: validPath })

        const relativePath = path.relative(baseDir, validPath)
        return {
          content: [
            {
              type: 'text',
              text: `Created new file: ${relativePath}\nLines: ${newString.split('\n').length}`
            }
          ]
        }
      }
      throw new Error(`File not found: ${filePath}`)
    }
    throw error
  }

  // Read current content
  const content = await fs.readFile(validPath, 'utf-8')

  // Handle special case: old_string is empty (create file with content)
  if (oldString === '') {
    await fs.writeFile(validPath, newString, 'utf-8')

    logger.info('File overwritten', { path: validPath })

    const relativePath = path.relative(baseDir, validPath)
    return {
      content: [
        {
          type: 'text',
          text: `Overwrote file: ${relativePath}\nLines: ${newString.split('\n').length}`
        }
      ]
    }
  }

  // Perform the replacement with fuzzy matching
  const newContent = replaceWithFuzzyMatch(content, oldString, newString, replaceAll)

  // Write the modified content
  await fs.writeFile(validPath, newContent, 'utf-8')

  logger.info('File edited', {
    path: validPath,
    replaceAll
  })

  // Generate a simple diff summary
  const oldLines = content.split('\n').length
  const newLines = newContent.split('\n').length
  const lineDiff = newLines - oldLines

  const relativePath = path.relative(baseDir, validPath)
  let diffSummary = `Edited: ${relativePath}`
  if (lineDiff > 0) {
    diffSummary += `\n+${lineDiff} lines`
  } else if (lineDiff < 0) {
    diffSummary += `\n${lineDiff} lines`
  }

  return {
    content: [
      {
        type: 'text',
        text: diffSummary
      }
    ]
  }
}
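A sketch of the three edit modes the handler supports; shapes follow EditToolSchema, and file paths and contents are illustrative:

// 1. Plain replacement; 2. replace_all for renames; 3. empty old_string creates/overwrites the file
await handleEditTool({ file_path: '/ws/a.ts', old_string: 'foo()', new_string: 'bar()' }, baseDir)
await handleEditTool({ file_path: '/ws/a.ts', old_string: 'oldName', new_string: 'newName', replace_all: true }, baseDir)
await handleEditTool({ file_path: '/ws/new.ts', old_string: '', new_string: 'export {}\n' }, baseDir)
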
149 src/main/mcpServers/filesystem/tools/glob.ts Normal file
@@ -0,0 +1,149 @@
import fs from 'fs/promises'
import path from 'path'
import * as z from 'zod'

import type { FileInfo } from '../types'
import { logger, MAX_FILES_LIMIT, runRipgrep, validatePath } from '../types'

// Schema definition
export const GlobToolSchema = z.object({
  pattern: z.string().describe('The glob pattern to match files against'),
  path: z
    .string()
    .optional()
    .describe('The directory to search in (must be absolute path). Defaults to the base directory')
})

// Tool definition with detailed description
export const globToolDefinition = {
  name: 'glob',
  description: `Fast file pattern matching tool that works with any codebase size.

- Supports glob patterns like "**/*.js" or "src/**/*.ts"
- Returns matching absolute file paths sorted by modification time (newest first)
- Use this when you need to find files by name patterns
- Patterns without "/" (e.g., "*.txt") match files at ANY depth in the directory tree
- Patterns with "/" (e.g., "src/*.ts") match relative to the search path
- Pattern syntax: * (any chars), ** (any path), {a,b} (alternatives), ? (single char)
- Results are limited to 100 files
- The path parameter must be an absolute path if specified
- If path is not specified, defaults to the base directory
- IMPORTANT: Omit the path field for the default directory (don't use "undefined" or "null")`,
  inputSchema: z.toJSONSchema(GlobToolSchema)
}

// Handler implementation
export async function handleGlobTool(args: unknown, baseDir: string) {
  const parsed = GlobToolSchema.safeParse(args)
  if (!parsed.success) {
    throw new Error(`Invalid arguments for glob: ${parsed.error}`)
  }

  const searchPath = parsed.data.path || baseDir
  const validPath = await validatePath(searchPath, baseDir)

  // Verify the search directory exists
  try {
    const stats = await fs.stat(validPath)
    if (!stats.isDirectory()) {
      throw new Error(`Path is not a directory: ${validPath}`)
    }
  } catch (error: unknown) {
    if (error && typeof error === 'object' && 'code' in error && error.code === 'ENOENT') {
      throw new Error(`Directory not found: ${validPath}`)
    }
    throw error
  }

  // Validate pattern
  const pattern = parsed.data.pattern.trim()
  if (!pattern) {
    throw new Error('Pattern cannot be empty')
  }

  const files: FileInfo[] = []
  let truncated = false

  // Build ripgrep arguments for file listing using --glob=pattern format
  const rgArgs: string[] = [
    '--files',
    '--follow',
    '--hidden',
    `--glob=${pattern}`,
    '--glob=!.git/*',
    '--glob=!node_modules/*',
    '--glob=!dist/*',
    '--glob=!build/*',
    '--glob=!__pycache__/*',
    validPath
  ]

  // Use ripgrep for file listing
  logger.debug('Running ripgrep with args', { rgArgs })
  const rgResult = await runRipgrep(rgArgs)
  logger.debug('Ripgrep result', {
    ok: rgResult.ok,
    exitCode: rgResult.exitCode,
    stdoutLength: rgResult.stdout.length,
    stdoutPreview: rgResult.stdout.slice(0, 500)
  })

  // Process results if we have stdout content
  // Exit code 2 can indicate partial errors (e.g., permission denied on some dirs) but still have valid results
  if (rgResult.ok && rgResult.stdout.length > 0) {
    const lines = rgResult.stdout.split('\n').filter(Boolean)
    logger.debug('Parsed lines from ripgrep', { lineCount: lines.length, lines })

    for (const line of lines) {
      if (files.length >= MAX_FILES_LIMIT) {
        truncated = true
        break
      }

      const filePath = line.trim()
      if (!filePath) continue

      const absolutePath = path.isAbsolute(filePath) ? filePath : path.resolve(validPath, filePath)

      try {
        const stats = await fs.stat(absolutePath)
        files.push({
          path: absolutePath,
          type: 'file', // ripgrep --files only returns files
          size: stats.size,
          modified: stats.mtime
        })
      } catch (error) {
        logger.debug('Failed to stat file from ripgrep output, skipping', { file: absolutePath, error })
      }
    }
  }

  // Sort by modification time (newest first)
  files.sort((a, b) => {
    const aTime = a.modified ? a.modified.getTime() : 0
    const bTime = b.modified ? b.modified.getTime() : 0
    return bTime - aTime
  })

  // Format output - always use absolute paths
  const output: string[] = []
  if (files.length === 0) {
    output.push(`No files found matching pattern "${parsed.data.pattern}" in ${validPath}`)
  } else {
    output.push(...files.map((f) => f.path))
    if (truncated) {
      output.push('')
      output.push(`(Results truncated to ${MAX_FILES_LIMIT} files. Consider using a more specific pattern.)`)
    }
  }

  return {
    content: [
      {
        type: 'text',
        text: output.join('\n')
      }
    ]
  }
}
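A sketch of glob calls against the schema above; paths are illustrative, and omitting path falls back to the base directory:

// Hypothetical calls: any-depth match, then a path-scoped pattern with alternatives
await handleGlobTool({ pattern: '**/*.ts' }, baseDir) // sorted newest first
await handleGlobTool({ pattern: 'src/*.{ts,tsx}', path: '/ws/project' }, baseDir)
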
266 src/main/mcpServers/filesystem/tools/grep.ts Normal file
@@ -0,0 +1,266 @@
import fs from 'fs/promises'
import path from 'path'
import * as z from 'zod'

import type { GrepMatch } from '../types'
import { isBinaryFile, MAX_GREP_MATCHES, MAX_LINE_LENGTH, runRipgrep, validatePath } from '../types'

// Schema definition
export const GrepToolSchema = z.object({
  pattern: z.string().describe('The regex pattern to search for in file contents'),
  path: z
    .string()
    .optional()
    .describe('The directory to search in (must be absolute path). Defaults to the base directory'),
  include: z.string().optional().describe('File pattern to include in the search (e.g. "*.js", "*.{ts,tsx}")')
})

// Tool definition with detailed description
export const grepToolDefinition = {
  name: 'grep',
  description: `Fast content search tool that works with any codebase size.

- Searches file contents using regular expressions
- Supports full regex syntax (e.g., "log.*Error", "function\\s+\\w+")
- Filter files by pattern with include (e.g., "*.js", "*.{ts,tsx}")
- Returns absolute file paths and line numbers with matching content
- Results are limited to 100 matches
- Binary files are automatically skipped
- Common directories (node_modules, .git, dist) are excluded
- The path parameter must be an absolute path if specified
- If path is not specified, defaults to the base directory`,
  inputSchema: z.toJSONSchema(GrepToolSchema)
}

// Handler implementation
export async function handleGrepTool(args: unknown, baseDir: string) {
  const parsed = GrepToolSchema.safeParse(args)
  if (!parsed.success) {
    throw new Error(`Invalid arguments for grep: ${parsed.error}`)
  }

  const data = parsed.data

  if (!data.pattern) {
    throw new Error('Pattern is required for grep')
  }

  const searchPath = data.path || baseDir
  const validPath = await validatePath(searchPath, baseDir)

  const matches: GrepMatch[] = []
  let truncated = false
  let regex: RegExp

  // Build ripgrep arguments
  const rgArgs: string[] = [
    '--no-heading',
    '--line-number',
    '--color',
    'never',
    '--ignore-case',
    '--glob',
    '!.git/**',
    '--glob',
    '!node_modules/**',
    '--glob',
    '!dist/**',
    '--glob',
    '!build/**',
    '--glob',
    '!__pycache__/**'
  ]

  if (data.include) {
    for (const pat of data.include
      .split(',')
      .map((p) => p.trim())
      .filter(Boolean)) {
      rgArgs.push('--glob', pat)
    }
  }

  rgArgs.push(data.pattern)
  rgArgs.push(validPath)

  try {
    regex = new RegExp(data.pattern, 'gi')
  } catch (error) {
    throw new Error(`Invalid regex pattern: ${data.pattern}`)
  }

  async function searchFile(filePath: string): Promise<void> {
    if (matches.length >= MAX_GREP_MATCHES) {
      truncated = true
      return
    }

    try {
      // Skip binary files
      if (await isBinaryFile(filePath)) {
        return
      }

      const content = await fs.readFile(filePath, 'utf-8')
      const lines = content.split('\n')

      lines.forEach((line, index) => {
        if (matches.length >= MAX_GREP_MATCHES) {
          truncated = true
          return
        }

        if (regex.test(line)) {
          // Truncate long lines
          const truncatedLine = line.length > MAX_LINE_LENGTH ? line.substring(0, MAX_LINE_LENGTH) + '...' : line

          matches.push({
            file: filePath,
            line: index + 1,
            content: truncatedLine.trim()
          })
        }
      })
    } catch (error) {
      // Skip files we can't read
    }
  }

  async function searchDirectory(dir: string): Promise<void> {
    if (matches.length >= MAX_GREP_MATCHES) {
      truncated = true
      return
    }

    try {
      const entries = await fs.readdir(dir, { withFileTypes: true })

      for (const entry of entries) {
        if (matches.length >= MAX_GREP_MATCHES) {
          truncated = true
          break
        }

        const fullPath = path.join(dir, entry.name)

        // Skip common ignore patterns
        if (entry.name.startsWith('.') && entry.name !== '.env.example') {
          continue
        }
        if (['node_modules', 'dist', 'build', '__pycache__', '.git'].includes(entry.name)) {
          continue
        }

        if (entry.isFile()) {
          // Check if file matches include pattern
          if (data.include) {
            const includePatterns = data.include.split(',').map((p) => p.trim())
            const fileName = path.basename(fullPath)
            const matchesInclude = includePatterns.some((pattern) => {
              // Simple glob pattern matching
              const regexPattern = pattern
                .replace(/\*/g, '.*')
                .replace(/\?/g, '.')
                .replace(/\{([^}]+)\}/g, (_, group) => `(${group.split(',').join('|')})`)
              return new RegExp(`^${regexPattern}$`).test(fileName)
            })
            if (!matchesInclude) {
              continue
            }
          }

          await searchFile(fullPath)
        } else if (entry.isDirectory()) {
          await searchDirectory(fullPath)
        }
      }
    } catch (error) {
      // Skip directories we can't read
    }
  }

  // Perform the search
  let usedRipgrep = false
  try {
    const rgResult = await runRipgrep(rgArgs)
    if (rgResult.ok && rgResult.exitCode !== null && rgResult.exitCode !== 2) {
      usedRipgrep = true
      const lines = rgResult.stdout.split('\n').filter(Boolean)
      for (const line of lines) {
        if (matches.length >= MAX_GREP_MATCHES) {
          truncated = true
          break
        }

        const firstColon = line.indexOf(':')
        const secondColon = line.indexOf(':', firstColon + 1)
        if (firstColon === -1 || secondColon === -1) continue

        const filePart = line.slice(0, firstColon)
        const linePart = line.slice(firstColon + 1, secondColon)
        const contentPart = line.slice(secondColon + 1)
        const lineNum = Number.parseInt(linePart, 10)
        if (!Number.isFinite(lineNum)) continue

        const absoluteFilePath = path.isAbsolute(filePart) ? filePart : path.resolve(baseDir, filePart)
        const truncatedLine =
          contentPart.length > MAX_LINE_LENGTH ? contentPart.substring(0, MAX_LINE_LENGTH) + '...' : contentPart

        matches.push({
          file: absoluteFilePath,
          line: lineNum,
          content: truncatedLine.trim()
        })
      }
    }
  } catch {
    usedRipgrep = false
  }

  if (!usedRipgrep) {
    const stats = await fs.stat(validPath)
    if (stats.isFile()) {
      await searchFile(validPath)
    } else {
      await searchDirectory(validPath)
    }
  }

  // Format output
  const output: string[] = []

  if (matches.length === 0) {
    output.push('No matches found')
  } else {
    // Group matches by file
    const fileGroups = new Map<string, GrepMatch[]>()
    matches.forEach((match) => {
      if (!fileGroups.has(match.file)) {
        fileGroups.set(match.file, [])
      }
      fileGroups.get(match.file)!.push(match)
    })

    // Format grouped matches - always use absolute paths
    fileGroups.forEach((fileMatches, filePath) => {
      output.push(`\n${filePath}:`)
      fileMatches.forEach((match) => {
        output.push(`  ${match.line}: ${match.content}`)
      })
    })

    if (truncated) {
      output.push('')
      output.push(`(Results truncated to ${MAX_GREP_MATCHES} matches. Consider using a more specific pattern or path.)`)
    }
  }

  return {
    content: [
      {
        type: 'text',
        text: output.join('\n')
      }
    ]
  }
}
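A sketch of grep usage; the regex and include semantics follow the description above, and the values are illustrative:

// Hypothetical call: case-insensitive regex over TypeScript files, grouped by file in the output
await handleGrepTool({ pattern: 'log.*Error', include: '*.ts' }, baseDir)
// If ripgrep is unavailable, the handler falls back to the in-process directory scanner
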
8 src/main/mcpServers/filesystem/tools/index.ts Normal file
@@ -0,0 +1,8 @@
// Export all tool definitions and handlers
export { deleteToolDefinition, handleDeleteTool } from './delete'
export { editToolDefinition, handleEditTool } from './edit'
export { globToolDefinition, handleGlobTool } from './glob'
export { grepToolDefinition, handleGrepTool } from './grep'
export { handleLsTool, lsToolDefinition } from './ls'
export { handleReadTool, readToolDefinition } from './read'
export { handleWriteTool, writeToolDefinition } from './write'
150 src/main/mcpServers/filesystem/tools/ls.ts Normal file
@@ -0,0 +1,150 @@
import fs from 'fs/promises'
import path from 'path'
import * as z from 'zod'

import { MAX_FILES_LIMIT, validatePath } from '../types'

// Schema definition
export const LsToolSchema = z.object({
  path: z.string().optional().describe('The directory to list (must be absolute path). Defaults to the base directory'),
  recursive: z.boolean().optional().describe('Whether to list directories recursively (default: false)')
})

// Tool definition with detailed description
export const lsToolDefinition = {
  name: 'ls',
  description: `Lists files and directories in a specified path.

- Returns a tree-like structure with icons (📁 directories, 📄 files)
- Shows the absolute directory path in the header
- Entries are sorted alphabetically with directories first
- Can list recursively with recursive=true (up to 5 levels deep)
- Common directories (node_modules, dist, .git) are excluded
- Hidden files (starting with .) are excluded except .env.example
- Results are limited to 100 entries
- The path parameter must be an absolute path if specified
- If path is not specified, defaults to the base directory`,
  inputSchema: z.toJSONSchema(LsToolSchema)
}

// Handler implementation
export async function handleLsTool(args: unknown, baseDir: string) {
  const parsed = LsToolSchema.safeParse(args)
  if (!parsed.success) {
    throw new Error(`Invalid arguments for ls: ${parsed.error}`)
  }

  const targetPath = parsed.data.path || baseDir
  const validPath = await validatePath(targetPath, baseDir)
  const recursive = parsed.data.recursive || false

  interface TreeNode {
    name: string
    type: 'file' | 'directory'
    children?: TreeNode[]
  }

  let fileCount = 0
  let truncated = false

  async function buildTree(dirPath: string, depth: number = 0): Promise<TreeNode[]> {
    if (fileCount >= MAX_FILES_LIMIT) {
      truncated = true
      return []
    }

    try {
      const entries = await fs.readdir(dirPath, { withFileTypes: true })
      const nodes: TreeNode[] = []

      // Sort entries: directories first, then files, alphabetically
      entries.sort((a, b) => {
        if (a.isDirectory() && !b.isDirectory()) return -1
        if (!a.isDirectory() && b.isDirectory()) return 1
        return a.name.localeCompare(b.name)
      })

      for (const entry of entries) {
        if (fileCount >= MAX_FILES_LIMIT) {
          truncated = true
          break
        }

        // Skip hidden files and common ignore patterns
        if (entry.name.startsWith('.') && entry.name !== '.env.example') {
          continue
        }
        if (['node_modules', 'dist', 'build', '__pycache__'].includes(entry.name)) {
          continue
        }

        fileCount++
        const node: TreeNode = {
          name: entry.name,
          type: entry.isDirectory() ? 'directory' : 'file'
        }

        if (entry.isDirectory() && recursive && depth < 5) {
          // Limit depth to prevent infinite recursion
          const childPath = path.join(dirPath, entry.name)
          node.children = await buildTree(childPath, depth + 1)
        }

        nodes.push(node)
      }

      return nodes
    } catch (error) {
      return []
    }
  }

  // Build the tree
  const tree = await buildTree(validPath)

  // Format as text output
  function formatTree(nodes: TreeNode[], prefix: string = ''): string[] {
    const lines: string[] = []
|
||||
|
||||
nodes.forEach((node, index) => {
|
||||
const isLastNode = index === nodes.length - 1
|
||||
const connector = isLastNode ? '└── ' : '├── '
|
||||
const icon = node.type === 'directory' ? '📁 ' : '📄 '
|
||||
|
||||
lines.push(prefix + connector + icon + node.name)
|
||||
|
||||
if (node.children && node.children.length > 0) {
|
||||
const childPrefix = prefix + (isLastNode ? ' ' : '│ ')
|
||||
lines.push(...formatTree(node.children, childPrefix))
|
||||
}
|
||||
})
|
||||
|
||||
return lines
|
||||
}
|
||||
|
||||
// Generate output
|
||||
const output: string[] = []
|
||||
output.push(`Directory: ${validPath}`)
|
||||
output.push('')
|
||||
|
||||
if (tree.length === 0) {
|
||||
output.push('(empty directory)')
|
||||
} else {
|
||||
const treeLines = formatTree(tree, '')
|
||||
output.push(...treeLines)
|
||||
|
||||
if (truncated) {
|
||||
output.push('')
|
||||
output.push(`(Results truncated to ${MAX_FILES_LIMIT} files. Consider listing a more specific directory.)`)
|
||||
}
|
||||
}
|
||||
|
||||
return {
|
||||
content: [
|
||||
{
|
||||
type: 'text',
|
||||
text: output.join('\n')
|
||||
}
|
||||
]
|
||||
}
|
||||
}
|
||||
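To illustrate the formatting above, a hypothetical directory containing src/index.ts and README.md would render roughly as:

Directory: /repo

├── 📁 src
│   └── 📄 index.ts
└── 📄 README.md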
src/main/mcpServers/filesystem/tools/read.ts (new file, 101 lines)
@@ -0,0 +1,101 @@
import fs from 'fs/promises'
import path from 'path'
import * as z from 'zod'

import { DEFAULT_READ_LIMIT, isBinaryFile, MAX_LINE_LENGTH, validatePath } from '../types'

// Schema definition
export const ReadToolSchema = z.object({
  file_path: z.string().describe('The path to the file to read'),
  offset: z.number().optional().describe('The line number to start reading from (1-based)'),
  limit: z.number().optional().describe('The number of lines to read (defaults to 2000)')
})

// Tool definition with detailed description
export const readToolDefinition = {
  name: 'read',
  description: `Reads a file from the local filesystem.

- Assumes this tool can read all files on the machine
- The file_path parameter must be an absolute path, not a relative path
- By default, reads up to 2000 lines starting from the beginning
- You can optionally specify a line offset and limit for long files
- Any lines longer than 2000 characters will be truncated
- Results are returned with line numbers starting at 1
- Binary files are detected and rejected with an error
- Empty files return a warning`,
  inputSchema: z.toJSONSchema(ReadToolSchema)
}

// Handler implementation
export async function handleReadTool(args: unknown, baseDir: string) {
  const parsed = ReadToolSchema.safeParse(args)
  if (!parsed.success) {
    throw new Error(`Invalid arguments for read: ${parsed.error}`)
  }

  const filePath = parsed.data.file_path
  const validPath = await validatePath(filePath, baseDir)

  // Check if file exists
  try {
    const stats = await fs.stat(validPath)
    if (!stats.isFile()) {
      throw new Error(`Path is not a file: ${filePath}`)
    }
  } catch (error: any) {
    if (error.code === 'ENOENT') {
      throw new Error(`File not found: ${filePath}`)
    }
    throw error
  }

  // Check if file is binary
  if (await isBinaryFile(validPath)) {
    throw new Error(`Cannot read binary file: ${filePath}`)
  }

  // Read file content
  const content = await fs.readFile(validPath, 'utf-8')
  const lines = content.split('\n')

  // Apply offset and limit
  const offset = (parsed.data.offset || 1) - 1 // Convert to 0-based
  const limit = parsed.data.limit || DEFAULT_READ_LIMIT

  if (offset < 0 || offset >= lines.length) {
    throw new Error(`Invalid offset: ${offset + 1}. File has ${lines.length} lines.`)
  }

  const selectedLines = lines.slice(offset, offset + limit)

  // Format output with line numbers and truncate long lines
  const output: string[] = []
  const relativePath = path.relative(baseDir, validPath)

  output.push(`File: ${relativePath}`)
  if (offset > 0 || limit < lines.length) {
    output.push(`Lines ${offset + 1} to ${Math.min(offset + limit, lines.length)} of ${lines.length}`)
  }
  output.push('')

  selectedLines.forEach((line, index) => {
    const lineNumber = offset + index + 1
    const truncatedLine = line.length > MAX_LINE_LENGTH ? line.substring(0, MAX_LINE_LENGTH) + '...' : line
    output.push(`${lineNumber.toString().padStart(6)}\t${truncatedLine}`)
  })

  if (offset + limit < lines.length) {
    output.push('')
    output.push(`(${lines.length - (offset + limit)} more lines not shown)`)
  }

  return {
    content: [
      {
        type: 'text',
        text: output.join('\n')
      }
    ]
  }
}
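A minimal invocation sketch for the handler above (file path hypothetical; assumes it resolves inside baseDir and the file has at least 149 lines):

// Illustrative only: read 50 lines starting at line 100.
const result = await handleReadTool({ file_path: '/repo/CHANGELOG.md', offset: 100, limit: 50 }, '/repo')
// result.content[0].text begins with "File: CHANGELOG.md" and a "Lines 100 to 149 of N" header,
// followed by right-padded line numbers, one per line.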
src/main/mcpServers/filesystem/tools/write.ts (new file, 83 lines)
@@ -0,0 +1,83 @@
import fs from 'fs/promises'
import path from 'path'
import * as z from 'zod'

import { logger, validatePath } from '../types'

// Schema definition
export const WriteToolSchema = z.object({
  file_path: z.string().describe('The path to the file to write'),
  content: z.string().describe('The content to write to the file')
})

// Tool definition with detailed description
export const writeToolDefinition = {
  name: 'write',
  description: `Writes a file to the local filesystem.

- This tool will overwrite the existing file if one exists at the path
- You MUST use the read tool first to understand what you're overwriting
- ALWAYS prefer using the 'edit' tool for existing files
- NEVER proactively create documentation files unless explicitly requested
- Parent directories will be created automatically if they don't exist
- The file_path must be an absolute path, not a relative path`,
  inputSchema: z.toJSONSchema(WriteToolSchema)
}

// Handler implementation
export async function handleWriteTool(args: unknown, baseDir: string) {
  const parsed = WriteToolSchema.safeParse(args)
  if (!parsed.success) {
    throw new Error(`Invalid arguments for write: ${parsed.error}`)
  }

  const filePath = parsed.data.file_path
  const validPath = await validatePath(filePath, baseDir)

  // Create parent directory if it doesn't exist
  const parentDir = path.dirname(validPath)
  try {
    await fs.mkdir(parentDir, { recursive: true })
  } catch (error: any) {
    if (error.code !== 'EEXIST') {
      throw new Error(`Failed to create parent directory: ${error.message}`)
    }
  }

  // Check if file exists (for logging)
  let isOverwrite = false
  try {
    await fs.stat(validPath)
    isOverwrite = true
  } catch {
    // File doesn't exist, that's fine
  }

  // Write the file
  try {
    await fs.writeFile(validPath, parsed.data.content, 'utf-8')
  } catch (error: any) {
    throw new Error(`Failed to write file: ${error.message}`)
  }

  // Log the operation
  logger.info('File written', {
    path: validPath,
    overwrite: isOverwrite,
    size: parsed.data.content.length
  })

  // Format output
  const relativePath = path.relative(baseDir, validPath)
  const action = isOverwrite ? 'Updated' : 'Created'
  const lines = parsed.data.content.split('\n').length

  return {
    content: [
      {
        type: 'text',
        text: `${action} file: ${relativePath}\n` + `Size: ${parsed.data.content.length} bytes\n` + `Lines: ${lines}`
      }
    ]
  }
}
src/main/mcpServers/filesystem/types.ts (new file, 627 lines)
@@ -0,0 +1,627 @@
import { loggerService } from '@logger'
import { isMac, isWin } from '@main/constant'
import { spawn } from 'child_process'
import fs from 'fs/promises'
import os from 'os'
import path from 'path'

export const logger = loggerService.withContext('MCP:FileSystemServer')

// Constants
export const MAX_LINE_LENGTH = 2000
export const DEFAULT_READ_LIMIT = 2000
export const MAX_FILES_LIMIT = 100
export const MAX_GREP_MATCHES = 100

// Common types
export interface FileInfo {
  path: string
  type: 'file' | 'directory'
  size?: number
  modified?: Date
}

export interface GrepMatch {
  file: string
  line: number
  content: string
}

// Utility functions for path handling
export function normalizePath(p: string): string {
  return path.normalize(p)
}

export function expandHome(filepath: string): string {
  if (filepath.startsWith('~/') || filepath === '~') {
    return path.join(os.homedir(), filepath.slice(1))
  }
  return filepath
}

// Security validation
export async function validatePath(requestedPath: string, baseDir?: string): Promise<string> {
  const expandedPath = expandHome(requestedPath)
  const root = baseDir ?? process.cwd()
  const absolute = path.isAbsolute(expandedPath) ? path.resolve(expandedPath) : path.resolve(root, expandedPath)

  // Handle symlinks by checking their real path
  try {
    const realPath = await fs.realpath(absolute)
    return normalizePath(realPath)
  } catch (error) {
    // For new files that don't exist yet, verify parent directory
    const parentDir = path.dirname(absolute)
    try {
      const realParentPath = await fs.realpath(parentDir)
      normalizePath(realParentPath)
      return normalizePath(absolute)
    } catch {
      return normalizePath(absolute)
    }
  }
}
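A quick sketch of the resolution behavior above (paths hypothetical; assumes a POSIX home directory):

// Illustrative only:
await validatePath('~/notes.txt')        // e.g. '/home/user/notes.txt' (realpath if it exists)
await validatePath('docs/a.md', '/repo') // '/repo/docs/a.md'
await validatePath('/tmp/some-symlink')  // the symlink's real target, normalized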
// ============================================================================
// Edit Tool Utilities - Fuzzy matching replacers from opencode
// ============================================================================

export type Replacer = (content: string, find: string) => Generator<string, void, unknown>

// Similarity thresholds for block anchor fallback matching
const SINGLE_CANDIDATE_SIMILARITY_THRESHOLD = 0.0
const MULTIPLE_CANDIDATES_SIMILARITY_THRESHOLD = 0.3

/**
 * Levenshtein distance algorithm implementation
 */
function levenshtein(a: string, b: string): number {
  if (a === '' || b === '') {
    return Math.max(a.length, b.length)
  }
  const matrix = Array.from({ length: a.length + 1 }, (_, i) =>
    Array.from({ length: b.length + 1 }, (_, j) => (i === 0 ? j : j === 0 ? i : 0))
  )

  for (let i = 1; i <= a.length; i++) {
    for (let j = 1; j <= b.length; j++) {
      const cost = a[i - 1] === b[j - 1] ? 0 : 1
      matrix[i][j] = Math.min(matrix[i - 1][j] + 1, matrix[i][j - 1] + 1, matrix[i - 1][j - 1] + cost)
    }
  }
  return matrix[a.length][b.length]
}

export const SimpleReplacer: Replacer = function* (_content, find) {
  yield find
}
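A small worked example for the distance function above (standard dynamic-programming Levenshtein):

// Illustrative only:
levenshtein('kitten', 'sitting') // 3 (k→s, e→i, append g)
levenshtein('abc', 'abc')        // 0
levenshtein('', 'abc')           // 3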
export const LineTrimmedReplacer: Replacer = function* (content, find) {
  const originalLines = content.split('\n')
  const searchLines = find.split('\n')

  if (searchLines[searchLines.length - 1] === '') {
    searchLines.pop()
  }

  for (let i = 0; i <= originalLines.length - searchLines.length; i++) {
    let matches = true

    for (let j = 0; j < searchLines.length; j++) {
      const originalTrimmed = originalLines[i + j].trim()
      const searchTrimmed = searchLines[j].trim()

      if (originalTrimmed !== searchTrimmed) {
        matches = false
        break
      }
    }

    if (matches) {
      let matchStartIndex = 0
      for (let k = 0; k < i; k++) {
        matchStartIndex += originalLines[k].length + 1
      }

      let matchEndIndex = matchStartIndex
      for (let k = 0; k < searchLines.length; k++) {
        matchEndIndex += originalLines[i + k].length
        if (k < searchLines.length - 1) {
          matchEndIndex += 1
        }
      }

      yield content.substring(matchStartIndex, matchEndIndex)
    }
  }
}

export const BlockAnchorReplacer: Replacer = function* (content, find) {
  const originalLines = content.split('\n')
  const searchLines = find.split('\n')

  if (searchLines.length < 3) {
    return
  }

  if (searchLines[searchLines.length - 1] === '') {
    searchLines.pop()
  }

  const firstLineSearch = searchLines[0].trim()
  const lastLineSearch = searchLines[searchLines.length - 1].trim()
  const searchBlockSize = searchLines.length

  const candidates: Array<{ startLine: number; endLine: number }> = []
  for (let i = 0; i < originalLines.length; i++) {
    if (originalLines[i].trim() !== firstLineSearch) {
      continue
    }

    for (let j = i + 2; j < originalLines.length; j++) {
      if (originalLines[j].trim() === lastLineSearch) {
        candidates.push({ startLine: i, endLine: j })
        break
      }
    }
  }

  if (candidates.length === 0) {
    return
  }

  if (candidates.length === 1) {
    const { startLine, endLine } = candidates[0]
    const actualBlockSize = endLine - startLine + 1

    let similarity = 0
    const linesToCheck = Math.min(searchBlockSize - 2, actualBlockSize - 2)

    if (linesToCheck > 0) {
      for (let j = 1; j < searchBlockSize - 1 && j < actualBlockSize - 1; j++) {
        const originalLine = originalLines[startLine + j].trim()
        const searchLine = searchLines[j].trim()
        const maxLen = Math.max(originalLine.length, searchLine.length)
        if (maxLen === 0) {
          continue
        }
        const distance = levenshtein(originalLine, searchLine)
        similarity += (1 - distance / maxLen) / linesToCheck

        if (similarity >= SINGLE_CANDIDATE_SIMILARITY_THRESHOLD) {
          break
        }
      }
    } else {
      similarity = 1.0
    }

    if (similarity >= SINGLE_CANDIDATE_SIMILARITY_THRESHOLD) {
      let matchStartIndex = 0
      for (let k = 0; k < startLine; k++) {
        matchStartIndex += originalLines[k].length + 1
      }
      let matchEndIndex = matchStartIndex
      for (let k = startLine; k <= endLine; k++) {
        matchEndIndex += originalLines[k].length
        if (k < endLine) {
          matchEndIndex += 1
        }
      }
      yield content.substring(matchStartIndex, matchEndIndex)
    }
    return
  }

  let bestMatch: { startLine: number; endLine: number } | null = null
  let maxSimilarity = -1

  for (const candidate of candidates) {
    const { startLine, endLine } = candidate
    const actualBlockSize = endLine - startLine + 1

    let similarity = 0
    const linesToCheck = Math.min(searchBlockSize - 2, actualBlockSize - 2)

    if (linesToCheck > 0) {
      for (let j = 1; j < searchBlockSize - 1 && j < actualBlockSize - 1; j++) {
        const originalLine = originalLines[startLine + j].trim()
        const searchLine = searchLines[j].trim()
        const maxLen = Math.max(originalLine.length, searchLine.length)
        if (maxLen === 0) {
          continue
        }
        const distance = levenshtein(originalLine, searchLine)
        similarity += 1 - distance / maxLen
      }
      similarity /= linesToCheck
    } else {
      similarity = 1.0
    }

    if (similarity > maxSimilarity) {
      maxSimilarity = similarity
      bestMatch = candidate
    }
  }

  if (maxSimilarity >= MULTIPLE_CANDIDATES_SIMILARITY_THRESHOLD && bestMatch) {
    const { startLine, endLine } = bestMatch
    let matchStartIndex = 0
    for (let k = 0; k < startLine; k++) {
      matchStartIndex += originalLines[k].length + 1
    }
    let matchEndIndex = matchStartIndex
    for (let k = startLine; k <= endLine; k++) {
      matchEndIndex += originalLines[k].length
      if (k < endLine) {
        matchEndIndex += 1
      }
    }
    yield content.substring(matchStartIndex, matchEndIndex)
  }
}
export const WhitespaceNormalizedReplacer: Replacer = function* (content, find) {
  const normalizeWhitespace = (text: string) => text.replace(/\s+/g, ' ').trim()
  const normalizedFind = normalizeWhitespace(find)

  const lines = content.split('\n')
  for (let i = 0; i < lines.length; i++) {
    const line = lines[i]
    if (normalizeWhitespace(line) === normalizedFind) {
      yield line
    } else {
      const normalizedLine = normalizeWhitespace(line)
      if (normalizedLine.includes(normalizedFind)) {
        const words = find.trim().split(/\s+/)
        if (words.length > 0) {
          const pattern = words.map((word) => word.replace(/[.*+?^${}()|[\]\\]/g, '\\$&')).join('\\s+')
          try {
            const regex = new RegExp(pattern)
            const match = line.match(regex)
            if (match) {
              yield match[0]
            }
          } catch {
            // Invalid regex pattern, skip
          }
        }
      }
    }
  }

  const findLines = find.split('\n')
  if (findLines.length > 1) {
    for (let i = 0; i <= lines.length - findLines.length; i++) {
      const block = lines.slice(i, i + findLines.length)
      if (normalizeWhitespace(block.join('\n')) === normalizedFind) {
        yield block.join('\n')
      }
    }
  }
}

export const IndentationFlexibleReplacer: Replacer = function* (content, find) {
  const removeIndentation = (text: string) => {
    const lines = text.split('\n')
    const nonEmptyLines = lines.filter((line) => line.trim().length > 0)
    if (nonEmptyLines.length === 0) return text

    const minIndent = Math.min(
      ...nonEmptyLines.map((line) => {
        const match = line.match(/^(\s*)/)
        return match ? match[1].length : 0
      })
    )

    return lines.map((line) => (line.trim().length === 0 ? line : line.slice(minIndent))).join('\n')
  }

  const normalizedFind = removeIndentation(find)
  const contentLines = content.split('\n')
  const findLines = find.split('\n')

  for (let i = 0; i <= contentLines.length - findLines.length; i++) {
    const block = contentLines.slice(i, i + findLines.length).join('\n')
    if (removeIndentation(block) === normalizedFind) {
      yield block
    }
  }
}

export const EscapeNormalizedReplacer: Replacer = function* (content, find) {
  const unescapeString = (str: string): string => {
    return str.replace(/\\(n|t|r|'|"|`|\\|\n|\$)/g, (match, capturedChar) => {
      switch (capturedChar) {
        case 'n':
          return '\n'
        case 't':
          return '\t'
        case 'r':
          return '\r'
        case "'":
          return "'"
        case '"':
          return '"'
        case '`':
          return '`'
        case '\\':
          return '\\'
        case '\n':
          return '\n'
        case '$':
          return '$'
        default:
          return match
      }
    })
  }

  const unescapedFind = unescapeString(find)

  if (content.includes(unescapedFind)) {
    yield unescapedFind
  }

  const lines = content.split('\n')
  const findLines = unescapedFind.split('\n')

  for (let i = 0; i <= lines.length - findLines.length; i++) {
    const block = lines.slice(i, i + findLines.length).join('\n')
    const unescapedBlock = unescapeString(block)

    if (unescapedBlock === unescapedFind) {
      yield block
    }
  }
}

export const TrimmedBoundaryReplacer: Replacer = function* (content, find) {
  const trimmedFind = find.trim()

  if (trimmedFind === find) {
    return
  }

  if (content.includes(trimmedFind)) {
    yield trimmedFind
  }

  const lines = content.split('\n')
  const findLines = find.split('\n')

  for (let i = 0; i <= lines.length - findLines.length; i++) {
    const block = lines.slice(i, i + findLines.length).join('\n')

    if (block.trim() === trimmedFind) {
      yield block
    }
  }
}

export const ContextAwareReplacer: Replacer = function* (content, find) {
  const findLines = find.split('\n')
  if (findLines.length < 3) {
    return
  }

  if (findLines[findLines.length - 1] === '') {
    findLines.pop()
  }

  const contentLines = content.split('\n')

  const firstLine = findLines[0].trim()
  const lastLine = findLines[findLines.length - 1].trim()

  for (let i = 0; i < contentLines.length; i++) {
    if (contentLines[i].trim() !== firstLine) continue

    for (let j = i + 2; j < contentLines.length; j++) {
      if (contentLines[j].trim() === lastLine) {
        const blockLines = contentLines.slice(i, j + 1)
        const block = blockLines.join('\n')

        if (blockLines.length === findLines.length) {
          let matchingLines = 0
          let totalNonEmptyLines = 0

          for (let k = 1; k < blockLines.length - 1; k++) {
            const blockLine = blockLines[k].trim()
            const findLine = findLines[k].trim()

            if (blockLine.length > 0 || findLine.length > 0) {
              totalNonEmptyLines++
              if (blockLine === findLine) {
                matchingLines++
              }
            }
          }

          if (totalNonEmptyLines === 0 || matchingLines / totalNonEmptyLines >= 0.5) {
            yield block
            break
          }
        }
        break
      }
    }
  }
}

export const MultiOccurrenceReplacer: Replacer = function* (content, find) {
  let startIndex = 0

  while (true) {
    const index = content.indexOf(find, startIndex)
    if (index === -1) break

    yield find
    startIndex = index + find.length
  }
}
/**
 * All replacers in order of specificity
 */
export const ALL_REPLACERS: Replacer[] = [
  SimpleReplacer,
  LineTrimmedReplacer,
  BlockAnchorReplacer,
  WhitespaceNormalizedReplacer,
  IndentationFlexibleReplacer,
  EscapeNormalizedReplacer,
  TrimmedBoundaryReplacer,
  ContextAwareReplacer,
  MultiOccurrenceReplacer
]

/**
 * Replace oldString with newString in content using fuzzy matching
 */
export function replaceWithFuzzyMatch(
  content: string,
  oldString: string,
  newString: string,
  replaceAll = false
): string {
  if (oldString === newString) {
    throw new Error('old_string and new_string must be different')
  }

  let notFound = true

  for (const replacer of ALL_REPLACERS) {
    for (const search of replacer(content, oldString)) {
      const index = content.indexOf(search)
      if (index === -1) continue
      notFound = false
      if (replaceAll) {
        return content.replaceAll(search, newString)
      }
      const lastIndex = content.lastIndexOf(search)
      if (index !== lastIndex) continue
      return content.substring(0, index) + newString + content.substring(index + search.length)
    }
  }

  if (notFound) {
    throw new Error('old_string not found in content')
  }
  throw new Error(
    'Found multiple matches for old_string. Provide more surrounding lines in old_string to identify the correct match.'
  )
}
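A usage sketch for the function above (strings hypothetical). The indentation of old_string differs from the content, so SimpleReplacer fails and LineTrimmedReplacer produces the match:

// Illustrative only:
const content = 'function add(a, b) {\n    return a + b\n}'
const oldStr = 'function add(a, b) {\n  return a + b\n}' // different indentation
replaceWithFuzzyMatch(content, oldStr, 'const add = (a, b) => a + b')
// -> 'const add = (a, b) => a + b'
// Throws if old_string is ambiguous (matches more than once) and replaceAll is false.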
// ============================================================================
// Binary File Detection
// ============================================================================

// Check if a file is likely binary
export async function isBinaryFile(filePath: string): Promise<boolean> {
  try {
    const buffer = Buffer.alloc(4096)
    const fd = await fs.open(filePath, 'r')
    const { bytesRead } = await fd.read(buffer, 0, buffer.length, 0)
    await fd.close()

    if (bytesRead === 0) return false

    const view = buffer.subarray(0, bytesRead)

    let zeroBytes = 0
    let evenZeros = 0
    let oddZeros = 0
    let nonPrintable = 0

    for (let i = 0; i < view.length; i++) {
      const b = view[i]

      if (b === 0) {
        zeroBytes++
        if (i % 2 === 0) evenZeros++
        else oddZeros++
        continue
      }

      // treat common whitespace as printable
      if (b === 9 || b === 10 || b === 13) continue

      // basic ASCII printable range
      if (b >= 32 && b <= 126) continue

      // bytes >= 128 are likely part of UTF-8 sequences; count as printable
      if (b >= 128) continue

      nonPrintable++
    }

    // If there are lots of null bytes, it's probably binary unless it looks like UTF-16 text.
    if (zeroBytes > 0) {
      const evenSlots = Math.ceil(view.length / 2)
      const oddSlots = Math.floor(view.length / 2)
      const evenZeroRatio = evenSlots > 0 ? evenZeros / evenSlots : 0
      const oddZeroRatio = oddSlots > 0 ? oddZeros / oddSlots : 0

      // UTF-16LE/BE tends to have zeros on every other byte.
      if (evenZeroRatio > 0.7 || oddZeroRatio > 0.7) return false

      if (zeroBytes / view.length > 0.05) return true
    }

    // Heuristic: too many non-printable bytes => binary.
    return nonPrintable / view.length > 0.3
  } catch {
    return false
  }
}
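How the heuristics above classify typical inputs, as a sketch (file paths hypothetical):

// Illustrative only:
await isBinaryFile('/repo/README.md')       // false - mostly printable ASCII/UTF-8
await isBinaryFile('/repo/logo.png')        // true  - many null/non-printable bytes
await isBinaryFile('/repo/utf16-notes.txt') // false - zeros land on alternating bytes (UTF-16 heuristic)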
// ============================================================================
// Ripgrep Utilities
// ============================================================================

export interface RipgrepResult {
  ok: boolean
  stdout: string
  exitCode: number | null
}

export function getRipgrepAddonPath(): string {
  const pkgJsonPath = require.resolve('@anthropic-ai/claude-agent-sdk/package.json')
  const pkgRoot = path.dirname(pkgJsonPath)
  const platform = isMac ? 'darwin' : isWin ? 'win32' : 'linux'
  const arch = process.arch === 'arm64' ? 'arm64' : 'x64'
  return path.join(pkgRoot, 'vendor', 'ripgrep', `${arch}-${platform}`, 'ripgrep.node')
}

export async function runRipgrep(args: string[]): Promise<RipgrepResult> {
  const addonPath = getRipgrepAddonPath()
  const childScript = `const { ripgrepMain } = require(process.env.RIPGREP_ADDON_PATH); process.exit(ripgrepMain(process.argv.slice(1)));`

  return new Promise((resolve) => {
    const child = spawn(process.execPath, ['--eval', childScript, 'rg', ...args], {
      cwd: process.cwd(),
      env: {
        ...process.env,
        ELECTRON_RUN_AS_NODE: '1',
        RIPGREP_ADDON_PATH: addonPath
      },
      stdio: ['ignore', 'pipe', 'pipe']
    })

    let stdout = ''

    child.stdout?.on('data', (chunk) => {
      stdout += chunk.toString('utf-8')
    })

    child.on('error', () => {
      resolve({ ok: false, stdout: '', exitCode: null })
    })

    child.on('close', (code) => {
      resolve({ ok: true, stdout, exitCode: code })
    })
  })
}
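A minimal invocation sketch for the helper above (pattern and path hypothetical). Note ripgrep's convention: exit code 0 means matches, 1 means none, 2 means an error, which is why callers treat exitCode === 2 as failure:

// Illustrative only:
const result = await runRipgrep(['--line-number', 'TODO', '/repo/src'])
if (result.ok && result.exitCode !== null && result.exitCode !== 2) {
  console.log(result.stdout) // lines like 'src/app.ts:12:// TODO: ...'
}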
@@ -1,3 +1,19 @@
+/**
+ * @deprecated Scheduled for removal in v2.0.0
+ * --------------------------------------------------------------------------
+ * ⚠️ NOTICE: V2 DATA&UI REFACTORING (by 0xfullex)
+ * --------------------------------------------------------------------------
+ * STOP: Feature PRs affecting this file are currently BLOCKED.
+ * Only critical bug fixes are accepted during this migration phase.
+ *
+ * This file is being refactored to v2 standards.
+ * Any non-critical changes will conflict with the ongoing work.
+ *
+ * 🔗 Context & Status:
+ * - Contribution Hold: https://github.com/CherryHQ/cherry-studio/issues/10954
+ * - v2 Refactor PR : https://github.com/CherryHQ/cherry-studio/pull/10162
+ * --------------------------------------------------------------------------
+ */
 import { loggerService } from '@logger'
 import { IpcChannel } from '@shared/IpcChannel'
 import type { WebDavConfig } from '@types'
@@ -767,6 +783,56 @@ class BackupManager {
     const s3Client = this.getS3Storage(s3Config)
     return await s3Client.checkConnection()
   }
+
+  /**
+   * Create a temporary backup for LAN transfer
+   * Creates a lightweight backup (skipBackupFile=true) in the temp directory
+   * Returns the path to the created ZIP file
+   */
+  async createLanTransferBackup(_: Electron.IpcMainInvokeEvent, data: string): Promise<string> {
+    const timestamp = new Date()
+      .toISOString()
+      .replace(/[-:T.Z]/g, '')
+      .slice(0, 12)
+    const fileName = `cherry-studio.${timestamp}.zip`
+    const tempPath = path.join(app.getPath('temp'), 'cherry-studio', 'lan-transfer')
+
+    // Ensure temp directory exists
+    await fs.ensureDir(tempPath)
+
+    // Create backup with skipBackupFile=true (no Data folder)
+    const backupedFilePath = await this.backup(_, fileName, data, tempPath, true)
+
+    logger.info(`[BackupManager] Created LAN transfer backup at: ${backupedFilePath}`)
+    return backupedFilePath
+  }
+
+  /**
+   * Delete a temporary backup file after LAN transfer completes
+   */
+  async deleteTempBackup(_: Electron.IpcMainInvokeEvent, filePath: string): Promise<boolean> {
+    try {
+      // Security check: only allow deletion within temp directory
+      const tempBase = path.normalize(path.join(app.getPath('temp'), 'cherry-studio', 'lan-transfer'))
+      const resolvedPath = path.normalize(path.resolve(filePath))
+
+      // Use normalized paths with trailing separator to prevent prefix attacks (e.g., /temp-evil)
+      if (!resolvedPath.startsWith(tempBase + path.sep) && resolvedPath !== tempBase) {
+        logger.warn(`[BackupManager] Attempted to delete file outside temp directory: ${filePath}`)
+        return false
+      }
+
+      if (await fs.pathExists(resolvedPath)) {
+        await fs.remove(resolvedPath)
+        logger.info(`[BackupManager] Deleted temp backup: ${resolvedPath}`)
+        return true
+      }
+      return false
+    } catch (error) {
+      logger.error('[BackupManager] Failed to delete temp backup:', error as Error)
+      return false
+    }
+  }
 }
 
 export default BackupManager
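Why the trailing separator matters in the check above, in a short sketch (POSIX paths, hypothetical):

// Illustrative only: a bare startsWith would accept a sibling directory.
const tempBase = '/tmp/cherry-studio/lan-transfer'
'/tmp/cherry-studio/lan-transfer-evil/x.zip'.startsWith(tempBase)       // true  (unsafe)
'/tmp/cherry-studio/lan-transfer-evil/x.zip'.startsWith(tempBase + '/') // false (rejected)
'/tmp/cherry-studio/lan-transfer/backup.zip'.startsWith(tempBase + '/') // true  (allowed)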
@ -1,3 +1,19 @@
|
||||
/**
|
||||
* @deprecated Scheduled for removal in v2.0.0
|
||||
* --------------------------------------------------------------------------
|
||||
* ⚠️ NOTICE: V2 DATA&UI REFACTORING (by 0xfullex)
|
||||
* --------------------------------------------------------------------------
|
||||
* STOP: Feature PRs affecting this file are currently BLOCKED.
|
||||
* Only critical bug fixes are accepted during this migration phase.
|
||||
*
|
||||
* This file is being refactored to v2 standards.
|
||||
* Any non-critical changes will conflict with the ongoing work.
|
||||
*
|
||||
* 🔗 Context & Status:
|
||||
* - Contribution Hold: https://github.com/CherryHQ/cherry-studio/issues/10954
|
||||
* - v2 Refactor PR : https://github.com/CherryHQ/cherry-studio/pull/10162
|
||||
* --------------------------------------------------------------------------
|
||||
*/
|
||||
interface CacheItem<T> {
|
||||
data: T
|
||||
timestamp: number
|
||||
|
||||
@@ -1,3 +1,19 @@
+/**
+ * @deprecated Scheduled for removal in v2.0.0
+ * --------------------------------------------------------------------------
+ * ⚠️ NOTICE: V2 DATA&UI REFACTORING (by 0xfullex)
+ * --------------------------------------------------------------------------
+ * STOP: Feature PRs affecting this file are currently BLOCKED.
+ * Only critical bug fixes are accepted during this migration phase.
+ *
+ * This file is being refactored to v2 standards.
+ * Any non-critical changes will conflict with the ongoing work.
+ *
+ * 🔗 Context & Status:
+ * - Contribution Hold: https://github.com/CherryHQ/cherry-studio/issues/10954
+ * - v2 Refactor PR : https://github.com/CherryHQ/cherry-studio/pull/10162
+ * --------------------------------------------------------------------------
+ */
 import type { UpgradeChannel } from '@shared/config/constant'
 import { defaultLanguage, ZOOM_SHORTCUTS } from '@shared/config/constant'
 import type { LanguageVarious, Shortcut } from '@types'
@@ -32,7 +48,8 @@ export enum ConfigKeys {
   Proxy = 'proxy',
   EnableDeveloperMode = 'enableDeveloperMode',
   ClientId = 'clientId',
-  GitBashPath = 'gitBashPath'
+  GitBashPath = 'gitBashPath',
+  GitBashPathSource = 'gitBashPathSource' // 'manual' | 'auto' | null
 }
 
 export class ConfigManager {
@@ -2,7 +2,7 @@ import { loggerService } from '@logger'
 import {
   checkName,
   getFilesDir,
-  getFileType,
+  getFileType as getFileTypeByExt,
   getName,
   getNotesDir,
   getTempDir,
@@ -10,14 +10,15 @@
   scanDir
 } from '@main/utils/file'
 import { documentExts, imageExts, KB, MB } from '@shared/config/constant'
+import { parseDataUrl } from '@shared/utils'
 import type { FileMetadata, NotesTreeNode } from '@types'
 import { FileTypes } from '@types'
 import chardet from 'chardet'
 import type { FSWatcher } from 'chokidar'
 import chokidar from 'chokidar'
 import * as crypto from 'crypto'
 import type { OpenDialogOptions, OpenDialogReturnValue, SaveDialogOptions, SaveDialogReturnValue } from 'electron'
-import { app } from 'electron'
-import { dialog, net, shell } from 'electron'
+import { app, dialog, net, shell } from 'electron'
 import * as fs from 'fs'
 import { writeFileSync } from 'fs'
 import { readFile } from 'fs/promises'
@@ -130,16 +131,18 @@ interface DirectoryListOptions {
   includeDirectories?: boolean
   maxEntries?: number
   searchPattern?: string
+  fuzzy?: boolean
 }
 
 const DEFAULT_DIRECTORY_LIST_OPTIONS: Required<DirectoryListOptions> = {
   recursive: true,
-  maxDepth: 3,
+  maxDepth: 10,
   includeHidden: false,
   includeFiles: true,
   includeDirectories: true,
-  maxEntries: 10,
-  searchPattern: '.'
+  maxEntries: 20,
+  searchPattern: '.',
+  fuzzy: true
 }
 
 class FileStorage {
@@ -163,7 +166,7 @@ class FileStorage {
       fs.mkdirSync(this.storageDir, { recursive: true })
     }
     if (!fs.existsSync(this.notesDir)) {
-      fs.mkdirSync(this.storageDir, { recursive: true })
+      fs.mkdirSync(this.notesDir, { recursive: true })
     }
     if (!fs.existsSync(this.tempDir)) {
       fs.mkdirSync(this.tempDir, { recursive: true })
@@ -185,7 +188,7 @@ class FileStorage {
     })
   }
 
-  findDuplicateFile = async (filePath: string): Promise<FileMetadata | null> => {
+  private findDuplicateFile = async (filePath: string): Promise<FileMetadata | null> => {
     const stats = fs.statSync(filePath)
     logger.debug(`stats: ${stats}, filePath: ${filePath}`)
     const fileSize = stats.size
@@ -204,6 +207,8 @@ class FileStorage {
         if (originalHash === storedHash) {
           const ext = path.extname(file)
           const id = path.basename(file, ext)
+          const type = await this.getFileType(filePath)
+
           return {
             id,
             origin_name: file,
@@ -212,7 +217,7 @@ class FileStorage {
             created_at: storedStats.birthtime.toISOString(),
             size: storedStats.size,
             ext,
-            type: getFileType(ext),
+            type,
             count: 2
           }
         }
@@ -222,6 +227,13 @@ class FileStorage {
     return null
   }
 
+  public getFileType = async (filePath: string): Promise<FileTypes> => {
+    const ext = path.extname(filePath)
+    const fileType = getFileTypeByExt(ext)
+
+    return fileType === FileTypes.OTHER && (await this._isTextFile(filePath)) ? FileTypes.TEXT : fileType
+  }
+
   public selectFile = async (
     _: Electron.IpcMainInvokeEvent,
     options?: OpenDialogOptions
@@ -241,7 +253,7 @@ class FileStorage {
       const fileMetadataPromises = result.filePaths.map(async (filePath) => {
         const stats = fs.statSync(filePath)
         const ext = path.extname(filePath)
-        const fileType = getFileType(ext)
+        const fileType = await this.getFileType(filePath)
 
         return {
           id: uuidv4(),
@@ -307,7 +319,7 @@ class FileStorage {
     }
 
     const stats = await fs.promises.stat(destPath)
-    const fileType = getFileType(ext)
+    const fileType = await this.getFileType(destPath)
 
     const fileMetadata: FileMetadata = {
       id: uuid,
@@ -332,8 +344,7 @@ class FileStorage {
     }
 
     const stats = fs.statSync(filePath)
-    const ext = path.extname(filePath)
-    const fileType = getFileType(ext)
+    const fileType = await this.getFileType(filePath)
 
     return {
       id: uuidv4(),
@@ -342,7 +353,7 @@ class FileStorage {
       path: filePath,
       created_at: stats.birthtime.toISOString(),
       size: stats.size,
-      ext: ext,
+      ext: path.extname(filePath),
       type: fileType,
       count: 1
     }
@@ -662,8 +673,8 @@ class FileStorage {
       throw new Error('Base64 data is required')
     }
 
-    // Remove the base64 data-URL header if present
-    const base64String = base64Data.replace(/^data:.*;base64,/, '')
+    const parseResult = parseDataUrl(base64Data)
+    const base64String = parseResult?.data ?? base64Data
     const buffer = Buffer.from(base64String, 'base64')
     const uuid = uuidv4()
     const ext = '.png'
@@ -690,7 +701,7 @@ class FileStorage {
       created_at: new Date().toISOString(),
       size: buffer.length,
       ext: ext.slice(1),
-      type: getFileType(ext),
+      type: getFileTypeByExt(ext),
       count: 1
     }
   } catch (error) {
@@ -740,7 +751,7 @@ class FileStorage {
       created_at: new Date().toISOString(),
       size: stats.size,
       ext: ext.slice(1),
-      type: getFileType(ext),
+      type: getFileTypeByExt(ext),
      count: 1
     }
   } catch (error) {
@@ -1038,10 +1049,226 @@ class FileStorage {
   }
 
   /**
-   * Search files by content pattern
+   * Fuzzy match: checks if all characters in query appear in text in order (case-insensitive)
+   * Example: "updater" matches "packages/update/src/node/updateController.ts"
    */
-  private async searchByContent(resolvedPath: string, options: Required<DirectoryListOptions>): Promise<string[]> {
-    const args: string[] = ['-l']
+  private isFuzzyMatch(text: string, query: string): boolean {
+    let i = 0 // text index
+    let j = 0 // query index
+    const textLower = text.toLowerCase()
+    const queryLower = query.toLowerCase()
+
+    while (i < textLower.length && j < queryLower.length) {
+      if (textLower[i] === queryLower[j]) {
+        j++
+      }
+      i++
+    }
+    return j === queryLower.length
+  }
+
+  /**
+   * Scoring constants for fuzzy match relevance ranking
+   * Higher values = higher priority in search results
+   */
+  private static readonly SCORE_SEGMENT_MATCH = 60 // Per path segment that matches query
+  private static readonly SCORE_FILENAME_CONTAINS = 80 // Filename contains exact query substring
+  private static readonly SCORE_FILENAME_STARTS = 100 // Filename starts with query (highest priority)
+  private static readonly SCORE_CONSECUTIVE_CHAR = 15 // Per consecutive character match
+  private static readonly SCORE_WORD_BOUNDARY = 20 // Query matches start of a word
+  private static readonly PATH_LENGTH_PENALTY_FACTOR = 4 // Logarithmic penalty multiplier for longer paths
+
+  /**
+   * Calculate fuzzy match score (higher is better)
+   * Scoring factors:
+   * - Consecutive character matches (bonus)
+   * - Match at word boundaries (bonus)
+   * - Shorter path length (bonus)
+   * - Match in filename vs directory (bonus)
+   */
+  private getFuzzyMatchScore(filePath: string, query: string): number {
+    const pathLower = filePath.toLowerCase()
+    const queryLower = query.toLowerCase()
+    const fileName = filePath.split('/').pop() || ''
+    const fileNameLower = fileName.toLowerCase()
+
+    let score = 0
+
+    // Count how many times query-related words appear in path segments
+    const pathSegments = pathLower.split(/[/\\]/)
+    let segmentMatchCount = 0
+    for (const segment of pathSegments) {
+      if (this.isFuzzyMatch(segment, queryLower)) {
+        segmentMatchCount++
+      }
+    }
+    score += segmentMatchCount * FileStorage.SCORE_SEGMENT_MATCH
+
+    // Bonus for filename starting with query (stronger than generic "contains")
+    if (fileNameLower.startsWith(queryLower)) {
+      score += FileStorage.SCORE_FILENAME_STARTS
+    } else if (fileNameLower.includes(queryLower)) {
+      // Bonus for exact substring match in filename (e.g., "updater" in "RCUpdater.js")
+      score += FileStorage.SCORE_FILENAME_CONTAINS
+    }
+
+    // Calculate consecutive match bonus
+    let i = 0
+    let j = 0
+    let consecutiveCount = 0
+    let maxConsecutive = 0
+
+    while (i < pathLower.length && j < queryLower.length) {
+      if (pathLower[i] === queryLower[j]) {
+        consecutiveCount++
+        maxConsecutive = Math.max(maxConsecutive, consecutiveCount)
+        j++
+      } else {
+        consecutiveCount = 0
+      }
+      i++
+    }
+    score += maxConsecutive * FileStorage.SCORE_CONSECUTIVE_CHAR
+
+    // Bonus for word boundary matches (e.g., "upd" matches start of "update")
+    // Only count once to avoid inflating scores for paths with repeated patterns
+    const boundaryPrefix = queryLower.slice(0, Math.min(3, queryLower.length))
+    const words = pathLower.split(/[/\\._-]/)
+    for (const word of words) {
+      if (word.startsWith(boundaryPrefix)) {
+        score += FileStorage.SCORE_WORD_BOUNDARY
+        break
+      }
+    }
+
+    // Penalty for longer paths (prefer shorter, more specific matches)
+    // Use logarithmic scaling to prevent long paths from dominating the score
+    // A 50-char path gets ~-16 penalty, 100-char gets ~-18, 200-char gets ~-21
+    score -= Math.log(filePath.length + 1) * FileStorage.PATH_LENGTH_PENALTY_FACTOR
+
+    return score
+  }
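A rough comparison of how the bonuses combine for the query "updater" (paths hypothetical; numbers approximate, and the private method is referenced purely for illustration):

// Illustrative only:
// 'src/updater.ts'               -> segment + starts-with + long consecutive run + boundary, small length penalty
// 'src/core/updateController.ts' -> segment + consecutive run + boundary, larger length penalty
// so the shorter, exact-prefix filename ranks first.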
+
+  /**
+   * Convert query to glob pattern for ripgrep pre-filtering
+   * e.g., "updater" -> "*u*p*d*a*t*e*r*"
+   */
+  private queryToGlobPattern(query: string): string {
+    // Escape special glob characters (including ! for negation)
+    const escaped = query.replace(/[[\]{}()*+?.,\\^$|#!]/g, '\\$&')
+    // Convert to fuzzy glob: each char separated by *
+    return '*' + escaped.split('').join('*') + '*'
+  }
+
+  /**
+   * Greedy substring match: check if all characters in query can be matched
+   * by finding consecutive substrings in text (not necessarily single chars)
+   * e.g., "updatercontroller" matches "updateController" by:
+   * "update" + "r" (from Controller) + "controller"
+   */
+  private isGreedySubstringMatch(text: string, query: string): boolean {
+    const textLower = text.toLowerCase()
+    const queryLower = query.toLowerCase()
+
+    let queryIndex = 0
+    let searchStart = 0
+
+    while (queryIndex < queryLower.length) {
+      // Try to find the longest matching substring starting at queryIndex
+      let bestMatchLen = 0
+      let bestMatchPos = -1
+
+      for (let len = queryLower.length - queryIndex; len >= 1; len--) {
+        const substr = queryLower.slice(queryIndex, queryIndex + len)
+        const foundAt = textLower.indexOf(substr, searchStart)
+        if (foundAt !== -1) {
+          bestMatchLen = len
+          bestMatchPos = foundAt
+          break // Found longest possible match
+        }
+      }
+
+      if (bestMatchLen === 0) {
+        // No substring match found, query cannot be matched
+        return false
+      }
+
+      queryIndex += bestMatchLen
+      searchStart = bestMatchPos + bestMatchLen
+    }
+
+    return true
+  }
+
+  /**
+   * Calculate greedy substring match score (higher is better)
+   * Rewards: fewer match fragments, shorter match span, matches in filename
+   */
+  private getGreedyMatchScore(filePath: string, query: string): number {
+    const textLower = filePath.toLowerCase()
+    const queryLower = query.toLowerCase()
+    const fileName = filePath.split('/').pop() || ''
+    const fileNameLower = fileName.toLowerCase()
+
+    let queryIndex = 0
+    let searchStart = 0
+    let fragmentCount = 0
+    let firstMatchPos = -1
+    let lastMatchEnd = 0
+
+    while (queryIndex < queryLower.length) {
+      let bestMatchLen = 0
+      let bestMatchPos = -1
+
+      for (let len = queryLower.length - queryIndex; len >= 1; len--) {
+        const substr = queryLower.slice(queryIndex, queryIndex + len)
+        const foundAt = textLower.indexOf(substr, searchStart)
+        if (foundAt !== -1) {
+          bestMatchLen = len
+          bestMatchPos = foundAt
+          break
+        }
+      }
+
+      if (bestMatchLen === 0) {
+        return -Infinity // No match
+      }
+
+      fragmentCount++
+      if (firstMatchPos === -1) firstMatchPos = bestMatchPos
+      lastMatchEnd = bestMatchPos + bestMatchLen
+      queryIndex += bestMatchLen
+      searchStart = lastMatchEnd
+    }
+
+    const matchSpan = lastMatchEnd - firstMatchPos
+    let score = 0
+
+    // Fewer fragments = better (single continuous match is best)
+    // Max bonus when fragmentCount=1, decreases as fragments increase
+    score += Math.max(0, 100 - (fragmentCount - 1) * 30)
+
+    // Shorter span relative to query length = better (tighter match)
+    // Perfect match: span equals query length
+    const spanRatio = queryLower.length / matchSpan
+    score += spanRatio * 50
+
+    // Bonus for match in filename
+    if (this.isGreedySubstringMatch(fileNameLower, queryLower)) {
+      score += 80
+    }
+
+    // Penalty for longer paths
+    score -= Math.log(filePath.length + 1) * 4
+
+    return score
+  }
+
+  /**
+   * Build common ripgrep arguments for file listing
+   */
+  private buildRipgrepBaseArgs(options: Required<DirectoryListOptions>, resolvedPath: string): string[] {
+    const args: string[] = ['--files']
+
+    // Handle hidden files
+    if (!options.includeHidden) {
@ -1068,82 +1295,74 @@ class FileStorage {
|
||||
args.push('--max-depth', options.maxDepth.toString())
|
||||
}
|
||||
|
||||
// Handle max count
|
||||
if (options.maxEntries > 0) {
|
||||
args.push('--max-count', options.maxEntries.toString())
|
||||
}
|
||||
|
||||
// Add search pattern (search in content)
|
||||
args.push(options.searchPattern)
|
||||
|
||||
// Add the directory path
|
||||
args.push(resolvedPath)
|
||||
|
||||
const { exitCode, output } = await executeRipgrep(args)
|
||||
|
||||
// Exit code 0 means files found, 1 means no files found (still success), 2+ means error
|
||||
if (exitCode >= 2) {
|
||||
throw new Error(`Ripgrep failed with exit code ${exitCode}: ${output}`)
|
||||
}
|
||||
|
||||
// Parse ripgrep output (already sorted by relevance)
|
||||
const results = output
|
||||
.split('\n')
|
||||
.filter((line) => line.trim())
|
||||
.map((line) => line.replace(/\\/g, '/'))
|
||||
.slice(0, options.maxEntries)
|
||||
|
||||
return results
|
||||
return args
|
||||
}
|
||||
|
||||
private async listDirectoryWithRipgrep(
|
||||
resolvedPath: string,
|
||||
options: Required<DirectoryListOptions>
|
||||
): Promise<string[]> {
|
||||
const maxEntries = options.maxEntries
|
||||
// Fuzzy search mode: use ripgrep glob for pre-filtering, then score in JS
|
||||
if (options.fuzzy && options.searchPattern && options.searchPattern !== '.') {
|
||||
const args = this.buildRipgrepBaseArgs(options, resolvedPath)
|
||||
|
||||
// Step 1: Search by filename first
|
||||
// Insert glob pattern before the path (last element)
|
||||
const globPattern = this.queryToGlobPattern(options.searchPattern)
|
||||
args.splice(args.length - 1, 0, '--iglob', globPattern)
|
||||
|
||||
const { exitCode, output } = await executeRipgrep(args)
|
||||
|
||||
if (exitCode >= 2) {
|
||||
throw new Error(`Ripgrep failed with exit code ${exitCode}: ${output}`)
|
||||
}
|
||||
|
||||
const filteredFiles = output
|
||||
.split('\n')
|
||||
.filter((line) => line.trim())
|
||||
.map((line) => line.replace(/\\/g, '/'))
|
||||
|
||||
// If fuzzy glob found results, validate fuzzy match, sort and return
|
||||
if (filteredFiles.length > 0) {
|
||||
return filteredFiles
|
||||
.filter((file) => this.isFuzzyMatch(file, options.searchPattern))
|
||||
.map((file) => ({ file, score: this.getFuzzyMatchScore(file, options.searchPattern) }))
|
||||
.sort((a, b) => b.score - a.score)
|
||||
.slice(0, options.maxEntries)
|
||||
.map((item) => item.file)
|
||||
}
|
||||
|
||||
// Fallback: if no results, try greedy substring match on all files
|
||||
logger.debug('Fuzzy glob returned no results, falling back to greedy substring match')
|
||||
const fallbackArgs = this.buildRipgrepBaseArgs(options, resolvedPath)
|
||||
|
||||
const fallbackResult = await executeRipgrep(fallbackArgs)
|
||||
|
||||
if (fallbackResult.exitCode >= 2) {
|
||||
return []
|
||||
}
|
||||
|
||||
const allFiles = fallbackResult.output
|
||||
.split('\n')
|
||||
.filter((line) => line.trim())
|
||||
.map((line) => line.replace(/\\/g, '/'))
|
||||
|
||||
const greedyMatched = allFiles.filter((file) => this.isGreedySubstringMatch(file, options.searchPattern))
|
||||
|
||||
return greedyMatched
|
||||
.map((file) => ({ file, score: this.getGreedyMatchScore(file, options.searchPattern) }))
|
||||
.sort((a, b) => b.score - a.score)
|
||||
.slice(0, options.maxEntries)
|
||||
.map((item) => item.file)
|
||||
}
|
||||
|
||||
    // Fallback: search by filename only (non-fuzzy mode)
    logger.debug('Searching by filename pattern', { pattern: options.searchPattern, path: resolvedPath })
    const filenameResults = await this.searchByFilename(resolvedPath, options)

    logger.debug('Found matches by filename', { count: filenameResults.length })

    // If we have enough filename matches, return them
    if (filenameResults.length >= maxEntries) {
      return filenameResults.slice(0, maxEntries)
    }

    // Step 2: If filename matches are less than maxEntries, search by content to fill up
    logger.debug('Filename matches insufficient, searching by content to fill up', {
      filenameCount: filenameResults.length,
      needed: maxEntries - filenameResults.length
    })

    // Adjust maxEntries for content search to get enough results
    const contentOptions = {
      ...options,
      maxEntries: maxEntries - filenameResults.length + 20 // Request extra to account for duplicates
    }

    const contentResults = await this.searchByContent(resolvedPath, contentOptions)

    logger.debug('Found matches by content', { count: contentResults.length })

    // Combine results: filename matches first, then content matches (deduplicated)
    const combined = [...filenameResults]
    const filenameSet = new Set(filenameResults)

    for (const filePath of contentResults) {
      if (!filenameSet.has(filePath)) {
        combined.push(filePath)
        if (combined.length >= maxEntries) {
          break
        }
      }
    }

    logger.debug('Combined results', { total: combined.length, filenameCount: filenameResults.length })
    return combined.slice(0, maxEntries)
    return filenameResults.slice(0, options.maxEntries)
  }

  public validateNotesDirectory = async (_: Electron.IpcMainInvokeEvent, dirPath: string): Promise<boolean> => {
@@ -1246,8 +1465,8 @@ class FileStorage {
      })

      if (filePath) {
        const base64Data = data.replace(/^data:image\/png;base64,/, '')
        fs.writeFileSync(filePath, base64Data, 'base64')
        const parseResult = parseDataUrl(data)
        fs.writeFileSync(filePath, parseResult?.data ?? data, 'base64')
      }
    } catch (error) {
      logger.error('[IPC - Error] An error occurred saving the image:', error as Error)
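// Sketch (editor's note, not part of this diff): parseDataUrl is a project helper whose body
// is outside this hunk. The change above uses it so any data-URL MIME type is handled, not
// just image/png. Assuming it splits a data URL into MIME type and base64 payload:
type ParsedDataUrlSketch = { mimeType: string; data: string }
const parseDataUrlSketch = (dataUrl: string): ParsedDataUrlSketch | null => {
  // Matches e.g. "data:image/webp;base64,UklGR..." and captures both parts.
  const match = /^data:([^;,]+);base64,(.+)$/.exec(dataUrl)
  return match ? { mimeType: match[1], data: match[2] } : null
}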
@@ -1317,7 +1536,7 @@ class FileStorage {
      await fs.promises.writeFile(destPath, buffer)

      const stats = await fs.promises.stat(destPath)
      const fileType = getFileType(ext)
      const fileType = await this.getFileType(destPath)

      return {
        id: uuid,
@@ -1604,6 +1823,10 @@ class FileStorage {
  }

  public isTextFile = async (_: Electron.IpcMainInvokeEvent, filePath: string): Promise<boolean> => {
    return this._isTextFile(filePath)
  }

  private _isTextFile = async (filePath: string): Promise<boolean> => {
    try {
      const isBinary = await isBinaryFile(filePath)
      if (isBinary) {
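// Sketch (editor's note, not part of this diff): the hunk above is truncated after the binary
// check. isBinaryFile comes from the isbinaryfile npm package; a plausible completion of
// _isTextFile (assumed, not the committed code) inverts the check and fails closed:
const isTextFileSketch = async (filePath: string): Promise<boolean> => {
  try {
    const isBinary = await isBinaryFile(filePath)
    return !isBinary
  } catch {
    // Unreadable or missing files are treated as non-text.
    return false
  }
}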
src/main/services/LocalTransferService.ts (new file, 207 lines)
@@ -0,0 +1,207 @@
import { loggerService } from '@logger'
import type { LocalTransferPeer, LocalTransferState } from '@shared/config/types'
import { IpcChannel } from '@shared/IpcChannel'
import type { Browser, Service } from 'bonjour-service'
import Bonjour from 'bonjour-service'

import { windowService } from './WindowService'

const SERVICE_TYPE = 'cherrystudio'
const SERVICE_PROTOCOL = 'tcp' as const

const logger = loggerService.withContext('LocalTransferService')

type StartDiscoveryOptions = {
  resetList?: boolean
}

class LocalTransferService {
  private static instance: LocalTransferService
  private bonjour: Bonjour | null = null
  private browser: Browser | null = null
  private services = new Map<string, LocalTransferPeer>()
  private isScanning = false
  private lastScanStartedAt?: number
  private lastUpdatedAt = Date.now()
  private lastError?: string

  private constructor() {}

  public static getInstance(): LocalTransferService {
    if (!LocalTransferService.instance) {
      LocalTransferService.instance = new LocalTransferService()
    }
    return LocalTransferService.instance
  }

  public startDiscovery(options?: StartDiscoveryOptions): LocalTransferState {
    if (options?.resetList) {
      this.services.clear()
    }

    this.isScanning = true
    this.lastScanStartedAt = Date.now()
    this.lastUpdatedAt = Date.now()
    this.lastError = undefined
    this.restartBrowser()
    this.broadcastState()
    return this.getState()
  }

  public stopDiscovery(): LocalTransferState {
    if (this.browser) {
      try {
        this.browser.stop()
      } catch (error) {
        logger.warn('Failed to stop local transfer browser', error as Error)
      }
    }
    this.isScanning = false
    this.lastUpdatedAt = Date.now()
    this.broadcastState()
    return this.getState()
  }

  public getState(): LocalTransferState {
    const services = Array.from(this.services.values()).sort((a, b) => a.name.localeCompare(b.name))
    return {
      services,
      isScanning: this.isScanning,
      lastScanStartedAt: this.lastScanStartedAt,
      lastUpdatedAt: this.lastUpdatedAt,
      lastError: this.lastError
    }
  }

  public getPeerById(id: string): LocalTransferPeer | undefined {
    return this.services.get(id)
  }

  public dispose(): void {
    this.stopDiscovery()
    this.services.clear()
    this.browser?.removeAllListeners()
    this.browser = null
    if (this.bonjour) {
      try {
        this.bonjour.destroy()
      } catch (error) {
        logger.warn('Failed to destroy Bonjour instance', error as Error)
      }
      this.bonjour = null
    }
  }

  private getBonjour(): Bonjour {
    if (!this.bonjour) {
      this.bonjour = new Bonjour()
    }
    return this.bonjour
  }

  private restartBrowser(): void {
    // Clean up existing browser
    if (this.browser) {
      this.browser.removeAllListeners()
      try {
        this.browser.stop()
      } catch (error) {
        logger.warn('Error while stopping Bonjour browser', error as Error)
      }
      this.browser = null
    }

    // Destroy and recreate Bonjour instance to prevent socket leaks
    if (this.bonjour) {
      try {
        this.bonjour.destroy()
      } catch (error) {
        logger.warn('Error while destroying Bonjour instance', error as Error)
      }
      this.bonjour = null
    }

    const browser = this.getBonjour().find({ type: SERVICE_TYPE, protocol: SERVICE_PROTOCOL })
    this.browser = browser
    this.bindBrowserEvents(browser)

    try {
      browser.start()
      logger.info('Local transfer discovery started')
    } catch (error) {
      const err = error instanceof Error ? error : new Error(String(error))
      this.lastError = err.message
      logger.error('Failed to start local transfer discovery', err)
    }
  }

  private bindBrowserEvents(browser: Browser) {
    browser.on('up', (service) => {
      const peer = this.normalizeService(service)
      logger.info(`LAN peer detected: ${peer.name} (${peer.addresses.join(', ')})`)
      this.services.set(peer.id, peer)
      this.lastUpdatedAt = Date.now()
      this.broadcastState()
    })

    browser.on('down', (service) => {
      const key = this.buildServiceKey(service.fqdn || service.name, service.host, service.port)
      if (this.services.delete(key)) {
        logger.info(`LAN peer removed: ${service.name}`)
        this.lastUpdatedAt = Date.now()
        this.broadcastState()
      }
    })

    browser.on('error', (error) => {
      const err = error instanceof Error ? error : new Error(String(error))
      logger.error('Local transfer discovery error', err)
      this.lastError = err.message
      this.broadcastState()
    })
  }

  private normalizeService(service: Service): LocalTransferPeer {
    const addressCandidates = [...(service.addresses || []), service.referer?.address].filter(
      (value): value is string => typeof value === 'string' && value.length > 0
    )
    const addresses = Array.from(new Set(addressCandidates))
    const txtEntries = Object.entries(service.txt || {})
    const txt =
      txtEntries.length > 0
        ? Object.fromEntries(
            txtEntries.map(([key, value]) => [key, value === undefined || value === null ? '' : String(value)])
          )
        : undefined

    const peer: LocalTransferPeer = {
      id: this.buildServiceKey(service.fqdn || service.name, service.host, service.port),
      name: service.name,
      host: service.host,
      fqdn: service.fqdn,
      port: service.port,
      type: service.type,
      protocol: service.protocol,
      addresses,
      txt,
      updatedAt: Date.now()
    }

    return peer
  }

  private buildServiceKey(name?: string, host?: string, port?: number): string {
    const raw = [name, host, port?.toString()].filter(Boolean).join('-')
    return raw || `service-${Date.now()}`
  }

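  // Worked example (editor's note): because the parts are filtered through Boolean and joined
  // with '-', a peer advertised with fqdn "Cherry._cherrystudio._tcp.local" on host "mac.local"
  // and port 53317 (port value hypothetical) gets the key
  // "Cherry._cherrystudio._tcp.local-mac.local-53317"; only when all parts are missing does the
  // timestamp fallback apply.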
  private broadcastState() {
    const mainWindow = windowService.getMainWindow()
    if (!mainWindow || mainWindow.isDestroyed()) {
      return
    }
    mainWindow.webContents.send(IpcChannel.LocalTransfer_ServicesUpdated, this.getState())
  }
}

export const localTransferService = LocalTransferService.getInstance()
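// Sketch (editor's note, not part of this diff): this service only browses; the advertising
// side is not among the visible files. With bonjour-service, a peer could announce itself
// roughly like this (service name, port, and txt payload are assumptions):
const advertiseSketch = () => {
  const bonjour = new Bonjour()
  return bonjour.publish({
    name: 'Cherry Studio',
    type: SERVICE_TYPE, // 'cherrystudio', matching the find() filter used by the browser above
    port: 53317, // hypothetical transfer port
    txt: { version: '1.0' }
  })
}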
@@ -6,7 +6,7 @@ import { loggerService } from '@logger'
import { createInMemoryMCPServer } from '@main/mcpServers/factory'
import { makeSureDirExists, removeEnvProxy } from '@main/utils'
import { buildFunctionCallToolName } from '@main/utils/mcp'
import { getBinaryName, getBinaryPath } from '@main/utils/process'
import { findCommandInShellEnv, getBinaryName, getBinaryPath, isBinaryExists } from '@main/utils/process'
import getLoginShellEnvironment from '@main/utils/shell-env'
import { TraceMethod, withSpanFunc } from '@mcp-trace/trace-core'
import { Client } from '@modelcontextprotocol/sdk/client/index.js'
@@ -249,6 +249,26 @@ class McpService {
    StdioClientTransport | SSEClientTransport | InMemoryTransport | StreamableHTTPClientTransport
  > => {
    // Create appropriate transport based on configuration

    // Special case for nowledgeMem - uses HTTP transport instead of in-memory
    if (isBuiltinMCPServer(server) && server.name === BuiltinMCPServerNames.nowledgeMem) {
      const nowledgeMemUrl = 'http://127.0.0.1:14242/mcp'
      const options: StreamableHTTPClientTransportOptions = {
        fetch: async (url, init) => {
          return net.fetch(typeof url === 'string' ? url : url.toString(), init)
        },
        requestInit: {
          headers: {
            ...defaultAppHeaders(),
            APP: 'Cherry Studio'
          }
        },
        authProvider
      }
      getServerLogger(server).debug(`Using StreamableHTTPClientTransport for ${server.name}`)
      return new StreamableHTTPClientTransport(new URL(nowledgeMemUrl), options)
    }

    if (isBuiltinMCPServer(server) && server.name !== BuiltinMCPServerNames.mcpAutoInstall) {
      getServerLogger(server).debug(`Using in-memory transport`)
      const [clientTransport, serverTransport] = InMemoryTransport.createLinkedPair()
@@ -298,6 +318,10 @@ class McpService {
    } else if (server.command) {
      let cmd = server.command

      // Get login shell environment first - needed for command detection and server execution
      // Note: getLoginShellEnvironment() is memoized, so subsequent calls are fast
      const loginShellEnv = await getLoginShellEnvironment()

      // For DXT servers, use resolved configuration with platform overrides and variable substitution
      if (server.dxtPath) {
        const resolvedConfig = this.dxtService.getResolvedMcpConfig(server.dxtPath)
@@ -319,18 +343,45 @@ class McpService {
      }

      if (server.command === 'npx') {
        cmd = await getBinaryPath('bun')
        getServerLogger(server).debug(`Using command`, { command: cmd })
        // First, check if npx is available in user's shell environment
        const npxPath = await findCommandInShellEnv('npx', loginShellEnv)

        // add -x to args if args exist
        if (args && args.length > 0) {
          if (!args.includes('-y')) {
            args.unshift('-y')
          }
          if (!args.includes('x')) {
            args.unshift('x')
        if (npxPath) {
          // Use system npx
          cmd = npxPath
          getServerLogger(server).debug(`Using system npx`, { command: cmd })
        } else {
          // System npx not found, try bundled bun as fallback
          getServerLogger(server).debug(`System npx not found, checking for bundled bun`)

          if (await isBinaryExists('bun')) {
            // Fall back to bundled bun
            cmd = await getBinaryPath('bun')
            getServerLogger(server).info(`Using bundled bun as fallback (npx not found in PATH)`, {
              command: cmd
            })

            // Transform args for bun x format
            if (args && args.length > 0) {
              if (!args.includes('-y')) {
                args.unshift('-y')
              }
              if (!args.includes('x')) {
                args.unshift('x')
              }
            }
          } else {
            // Neither npx nor bun available
            throw new Error(
              'npx not found in PATH and bundled bun is not available. This may indicate an installation issue.\n' +
                'Please either:\n' +
                '1. Install Node.js (which includes npx) from https://nodejs.org\n' +
                '2. Run the MCP dependencies installer from Settings\n' +
                '3. Restart the application if you recently installed Node.js'
            )
          }
        }

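        // Sketch (editor's note, not part of this diff): findCommandInShellEnv comes from
        // '@main/utils/process' and is not shown. A plausible version walks PATH from the
        // login-shell environment and returns the first executable hit (helper name and
        // exact behavior are assumptions):
        async function findCommandInShellEnvSketch(
          command: string,
          env: Record<string, string | undefined>
        ): Promise<string | null> {
          const path = await import('node:path')
          const fs = await import('node:fs/promises')
          const exts = process.platform === 'win32' ? ['.exe', '.cmd', '.bat'] : ['']
          for (const dir of (env.PATH ?? '').split(path.delimiter).filter(Boolean)) {
            for (const ext of exts) {
              const candidate = path.join(dir, command + ext)
              try {
                await fs.access(candidate, fs.constants.X_OK)
                return candidate
              } catch {
                // Not here; keep scanning PATH entries.
              }
            }
          }
          return null
        }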
        if (server.registryUrl) {
          server.env = {
            ...server.env,
@@ -345,7 +396,35 @@ class McpService {
          }
        }
      } else if (server.command === 'uvx' || server.command === 'uv') {
        cmd = await getBinaryPath(server.command)
        // First, check if uvx/uv is available in user's shell environment
        const uvPath = await findCommandInShellEnv(server.command, loginShellEnv)

        if (uvPath) {
          // Use system uvx/uv
          cmd = uvPath
          getServerLogger(server).debug(`Using system ${server.command}`, { command: cmd })
        } else {
          // System command not found, try bundled version as fallback
          getServerLogger(server).debug(`System ${server.command} not found, checking for bundled version`)

          if (await isBinaryExists(server.command)) {
            // Fall back to bundled version
            cmd = await getBinaryPath(server.command)
            getServerLogger(server).info(`Using bundled ${server.command} as fallback (not found in PATH)`, {
              command: cmd
            })
          } else {
            // Neither system nor bundled available
            throw new Error(
              `${server.command} not found in PATH and bundled version is not available. This may indicate an installation issue.\n` +
                'Please either:\n' +
                '1. Install uv from https://github.com/astral-sh/uv\n' +
                '2. Run the MCP dependencies installer from Settings\n' +
                `3. Restart the application if you recently installed ${server.command}`
            )
          }
        }

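        // Sketch (editor's note, not part of this diff): isBinaryExists and getBinaryPath come
        // from '@main/utils/process'. Assuming they resolve against an app-managed bin
        // directory (location and naming below are guesses for illustration):
        const getBinaryPathSketch = async (name: string): Promise<string> => {
          const path = await import('node:path')
          const os = await import('node:os')
          const exe = process.platform === 'win32' ? `${name}.exe` : name
          return path.join(os.homedir(), '.cherrystudio', 'bin', exe)
        }
        const isBinaryExistsSketch = async (name: string): Promise<boolean> => {
          const fs = await import('node:fs/promises')
          return fs
            .access(await getBinaryPathSketch(name))
            .then(() => true)
            .catch(() => false)
        }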
        if (server.registryUrl) {
          server.env = {
            ...server.env,
@@ -356,8 +435,6 @@ class McpService {
        }
      }

      getServerLogger(server).debug(`Starting server`, { command: cmd, args })
      // Logger.info(`[MCP] Environment variables for server:`, server.env)
      const loginShellEnv = await getLoginShellEnvironment()

      // Bun does not support proxy: https://github.com/oven-sh/bun/issues/16812
      if (cmd.includes('bun')) {
@@ -708,7 +785,7 @@ class McpService {
          ...tool,
          inputSchema: z.parse(MCPToolInputSchema, tool.inputSchema),
          outputSchema: tool.outputSchema ? z.parse(MCPToolOutputSchema, tool.outputSchema) : undefined,
          id: buildFunctionCallToolName(server.name, tool.name, server.id),
          id: buildFunctionCallToolName(server.name, tool.name),
          serverId: server.id,
          serverName: server.name,
          type: 'mcp'
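// Sketch (editor's note, not part of this diff): the change above drops server.id from the
// name-building inputs, so the generated tool id depends only on server and tool names while
// serverId still travels as its own field. Assuming simple sanitization and joining (the
// exact format is an assumption, not the committed implementation):
const buildFunctionCallToolNameSketch = (serverName: string, toolName: string): string => {
  const sanitize = (s: string) => s.replace(/[^a-zA-Z0-9_-]/g, '_')
  // Keep within the 64-character limit common to function-calling APIs.
  return `${sanitize(serverName)}-${sanitize(toolName)}`.slice(0, 64)
}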
Some files were not shown because too many files have changed in this diff.