Mirror of https://github.com/CherryHQ/cherry-studio.git (synced 2025-12-19 14:41:24 +08:00)
Merge branch 'main' into v2
Commit 31484b7cd9
293  .github/workflows/sync-to-gitcode.yml  (vendored, new file)
@@ -0,0 +1,293 @@
name: Sync Release to GitCode

on:
  release:
    types: [published]
  workflow_dispatch:
    inputs:
      tag:
        description: 'Release tag (e.g. v1.0.0)'
        required: true
      clean:
        description: 'Clean node_modules before build'
        type: boolean
        default: false

permissions:
  contents: read

jobs:
  build-and-sync-to-gitcode:
    runs-on: [self-hosted, windows-signing]
    steps:
      - name: Get tag name
        id: get-tag
        shell: bash
        run: |
          if [ "${{ github.event_name }}" = "workflow_dispatch" ]; then
            echo "tag=${{ github.event.inputs.tag }}" >> $GITHUB_OUTPUT
          else
            echo "tag=${{ github.event.release.tag_name }}" >> $GITHUB_OUTPUT
          fi

      - name: Check out Git repository
        uses: actions/checkout@v6
        with:
          fetch-depth: 0
          ref: ${{ steps.get-tag.outputs.tag }}

      - name: Set package.json version
        shell: bash
        run: |
          TAG="${{ steps.get-tag.outputs.tag }}"
          VERSION="${TAG#v}"
          npm version "$VERSION" --no-git-tag-version --allow-same-version

      - name: Install Node.js
        uses: actions/setup-node@v6
        with:
          node-version: 22

      - name: Install corepack
        shell: bash
        run: corepack enable && corepack prepare yarn@4.9.1 --activate

      - name: Clean node_modules
        if: ${{ github.event.inputs.clean == 'true' }}
        shell: bash
        run: rm -rf node_modules

      - name: Install Dependencies
        shell: bash
        run: yarn install

      - name: Build Windows with code signing
        shell: bash
        run: yarn build:win
        env:
          WIN_SIGN: true
          CHERRY_CERT_PATH: ${{ secrets.CHERRY_CERT_PATH }}
          CHERRY_CERT_KEY: ${{ secrets.CHERRY_CERT_KEY }}
          CHERRY_CERT_CSP: ${{ secrets.CHERRY_CERT_CSP }}
          GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
          NODE_OPTIONS: --max-old-space-size=8192
          MAIN_VITE_CHERRYAI_CLIENT_SECRET: ${{ secrets.MAIN_VITE_CHERRYAI_CLIENT_SECRET }}
          MAIN_VITE_MINERU_API_KEY: ${{ secrets.MAIN_VITE_MINERU_API_KEY }}
          RENDERER_VITE_AIHUBMIX_SECRET: ${{ secrets.RENDERER_VITE_AIHUBMIX_SECRET }}
          RENDERER_VITE_PPIO_APP_SECRET: ${{ secrets.RENDERER_VITE_PPIO_APP_SECRET }}

      - name: List built Windows artifacts
        shell: bash
        run: |
          echo "Built Windows artifacts:"
          ls -la dist/*.exe dist/*.blockmap dist/latest*.yml

      - name: Download GitHub release assets
        shell: bash
        env:
          GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
          TAG_NAME: ${{ steps.get-tag.outputs.tag }}
        run: |
          echo "Downloading release assets for $TAG_NAME..."
          mkdir -p release-assets
          cd release-assets

          # Download all assets from the release
          gh release download "$TAG_NAME" \
            --repo "${{ github.repository }}" \
            --pattern "*" \
            --skip-existing

          echo "Downloaded GitHub release assets:"
          ls -la

      - name: Replace Windows files with signed versions
        shell: bash
        run: |
          echo "Replacing Windows files with signed versions..."

          # Verify signed files exist first
          if ! ls dist/*.exe 1>/dev/null 2>&1; then
            echo "ERROR: No signed .exe files found in dist/"
            exit 1
          fi

          # Remove unsigned Windows files from downloaded assets
          # *.exe, *.exe.blockmap, latest.yml (Windows only)
          rm -f release-assets/*.exe release-assets/*.exe.blockmap release-assets/latest.yml 2>/dev/null || true

          # Copy signed Windows files with error checking
          cp dist/*.exe release-assets/ || { echo "ERROR: Failed to copy .exe files"; exit 1; }
          cp dist/*.exe.blockmap release-assets/ || { echo "ERROR: Failed to copy .blockmap files"; exit 1; }
          cp dist/latest.yml release-assets/ || { echo "ERROR: Failed to copy latest.yml"; exit 1; }

          echo "Final release assets:"
          ls -la release-assets/

      - name: Get release info
        id: release-info
        shell: bash
        env:
          GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
          TAG_NAME: ${{ steps.get-tag.outputs.tag }}
          LANG: C.UTF-8
          LC_ALL: C.UTF-8
        run: |
          # Always use gh cli to avoid special character issues
          RELEASE_NAME=$(gh release view "$TAG_NAME" --repo "${{ github.repository }}" --json name -q '.name')
          # Use delimiter to safely handle special characters in release name
          {
            echo 'name<<EOF'
            echo "$RELEASE_NAME"
            echo 'EOF'
          } >> $GITHUB_OUTPUT
          # Extract releaseNotes from electron-builder.yml (from releaseNotes: | to end of file, remove 4-space indent)
          sed -n '/releaseNotes: |/,$ { /releaseNotes: |/d; s/^    //; p }' electron-builder.yml > release_body.txt

      - name: Create GitCode release and upload files
        shell: bash
        env:
          GITCODE_TOKEN: ${{ secrets.GITCODE_TOKEN }}
          GITCODE_OWNER: ${{ vars.GITCODE_OWNER }}
          GITCODE_REPO: ${{ vars.GITCODE_REPO }}
          GITCODE_API_URL: ${{ vars.GITCODE_API_URL }}
          TAG_NAME: ${{ steps.get-tag.outputs.tag }}
          RELEASE_NAME: ${{ steps.release-info.outputs.name }}
          LANG: C.UTF-8
          LC_ALL: C.UTF-8
        run: |
          # Validate required environment variables
          if [ -z "$GITCODE_TOKEN" ]; then
            echo "ERROR: GITCODE_TOKEN is not set"
            exit 1
          fi
          if [ -z "$GITCODE_OWNER" ]; then
            echo "ERROR: GITCODE_OWNER is not set"
            exit 1
          fi
          if [ -z "$GITCODE_REPO" ]; then
            echo "ERROR: GITCODE_REPO is not set"
            exit 1
          fi

          API_URL="${GITCODE_API_URL:-https://api.gitcode.com/api/v5}"

          echo "Creating GitCode release..."
          echo "Tag: $TAG_NAME"
          echo "Repo: $GITCODE_OWNER/$GITCODE_REPO"

          # Step 1: Create release
          # Use --rawfile to read body directly from file, avoiding shell variable encoding issues
          jq -n \
            --arg tag "$TAG_NAME" \
            --arg name "$RELEASE_NAME" \
            --rawfile body release_body.txt \
            '{
              tag_name: $tag,
              name: $name,
              body: $body,
              target_commitish: "main"
            }' > /tmp/release_payload.json

          RELEASE_RESPONSE=$(curl -s -w "\n%{http_code}" -X POST \
            --connect-timeout 30 --max-time 60 \
            "${API_URL}/repos/${GITCODE_OWNER}/${GITCODE_REPO}/releases" \
            -H "Content-Type: application/json; charset=utf-8" \
            -H "Authorization: Bearer ${GITCODE_TOKEN}" \
            --data-binary "@/tmp/release_payload.json")

          HTTP_CODE=$(echo "$RELEASE_RESPONSE" | tail -n1)
          RESPONSE_BODY=$(echo "$RELEASE_RESPONSE" | sed '$d')

          if [ "$HTTP_CODE" -ge 200 ] && [ "$HTTP_CODE" -lt 300 ]; then
            echo "Release created successfully"
          else
            echo "Warning: Release creation returned HTTP $HTTP_CODE"
            echo "$RESPONSE_BODY"
            exit 1
          fi

          # Step 2: Upload files to release
          echo "Uploading files to GitCode release..."

          # Function to upload a single file with retry
          upload_file() {
            local file="$1"
            local filename=$(basename "$file")
            local max_retries=3
            local retry=0

            echo "Uploading: $filename"

            # URL encode the filename
            encoded_filename=$(printf '%s' "$filename" | jq -sRr @uri)

            while [ $retry -lt $max_retries ]; do
              # Get upload URL
              UPLOAD_INFO=$(curl -s --connect-timeout 30 --max-time 60 \
                -H "Authorization: Bearer ${GITCODE_TOKEN}" \
                "${API_URL}/repos/${GITCODE_OWNER}/${GITCODE_REPO}/releases/${TAG_NAME}/upload_url?file_name=${encoded_filename}")

              UPLOAD_URL=$(echo "$UPLOAD_INFO" | jq -r '.url // empty')

              if [ -n "$UPLOAD_URL" ]; then
                # Write headers to temp file to avoid shell escaping issues
                echo "$UPLOAD_INFO" | jq -r '.headers | to_entries[] | "header = \"" + .key + ": " + .value + "\""' > /tmp/upload_headers.txt

                # Upload file using PUT with headers from file
                UPLOAD_RESPONSE=$(curl -s -w "\n%{http_code}" -X PUT \
                  -K /tmp/upload_headers.txt \
                  --data-binary "@${file}" \
                  "$UPLOAD_URL")

                HTTP_CODE=$(echo "$UPLOAD_RESPONSE" | tail -n1)
                RESPONSE_BODY=$(echo "$UPLOAD_RESPONSE" | sed '$d')

                if [ "$HTTP_CODE" -ge 200 ] && [ "$HTTP_CODE" -lt 300 ]; then
                  echo "  Uploaded: $filename"
                  return 0
                else
                  echo "  Failed (HTTP $HTTP_CODE), retry $((retry + 1))/$max_retries"
                  echo "  Response: $RESPONSE_BODY"
                fi
              else
                echo "  Failed to get upload URL, retry $((retry + 1))/$max_retries"
                echo "  Response: $UPLOAD_INFO"
              fi

              retry=$((retry + 1))
              [ $retry -lt $max_retries ] && sleep 3
            done

            echo "  Failed: $filename after $max_retries retries"
            exit 1
          }

          # Upload non-yml/json files first
          for file in release-assets/*; do
            if [ -f "$file" ]; then
              filename=$(basename "$file")
              if [[ ! "$filename" =~ \.(yml|yaml|json)$ ]]; then
                upload_file "$file"
              fi
            fi
          done

          # Upload yml/json files last
          for file in release-assets/*; do
            if [ -f "$file" ]; then
              filename=$(basename "$file")
              if [[ "$filename" =~ \.(yml|yaml|json)$ ]]; then
                upload_file "$file"
              fi
            fi
          done

          echo "GitCode release sync completed!"

      - name: Cleanup temp files
        if: always()
        shell: bash
        run: |
          rm -f /tmp/release_payload.json /tmp/upload_headers.txt release_body.txt
          rm -rf release-assets/
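Side note (not part of the workflow file above): the workflow_dispatch path can also be exercised from a terminal with the GitHub CLI. A minimal sketch, assuming gh is authenticated against the repository and the tag's GitHub release already exists; the tag value is only an example:

    # Re-run the GitCode sync for an existing release, wiping node_modules first
    gh workflow run sync-to-gitcode.yml --repo CherryHQ/cherry-studio -f tag=v1.7.3 -f clean=true

gh passes workflow inputs as strings ('true'/'false' for the boolean), which is why the Clean node_modules step compares github.event.inputs.clean against the string 'true'.
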
@ -1,8 +1,8 @@
|
||||
diff --git a/dist/index.js b/dist/index.js
|
||||
index bf900591bf2847a3253fe441aad24c06da19c6c1..c1d9bb6fefa2df1383339324073db0a70ea2b5a2 100644
|
||||
index 130094d194ea1e8e7d3027d07d82465741192124..4d13dcee8c962ca9ee8f1c3d748f8ffe6a3cfb47 100644
|
||||
--- a/dist/index.js
|
||||
+++ b/dist/index.js
|
||||
@@ -274,6 +274,7 @@ var openaiChatResponseSchema = (0, import_provider_utils3.lazyValidator)(
|
||||
@@ -290,6 +290,7 @@ var openaiChatResponseSchema = (0, import_provider_utils3.lazyValidator)(
|
||||
message: import_v42.z.object({
|
||||
role: import_v42.z.literal("assistant").nullish(),
|
||||
content: import_v42.z.string().nullish(),
|
||||
@ -10,7 +10,7 @@ index bf900591bf2847a3253fe441aad24c06da19c6c1..c1d9bb6fefa2df1383339324073db0a7
|
||||
tool_calls: import_v42.z.array(
|
||||
import_v42.z.object({
|
||||
id: import_v42.z.string().nullish(),
|
||||
@@ -340,6 +341,7 @@ var openaiChatChunkSchema = (0, import_provider_utils3.lazyValidator)(
|
||||
@@ -356,6 +357,7 @@ var openaiChatChunkSchema = (0, import_provider_utils3.lazyValidator)(
|
||||
delta: import_v42.z.object({
|
||||
role: import_v42.z.enum(["assistant"]).nullish(),
|
||||
content: import_v42.z.string().nullish(),
|
||||
@ -18,7 +18,7 @@ index bf900591bf2847a3253fe441aad24c06da19c6c1..c1d9bb6fefa2df1383339324073db0a7
|
||||
tool_calls: import_v42.z.array(
|
||||
import_v42.z.object({
|
||||
index: import_v42.z.number(),
|
||||
@@ -795,6 +797,13 @@ var OpenAIChatLanguageModel = class {
|
||||
@@ -814,6 +816,13 @@ var OpenAIChatLanguageModel = class {
|
||||
if (text != null && text.length > 0) {
|
||||
content.push({ type: "text", text });
|
||||
}
|
||||
@ -32,7 +32,7 @@ index bf900591bf2847a3253fe441aad24c06da19c6c1..c1d9bb6fefa2df1383339324073db0a7
|
||||
for (const toolCall of (_a = choice.message.tool_calls) != null ? _a : []) {
|
||||
content.push({
|
||||
type: "tool-call",
|
||||
@@ -876,6 +885,7 @@ var OpenAIChatLanguageModel = class {
|
||||
@@ -895,6 +904,7 @@ var OpenAIChatLanguageModel = class {
|
||||
};
|
||||
let metadataExtracted = false;
|
||||
let isActiveText = false;
|
||||
@ -40,7 +40,7 @@ index bf900591bf2847a3253fe441aad24c06da19c6c1..c1d9bb6fefa2df1383339324073db0a7
|
||||
const providerMetadata = { openai: {} };
|
||||
return {
|
||||
stream: response.pipeThrough(
|
||||
@@ -933,6 +943,21 @@ var OpenAIChatLanguageModel = class {
|
||||
@@ -952,6 +962,21 @@ var OpenAIChatLanguageModel = class {
|
||||
return;
|
||||
}
|
||||
const delta = choice.delta;
|
||||
@ -62,7 +62,7 @@ index bf900591bf2847a3253fe441aad24c06da19c6c1..c1d9bb6fefa2df1383339324073db0a7
|
||||
if (delta.content != null) {
|
||||
if (!isActiveText) {
|
||||
controller.enqueue({ type: "text-start", id: "0" });
|
||||
@@ -1045,6 +1070,9 @@ var OpenAIChatLanguageModel = class {
|
||||
@@ -1064,6 +1089,9 @@ var OpenAIChatLanguageModel = class {
|
||||
}
|
||||
},
|
||||
flush(controller) {
|
||||
@ -1,5 +1,5 @@
|
||||
diff --git a/sdk.mjs b/sdk.mjs
|
||||
index bf429a344b7d59f70aead16b639f949b07688a81..f77d50cc5d3fb04292cb3ac7fa7085d02dcc628f 100755
|
||||
index dea7766a3432a1e809f12d6daba4f2834a219689..e0b02ef73da177ba32b903887d7bbbeaa08cc6d3 100755
|
||||
--- a/sdk.mjs
|
||||
+++ b/sdk.mjs
|
||||
@@ -6250,7 +6250,7 @@ function createAbortController(maxListeners = DEFAULT_MAX_LISTENERS) {
|
||||
@ -11,7 +11,7 @@ index bf429a344b7d59f70aead16b639f949b07688a81..f77d50cc5d3fb04292cb3ac7fa7085d0
|
||||
import { createInterface } from "readline";
|
||||
|
||||
// ../src/utils/fsOperations.ts
|
||||
@@ -6619,18 +6619,11 @@ class ProcessTransport {
|
||||
@@ -6644,18 +6644,11 @@ class ProcessTransport {
|
||||
const errorMessage = isNativeBinary(pathToClaudeCodeExecutable) ? `Claude Code native binary not found at ${pathToClaudeCodeExecutable}. Please ensure Claude Code is installed via native installer or specify a valid path with options.pathToClaudeCodeExecutable.` : `Claude Code executable not found at ${pathToClaudeCodeExecutable}. Is options.pathToClaudeCodeExecutable set?`;
|
||||
throw new ReferenceError(errorMessage);
|
||||
}
|
||||
145  .yarn/patches/ollama-ai-provider-v2-npm-1.5.5-8bef249af9.patch  (vendored, new file)
@ -0,0 +1,145 @@
|
||||
diff --git a/dist/index.d.ts b/dist/index.d.ts
|
||||
index 8dd9b498050dbecd8dd6b901acf1aa8ca38a49af..ed644349c9d38fe2a66b2fb44214f7c18eb97f89 100644
|
||||
--- a/dist/index.d.ts
|
||||
+++ b/dist/index.d.ts
|
||||
@@ -4,7 +4,7 @@ import { z } from 'zod/v4';
|
||||
|
||||
type OllamaChatModelId = "athene-v2" | "athene-v2:72b" | "aya-expanse" | "aya-expanse:8b" | "aya-expanse:32b" | "codegemma" | "codegemma:2b" | "codegemma:7b" | "codellama" | "codellama:7b" | "codellama:13b" | "codellama:34b" | "codellama:70b" | "codellama:code" | "codellama:python" | "command-r" | "command-r:35b" | "command-r-plus" | "command-r-plus:104b" | "command-r7b" | "command-r7b:7b" | "deepseek-r1" | "deepseek-r1:1.5b" | "deepseek-r1:7b" | "deepseek-r1:8b" | "deepseek-r1:14b" | "deepseek-r1:32b" | "deepseek-r1:70b" | "deepseek-r1:671b" | "deepseek-coder-v2" | "deepseek-coder-v2:16b" | "deepseek-coder-v2:236b" | "deepseek-v3" | "deepseek-v3:671b" | "devstral" | "devstral:24b" | "dolphin3" | "dolphin3:8b" | "exaone3.5" | "exaone3.5:2.4b" | "exaone3.5:7.8b" | "exaone3.5:32b" | "falcon2" | "falcon2:11b" | "falcon3" | "falcon3:1b" | "falcon3:3b" | "falcon3:7b" | "falcon3:10b" | "firefunction-v2" | "firefunction-v2:70b" | "gemma" | "gemma:2b" | "gemma:7b" | "gemma2" | "gemma2:2b" | "gemma2:9b" | "gemma2:27b" | "gemma3" | "gemma3:1b" | "gemma3:4b" | "gemma3:12b" | "gemma3:27b" | "granite3-dense" | "granite3-dense:2b" | "granite3-dense:8b" | "granite3-guardian" | "granite3-guardian:2b" | "granite3-guardian:8b" | "granite3-moe" | "granite3-moe:1b" | "granite3-moe:3b" | "granite3.1-dense" | "granite3.1-dense:2b" | "granite3.1-dense:8b" | "granite3.1-moe" | "granite3.1-moe:1b" | "granite3.1-moe:3b" | "llama2" | "llama2:7b" | "llama2:13b" | "llama2:70b" | "llama3" | "llama3:8b" | "llama3:70b" | "llama3-chatqa" | "llama3-chatqa:8b" | "llama3-chatqa:70b" | "llama3-gradient" | "llama3-gradient:8b" | "llama3-gradient:70b" | "llama3.1" | "llama3.1:8b" | "llama3.1:70b" | "llama3.1:405b" | "llama3.2" | "llama3.2:1b" | "llama3.2:3b" | "llama3.2-vision" | "llama3.2-vision:11b" | "llama3.2-vision:90b" | "llama3.3" | "llama3.3:70b" | "llama4" | "llama4:16x17b" | "llama4:128x17b" | "llama-guard3" | "llama-guard3:1b" | "llama-guard3:8b" | "llava" | "llava:7b" | "llava:13b" | "llava:34b" | "llava-llama3" | "llava-llama3:8b" | "llava-phi3" | "llava-phi3:3.8b" | "marco-o1" | "marco-o1:7b" | "mistral" | "mistral:7b" | "mistral-large" | "mistral-large:123b" | "mistral-nemo" | "mistral-nemo:12b" | "mistral-small" | "mistral-small:22b" | "mixtral" | "mixtral:8x7b" | "mixtral:8x22b" | "moondream" | "moondream:1.8b" | "openhermes" | "openhermes:v2.5" | "nemotron" | "nemotron:70b" | "nemotron-mini" | "nemotron-mini:4b" | "olmo" | "olmo:7b" | "olmo:13b" | "opencoder" | "opencoder:1.5b" | "opencoder:8b" | "phi3" | "phi3:3.8b" | "phi3:14b" | "phi3.5" | "phi3.5:3.8b" | "phi4" | "phi4:14b" | "qwen" | "qwen:7b" | "qwen:14b" | "qwen:32b" | "qwen:72b" | "qwen:110b" | "qwen2" | "qwen2:0.5b" | "qwen2:1.5b" | "qwen2:7b" | "qwen2:72b" | "qwen2.5" | "qwen2.5:0.5b" | "qwen2.5:1.5b" | "qwen2.5:3b" | "qwen2.5:7b" | "qwen2.5:14b" | "qwen2.5:32b" | "qwen2.5:72b" | "qwen2.5-coder" | "qwen2.5-coder:0.5b" | "qwen2.5-coder:1.5b" | "qwen2.5-coder:3b" | "qwen2.5-coder:7b" | "qwen2.5-coder:14b" | "qwen2.5-coder:32b" | "qwen3" | "qwen3:0.6b" | "qwen3:1.7b" | "qwen3:4b" | "qwen3:8b" | "qwen3:14b" | "qwen3:30b" | "qwen3:32b" | "qwen3:235b" | "qwq" | "qwq:32b" | "sailor2" | "sailor2:1b" | "sailor2:8b" | "sailor2:20b" | "shieldgemma" | "shieldgemma:2b" | "shieldgemma:9b" | "shieldgemma:27b" | "smallthinker" | "smallthinker:3b" | "smollm" | "smollm:135m" | "smollm:360m" | "smollm:1.7b" | "tinyllama" | "tinyllama:1.1b" | "tulu3" | "tulu3:8b" | "tulu3:70b" | (string & {});
|
||||
declare const ollamaProviderOptions: z.ZodObject<{
|
||||
- think: z.ZodOptional<z.ZodBoolean>;
|
||||
+ think: z.ZodOptional<z.ZodUnion<[z.ZodBoolean, z.ZodEnum<['low', 'medium', 'high']>]>>;
|
||||
options: z.ZodOptional<z.ZodObject<{
|
||||
num_ctx: z.ZodOptional<z.ZodNumber>;
|
||||
repeat_last_n: z.ZodOptional<z.ZodNumber>;
|
||||
@@ -27,9 +27,11 @@ interface OllamaCompletionSettings {
|
||||
* the model's thinking from the model's output. When disabled, the model will not think
|
||||
* and directly output the content.
|
||||
*
|
||||
+ * For gpt-oss models, you can also use 'low', 'medium', or 'high' to control the depth of thinking.
|
||||
+ *
|
||||
* Only supported by certain models like DeepSeek R1 and Qwen 3.
|
||||
*/
|
||||
- think?: boolean;
|
||||
+ think?: boolean | 'low' | 'medium' | 'high';
|
||||
/**
|
||||
* Echo back the prompt in addition to the completion.
|
||||
*/
|
||||
@@ -146,7 +148,7 @@ declare const ollamaEmbeddingProviderOptions: z.ZodObject<{
|
||||
type OllamaEmbeddingProviderOptions = z.infer<typeof ollamaEmbeddingProviderOptions>;
|
||||
|
||||
declare const ollamaCompletionProviderOptions: z.ZodObject<{
|
||||
- think: z.ZodOptional<z.ZodBoolean>;
|
||||
+ think: z.ZodOptional<z.ZodUnion<[z.ZodBoolean, z.ZodEnum<['low', 'medium', 'high']>]>>;
|
||||
user: z.ZodOptional<z.ZodString>;
|
||||
suffix: z.ZodOptional<z.ZodString>;
|
||||
echo: z.ZodOptional<z.ZodBoolean>;
|
||||
diff --git a/dist/index.js b/dist/index.js
|
||||
index 35b5142ce8476ce2549ed7c2ec48e7d8c46c90d9..2ef64dc9a4c2be043e6af608241a6a8309a5a69f 100644
|
||||
--- a/dist/index.js
|
||||
+++ b/dist/index.js
|
||||
@@ -158,7 +158,7 @@ function getResponseMetadata({
|
||||
|
||||
// src/completion/ollama-completion-language-model.ts
|
||||
var ollamaCompletionProviderOptions = import_v42.z.object({
|
||||
- think: import_v42.z.boolean().optional(),
|
||||
+ think: import_v42.z.union([import_v42.z.boolean(), import_v42.z.enum(['low', 'medium', 'high'])]).optional(),
|
||||
user: import_v42.z.string().optional(),
|
||||
suffix: import_v42.z.string().optional(),
|
||||
echo: import_v42.z.boolean().optional()
|
||||
@@ -662,7 +662,7 @@ function convertToOllamaChatMessages({
|
||||
const images = content.filter((part) => part.type === "file" && part.mediaType.startsWith("image/")).map((part) => part.data);
|
||||
messages.push({
|
||||
role: "user",
|
||||
- content: userText.length > 0 ? userText : [],
|
||||
+ content: userText.length > 0 ? userText : '',
|
||||
images: images.length > 0 ? images : void 0
|
||||
});
|
||||
break;
|
||||
@@ -813,9 +813,11 @@ var ollamaProviderOptions = import_v44.z.object({
|
||||
* the model's thinking from the model's output. When disabled, the model will not think
|
||||
* and directly output the content.
|
||||
*
|
||||
+ * For gpt-oss models, you can also use 'low', 'medium', or 'high' to control the depth of thinking.
|
||||
+ *
|
||||
* Only supported by certain models like DeepSeek R1 and Qwen 3.
|
||||
*/
|
||||
- think: import_v44.z.boolean().optional(),
|
||||
+ think: import_v44.z.union([import_v44.z.boolean(), import_v44.z.enum(['low', 'medium', 'high'])]).optional(),
|
||||
options: import_v44.z.object({
|
||||
num_ctx: import_v44.z.number().optional(),
|
||||
repeat_last_n: import_v44.z.number().optional(),
|
||||
@@ -929,14 +931,16 @@ var OllamaRequestBuilder = class {
|
||||
prompt,
|
||||
systemMessageMode: "system"
|
||||
}),
|
||||
- temperature,
|
||||
- top_p: topP,
|
||||
max_output_tokens: maxOutputTokens,
|
||||
...(responseFormat == null ? void 0 : responseFormat.type) === "json" && {
|
||||
format: responseFormat.schema != null ? responseFormat.schema : "json"
|
||||
},
|
||||
think: (_a = ollamaOptions == null ? void 0 : ollamaOptions.think) != null ? _a : false,
|
||||
- options: (_b = ollamaOptions == null ? void 0 : ollamaOptions.options) != null ? _b : void 0
|
||||
+ options: {
|
||||
+ ...temperature !== void 0 && { temperature },
|
||||
+ ...topP !== void 0 && { top_p: topP },
|
||||
+ ...((_b = ollamaOptions == null ? void 0 : ollamaOptions.options) != null ? _b : {})
|
||||
+ }
|
||||
};
|
||||
}
|
||||
};
|
||||
diff --git a/dist/index.mjs b/dist/index.mjs
|
||||
index e2a634a78d80ac9542f2cc4f96cf2291094b10cf..67b23efce3c1cf4f026693d3ff9246988a3ef26e 100644
|
||||
--- a/dist/index.mjs
|
||||
+++ b/dist/index.mjs
|
||||
@@ -144,7 +144,7 @@ function getResponseMetadata({
|
||||
|
||||
// src/completion/ollama-completion-language-model.ts
|
||||
var ollamaCompletionProviderOptions = z2.object({
|
||||
- think: z2.boolean().optional(),
|
||||
+ think: z2.union([z2.boolean(), z2.enum(['low', 'medium', 'high'])]).optional(),
|
||||
user: z2.string().optional(),
|
||||
suffix: z2.string().optional(),
|
||||
echo: z2.boolean().optional()
|
||||
@@ -662,7 +662,7 @@ function convertToOllamaChatMessages({
|
||||
const images = content.filter((part) => part.type === "file" && part.mediaType.startsWith("image/")).map((part) => part.data);
|
||||
messages.push({
|
||||
role: "user",
|
||||
- content: userText.length > 0 ? userText : [],
|
||||
+ content: userText.length > 0 ? userText : '',
|
||||
images: images.length > 0 ? images : void 0
|
||||
});
|
||||
break;
|
||||
@@ -815,9 +815,11 @@ var ollamaProviderOptions = z4.object({
|
||||
* the model's thinking from the model's output. When disabled, the model will not think
|
||||
* and directly output the content.
|
||||
*
|
||||
+ * For gpt-oss models, you can also use 'low', 'medium', or 'high' to control the depth of thinking.
|
||||
+ *
|
||||
* Only supported by certain models like DeepSeek R1 and Qwen 3.
|
||||
*/
|
||||
- think: z4.boolean().optional(),
|
||||
+ think: z4.union([z4.boolean(), z4.enum(['low', 'medium', 'high'])]).optional(),
|
||||
options: z4.object({
|
||||
num_ctx: z4.number().optional(),
|
||||
repeat_last_n: z4.number().optional(),
|
||||
@@ -931,14 +933,16 @@ var OllamaRequestBuilder = class {
|
||||
prompt,
|
||||
systemMessageMode: "system"
|
||||
}),
|
||||
- temperature,
|
||||
- top_p: topP,
|
||||
max_output_tokens: maxOutputTokens,
|
||||
...(responseFormat == null ? void 0 : responseFormat.type) === "json" && {
|
||||
format: responseFormat.schema != null ? responseFormat.schema : "json"
|
||||
},
|
||||
think: (_a = ollamaOptions == null ? void 0 : ollamaOptions.think) != null ? _a : false,
|
||||
- options: (_b = ollamaOptions == null ? void 0 : ollamaOptions.options) != null ? _b : void 0
|
||||
+ options: {
|
||||
+ ...temperature !== void 0 && { temperature },
|
||||
+ ...topP !== void 0 && { top_p: topP },
|
||||
+ ...((_b = ollamaOptions == null ? void 0 : ollamaOptions.options) != null ? _b : {})
|
||||
+ }
|
||||
};
|
||||
}
|
||||
};
|
||||
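The patch above widens ollama-ai-provider-v2's `think` option from a plain boolean to boolean | 'low' | 'medium' | 'high' and moves temperature/top_p into the Ollama `options` object. For reference, the same `think` field can be exercised directly against a local Ollama server; this is only a hedged sketch, assuming a recent Ollama build listening on the default port and a gpt-oss model already pulled (the model name is just an example):

    # Ask a gpt-oss model to reason at high effort via the native /api/chat endpoint
    curl http://localhost:11434/api/chat -d '{
      "model": "gpt-oss:20b",
      "messages": [{ "role": "user", "content": "Why is the sky blue?" }],
      "think": "high",
      "stream": false
    }'
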
@ -135,66 +135,60 @@ artifactBuildCompleted: scripts/artifact-build-completed.js
|
||||
releaseInfo:
|
||||
releaseNotes: |
|
||||
<!--LANG:en-->
|
||||
Cherry Studio 1.7.2 - Stability & Enhancement Update
|
||||
Cherry Studio 1.7.3 - Feature & Stability Update
|
||||
|
||||
This release focuses on stability improvements, bug fixes, and quality-of-life enhancements.
|
||||
This release brings new features, UI improvements, and important bug fixes.
|
||||
|
||||
✨ New Features
|
||||
- Add MCP server log viewer for better debugging
|
||||
- Support custom Git Bash path configuration
|
||||
- Add print to PDF and save as HTML for mini program webviews
|
||||
- Add CherryIN API host selection settings
|
||||
- Enhance assistant presets with sort and batch delete modes
|
||||
- Open URL directly for SelectionAssistant search action
|
||||
- Enhance web search tool switching with provider-specific context
|
||||
|
||||
🔧 Improvements
|
||||
- Enhanced update dialog functionality and state management
|
||||
- Improved ImageViewer context menu UX
|
||||
- Better temperature and top_p parameter handling
|
||||
- User-configurable stream options for OpenAI API
|
||||
- Translation feature now supports document files
|
||||
|
||||
🤖 AI & Models
|
||||
- Added explicit thinking token support for Gemini 3 Pro Image
|
||||
- Updated DeepSeek logic to match DeepSeek v3.2
|
||||
- Updated AiOnly default models
|
||||
- Updated AI model configurations to latest versions
|
||||
|
||||
♿ Accessibility
|
||||
- Improved screen reader (NVDA) support with aria-label attributes
|
||||
- Added Slovak language support for spell check
|
||||
- Remove Intel Ultra limit for OVMS
|
||||
- Improve settings tab and assistant item UI
|
||||
|
||||
🐛 Bug Fixes
|
||||
- Fixed Quick Assistant shortcut registration issue
|
||||
- Fixed UI freeze on multi-file selection via batch processing
|
||||
- Fixed assistant default model update when editing model capabilities
|
||||
- Fixed provider handling and API key rotation logic
|
||||
- Fixed OVMS API URL path formation
|
||||
- Fixed custom parameters placement for Vercel AI Gateway
|
||||
- Fixed topic message blocks clearing
|
||||
- Fixed input bar blocking enter send while generating
|
||||
- Fix stack overflow with base64 images
|
||||
- Fix infinite loop in knowledge queue processing
|
||||
- Fix quick panel closing in multiple selection mode
|
||||
- Fix thinking timer not stopping when reply is aborted
|
||||
- Fix ThinkingButton icon display for fixed reasoning mode
|
||||
- Fix knowledge query prioritization and intent prompt
|
||||
- Fix OpenRouter embeddings support
|
||||
- Fix SelectionAction window resize on Windows
|
||||
- Add gpustack provider support for qwen3 thinking mode
|
||||
|
||||
<!--LANG:zh-CN-->
|
||||
Cherry Studio 1.7.2 - 稳定性与功能增强更新
|
||||
Cherry Studio 1.7.3 - 功能与稳定性更新
|
||||
|
||||
本次更新专注于稳定性改进、问题修复和用户体验提升。
|
||||
本次更新带来新功能、界面改进和重要的问题修复。
|
||||
|
||||
✨ 新功能
|
||||
- 新增 MCP 服务器日志查看器,便于调试
|
||||
- 支持自定义 Git Bash 路径配置
|
||||
- 小程序 webview 支持打印 PDF 和保存为 HTML
|
||||
- 新增 CherryIN API 主机选择设置
|
||||
- 助手预设增强:支持排序和批量删除模式
|
||||
- 划词助手搜索操作直接打开 URL
|
||||
- 增强网页搜索工具切换逻辑,支持服务商特定上下文
|
||||
|
||||
🔧 功能改进
|
||||
- 增强更新对话框功能和状态管理
|
||||
- 优化图片查看器右键菜单体验
|
||||
- 改进温度和 top_p 参数处理逻辑
|
||||
- 支持用户自定义 OpenAI API 流式选项
|
||||
- 翻译功能现已支持文档文件
|
||||
|
||||
🤖 AI 与模型
|
||||
- 为 Gemini 3 Pro Image 添加显式思考 token 支持
|
||||
- 更新 DeepSeek 逻辑以适配 DeepSeek v3.2
|
||||
- 更新 AiOnly 默认模型
|
||||
- 更新 AI 模型配置至最新版本
|
||||
|
||||
♿ 无障碍支持
|
||||
- 改进屏幕阅读器 (NVDA) 支持,添加 aria-label 属性
|
||||
- 新增斯洛伐克语拼写检查支持
|
||||
- 移除 OVMS 的 Intel Ultra 限制
|
||||
- 优化设置标签页和助手项目 UI
|
||||
|
||||
🐛 问题修复
|
||||
- 修复快捷助手无法注册快捷键的问题
|
||||
- 修复多文件选择时 UI 冻结问题(通过批处理优化)
|
||||
- 修复编辑模型能力时助手默认模型更新问题
|
||||
- 修复服务商处理和 API 密钥轮换逻辑
|
||||
- 修复 OVMS API URL 路径格式问题
|
||||
- 修复 Vercel AI Gateway 自定义参数位置问题
|
||||
- 修复话题消息块清理问题
|
||||
- 修复生成时输入框阻止回车发送的问题
|
||||
- 修复 base64 图片导致的栈溢出问题
|
||||
- 修复知识库队列处理的无限循环问题
|
||||
- 修复多选模式下快捷面板意外关闭的问题
|
||||
- 修复回复中止时思考计时器未停止的问题
|
||||
- 修复固定推理模式下思考按钮图标显示问题
|
||||
- 修复知识库查询优先级和意图提示
|
||||
- 修复 OpenRouter 嵌入模型支持
|
||||
- 修复 Windows 上划词助手窗口大小调整问题
|
||||
- 为 gpustack 服务商添加 qwen3 思考模式支持
|
||||
<!--LANG:END-->
|
||||
|
||||
10  package.json
@ -84,7 +84,7 @@
|
||||
"release:ai-sdk-provider": "yarn workspace @cherrystudio/ai-sdk-provider version patch --immediate && yarn workspace @cherrystudio/ai-sdk-provider build && yarn workspace @cherrystudio/ai-sdk-provider npm publish --access public"
|
||||
},
|
||||
"dependencies": {
|
||||
"@anthropic-ai/claude-agent-sdk": "patch:@anthropic-ai/claude-agent-sdk@npm%3A0.1.53#~/.yarn/patches/@anthropic-ai-claude-agent-sdk-npm-0.1.53-4b77f4cf29.patch",
|
||||
"@anthropic-ai/claude-agent-sdk": "patch:@anthropic-ai/claude-agent-sdk@npm%3A0.1.62#~/.yarn/patches/@anthropic-ai-claude-agent-sdk-npm-0.1.62-23ae56f8c8.patch",
|
||||
"@libsql/client": "0.14.0",
|
||||
"@libsql/win32-x64-msvc": "^0.4.7",
|
||||
"@napi-rs/system-ocr": "patch:@napi-rs/system-ocr@npm%3A1.0.2#~/.yarn/patches/@napi-rs-system-ocr-npm-1.0.2-59e7a78e8b.patch",
|
||||
@ -122,7 +122,7 @@
|
||||
"@ai-sdk/google-vertex": "^3.0.79",
|
||||
"@ai-sdk/huggingface": "^0.0.10",
|
||||
"@ai-sdk/mistral": "^2.0.24",
|
||||
"@ai-sdk/openai": "patch:@ai-sdk/openai@npm%3A2.0.72#~/.yarn/patches/@ai-sdk-openai-npm-2.0.72-234e68da87.patch",
|
||||
"@ai-sdk/openai": "patch:@ai-sdk/openai@npm%3A2.0.85#~/.yarn/patches/@ai-sdk-openai-npm-2.0.85-27483d1d6a.patch",
|
||||
"@ai-sdk/perplexity": "^2.0.20",
|
||||
"@ai-sdk/test-server": "^0.0.1",
|
||||
"@ant-design/v5-patch-for-react-19": "^1.0.3",
|
||||
@ -146,7 +146,7 @@
|
||||
"@cherrystudio/embedjs-ollama": "^0.1.31",
|
||||
"@cherrystudio/embedjs-openai": "^0.1.31",
|
||||
"@cherrystudio/extension-table-plus": "workspace:^",
|
||||
"@cherrystudio/openai": "^6.9.0",
|
||||
"@cherrystudio/openai": "^6.12.0",
|
||||
"@cherrystudio/ui": "workspace:*",
|
||||
"@dnd-kit/core": "^6.3.1",
|
||||
"@dnd-kit/modifiers": "^9.0.0",
|
||||
@ -324,7 +324,7 @@
|
||||
"motion": "^12.10.5",
|
||||
"notion-helper": "^1.3.22",
|
||||
"npx-scope-finder": "^1.2.0",
|
||||
"ollama-ai-provider-v2": "^1.5.5",
|
||||
"ollama-ai-provider-v2": "patch:ollama-ai-provider-v2@npm%3A1.5.5#~/.yarn/patches/ollama-ai-provider-v2-npm-1.5.5-8bef249af9.patch",
|
||||
"oxlint": "^1.22.0",
|
||||
"oxlint-tsgolint": "^0.2.0",
|
||||
"p-queue": "^8.1.0",
|
||||
@ -420,7 +420,7 @@
|
||||
"@langchain/openai@npm:>=0.1.0 <0.6.0": "patch:@langchain/openai@npm%3A1.0.0#~/.yarn/patches/@langchain-openai-npm-1.0.0-474d0ad9d4.patch",
|
||||
"@langchain/openai@npm:^0.3.16": "patch:@langchain/openai@npm%3A1.0.0#~/.yarn/patches/@langchain-openai-npm-1.0.0-474d0ad9d4.patch",
|
||||
"@langchain/openai@npm:>=0.2.0 <0.7.0": "patch:@langchain/openai@npm%3A1.0.0#~/.yarn/patches/@langchain-openai-npm-1.0.0-474d0ad9d4.patch",
|
||||
"@ai-sdk/openai@npm:^2.0.42": "patch:@ai-sdk/openai@npm%3A2.0.72#~/.yarn/patches/@ai-sdk-openai-npm-2.0.72-234e68da87.patch",
|
||||
"@ai-sdk/openai@npm:^2.0.42": "patch:@ai-sdk/openai@npm%3A2.0.85#~/.yarn/patches/@ai-sdk-openai-npm-2.0.85-27483d1d6a.patch",
|
||||
"@ai-sdk/google@npm:^2.0.40": "patch:@ai-sdk/google@npm%3A2.0.40#~/.yarn/patches/@ai-sdk-google-npm-2.0.40-47e0eeee83.patch",
|
||||
"@ai-sdk/openai-compatible@npm:^1.0.27": "patch:@ai-sdk/openai-compatible@npm%3A1.0.27#~/.yarn/patches/@ai-sdk-openai-compatible-npm-1.0.27-06f74278cf.patch"
|
||||
},
|
||||
|
||||
@ -40,7 +40,7 @@
|
||||
},
|
||||
"dependencies": {
|
||||
"@ai-sdk/anthropic": "^2.0.49",
|
||||
"@ai-sdk/azure": "^2.0.74",
|
||||
"@ai-sdk/azure": "^2.0.87",
|
||||
"@ai-sdk/deepseek": "^1.0.31",
|
||||
"@ai-sdk/openai-compatible": "patch:@ai-sdk/openai-compatible@npm%3A1.0.27#~/.yarn/patches/@ai-sdk-openai-compatible-npm-1.0.27-06f74278cf.patch",
|
||||
"@ai-sdk/provider": "^2.0.0",
|
||||
|
||||
@ -62,7 +62,7 @@ export class StreamEventManager {
|
||||
const recursiveResult = await context.recursiveCall(recursiveParams)
|
||||
|
||||
if (recursiveResult && recursiveResult.fullStream) {
|
||||
await this.pipeRecursiveStream(controller, recursiveResult.fullStream, context)
|
||||
await this.pipeRecursiveStream(controller, recursiveResult.fullStream)
|
||||
} else {
|
||||
console.warn('[MCP Prompt] No fullstream found in recursive result:', recursiveResult)
|
||||
}
|
||||
@ -74,11 +74,7 @@ export class StreamEventManager {
|
||||
/**
|
||||
* 将递归流的数据传递到当前流
|
||||
*/
|
||||
private async pipeRecursiveStream(
|
||||
controller: StreamController,
|
||||
recursiveStream: ReadableStream,
|
||||
context?: AiRequestContext
|
||||
): Promise<void> {
|
||||
private async pipeRecursiveStream(controller: StreamController, recursiveStream: ReadableStream): Promise<void> {
|
||||
const reader = recursiveStream.getReader()
|
||||
try {
|
||||
while (true) {
|
||||
@ -86,18 +82,14 @@ export class StreamEventManager {
|
||||
if (done) {
|
||||
break
|
||||
}
|
||||
if (value.type === 'start') {
|
||||
continue
|
||||
}
|
||||
|
||||
if (value.type === 'finish') {
|
||||
// 迭代的流不发finish,但需要累加其 usage
|
||||
if (value.usage && context?.accumulatedUsage) {
|
||||
this.accumulateUsage(context.accumulatedUsage, value.usage)
|
||||
}
|
||||
break
|
||||
}
|
||||
// 对于 finish-step 类型,累加其 usage
|
||||
if (value.type === 'finish-step' && value.usage && context?.accumulatedUsage) {
|
||||
this.accumulateUsage(context.accumulatedUsage, value.usage)
|
||||
}
|
||||
// 将递归流的数据传递到当前流
|
||||
|
||||
controller.enqueue(value)
|
||||
}
|
||||
} finally {
|
||||
@ -135,10 +127,8 @@ export class StreamEventManager {
|
||||
// 构建新的对话消息
|
||||
const newMessages: ModelMessage[] = [
|
||||
...(context.originalParams.messages || []),
|
||||
{
|
||||
role: 'assistant',
|
||||
content: textBuffer
|
||||
},
|
||||
// 只有当 textBuffer 有内容时才添加 assistant 消息,避免空消息导致 API 错误
|
||||
...(textBuffer ? [{ role: 'assistant' as const, content: textBuffer }] : []),
|
||||
{
|
||||
role: 'user',
|
||||
content: toolResultsText
|
||||
@ -161,7 +151,7 @@ export class StreamEventManager {
|
||||
/**
|
||||
* 累加 usage 数据
|
||||
*/
|
||||
private accumulateUsage(target: any, source: any): void {
|
||||
accumulateUsage(target: any, source: any): void {
|
||||
if (!target || !source) return
|
||||
|
||||
// 累加各种 token 类型
|
||||
|
||||
@ -411,7 +411,10 @@ export const createPromptToolUsePlugin = (config: PromptToolUseConfig = {}) => {
|
||||
}
|
||||
}
|
||||
|
||||
// 如果没有执行工具调用,直接传递原始finish-step事件
|
||||
// 如果没有执行工具调用,累加 usage 后透传 finish-step 事件
|
||||
if (chunk.usage && context.accumulatedUsage) {
|
||||
streamEventManager.accumulateUsage(context.accumulatedUsage, chunk.usage)
|
||||
}
|
||||
controller.enqueue(chunk)
|
||||
|
||||
// 清理状态
|
||||
|
||||
@ -5,6 +5,7 @@ import type { InferToolInput, InferToolOutput, Tool } from 'ai'
|
||||
|
||||
import { createOpenRouterOptions, createXaiOptions, mergeProviderOptions } from '../../../options'
|
||||
import type { ProviderOptionsMap } from '../../../options/types'
|
||||
import type { AiRequestContext } from '../../'
|
||||
import type { OpenRouterSearchConfig } from './openrouter'
|
||||
|
||||
/**
|
||||
@ -94,28 +95,84 @@ export type WebSearchToolInputSchema = {
|
||||
'openai-chat': InferToolInput<OpenAIChatWebSearchTool>
|
||||
}
|
||||
|
||||
export const switchWebSearchTool = (config: WebSearchPluginConfig, params: any) => {
|
||||
if (config.openai) {
|
||||
if (!params.tools) params.tools = {}
|
||||
params.tools.web_search = openai.tools.webSearch(config.openai)
|
||||
} else if (config['openai-chat']) {
|
||||
if (!params.tools) params.tools = {}
|
||||
params.tools.web_search_preview = openai.tools.webSearchPreview(config['openai-chat'])
|
||||
} else if (config.anthropic) {
|
||||
if (!params.tools) params.tools = {}
|
||||
params.tools.web_search = anthropic.tools.webSearch_20250305(config.anthropic)
|
||||
} else if (config.google) {
|
||||
// case 'google-vertex':
|
||||
if (!params.tools) params.tools = {}
|
||||
params.tools.web_search = google.tools.googleSearch(config.google || {})
|
||||
} else if (config.xai) {
|
||||
const searchOptions = createXaiOptions({
|
||||
searchParameters: { ...config.xai, mode: 'on' }
|
||||
})
|
||||
params.providerOptions = mergeProviderOptions(params.providerOptions, searchOptions)
|
||||
} else if (config.openrouter) {
|
||||
const searchOptions = createOpenRouterOptions(config.openrouter)
|
||||
params.providerOptions = mergeProviderOptions(params.providerOptions, searchOptions)
|
||||
/**
|
||||
* Helper function to ensure params.tools object exists
|
||||
*/
|
||||
const ensureToolsObject = (params: any) => {
|
||||
if (!params.tools) params.tools = {}
|
||||
}
|
||||
|
||||
/**
|
||||
* Helper function to apply tool-based web search configuration
|
||||
*/
|
||||
const applyToolBasedSearch = (params: any, toolName: string, toolInstance: any) => {
|
||||
ensureToolsObject(params)
|
||||
params.tools[toolName] = toolInstance
|
||||
}
|
||||
|
||||
/**
|
||||
* Helper function to apply provider options-based web search configuration
|
||||
*/
|
||||
const applyProviderOptionsSearch = (params: any, searchOptions: any) => {
|
||||
params.providerOptions = mergeProviderOptions(params.providerOptions, searchOptions)
|
||||
}
|
||||
|
||||
export const switchWebSearchTool = (config: WebSearchPluginConfig, params: any, context?: AiRequestContext) => {
|
||||
const providerId = context?.providerId
|
||||
|
||||
// Provider-specific configuration map
|
||||
const providerHandlers: Record<string, () => void> = {
|
||||
openai: () => {
|
||||
const cfg = config.openai ?? DEFAULT_WEB_SEARCH_CONFIG.openai
|
||||
applyToolBasedSearch(params, 'web_search', openai.tools.webSearch(cfg))
|
||||
},
|
||||
'openai-chat': () => {
|
||||
const cfg = (config['openai-chat'] ?? DEFAULT_WEB_SEARCH_CONFIG['openai-chat']) as OpenAISearchPreviewConfig
|
||||
applyToolBasedSearch(params, 'web_search_preview', openai.tools.webSearchPreview(cfg))
|
||||
},
|
||||
anthropic: () => {
|
||||
const cfg = config.anthropic ?? DEFAULT_WEB_SEARCH_CONFIG.anthropic
|
||||
applyToolBasedSearch(params, 'web_search', anthropic.tools.webSearch_20250305(cfg))
|
||||
},
|
||||
google: () => {
|
||||
const cfg = (config.google ?? DEFAULT_WEB_SEARCH_CONFIG.google) as GoogleSearchConfig
|
||||
applyToolBasedSearch(params, 'web_search', google.tools.googleSearch(cfg))
|
||||
},
|
||||
xai: () => {
|
||||
const cfg = config.xai ?? DEFAULT_WEB_SEARCH_CONFIG.xai
|
||||
const searchOptions = createXaiOptions({ searchParameters: { ...cfg, mode: 'on' } })
|
||||
applyProviderOptionsSearch(params, searchOptions)
|
||||
},
|
||||
openrouter: () => {
|
||||
const cfg = (config.openrouter ?? DEFAULT_WEB_SEARCH_CONFIG.openrouter) as OpenRouterSearchConfig
|
||||
const searchOptions = createOpenRouterOptions(cfg)
|
||||
applyProviderOptionsSearch(params, searchOptions)
|
||||
}
|
||||
}
|
||||
|
||||
// Try provider-specific handler first
|
||||
const handler = providerId && providerHandlers[providerId]
|
||||
if (handler) {
|
||||
handler()
|
||||
return params
|
||||
}
|
||||
|
||||
// Fallback: apply based on available config keys (prioritized order)
|
||||
const fallbackOrder: Array<keyof WebSearchPluginConfig> = [
|
||||
'openai',
|
||||
'openai-chat',
|
||||
'anthropic',
|
||||
'google',
|
||||
'xai',
|
||||
'openrouter'
|
||||
]
|
||||
|
||||
for (const key of fallbackOrder) {
|
||||
if (config[key]) {
|
||||
providerHandlers[key]()
|
||||
break
|
||||
}
|
||||
}
|
||||
|
||||
return params
|
||||
}
|
||||
|
||||
@ -17,8 +17,22 @@ export const webSearchPlugin = (config: WebSearchPluginConfig = DEFAULT_WEB_SEAR
|
||||
name: 'webSearch',
|
||||
enforce: 'pre',
|
||||
|
||||
transformParams: async (params: any) => {
|
||||
switchWebSearchTool(config, params)
|
||||
transformParams: async (params: any, context) => {
|
||||
let { providerId } = context
|
||||
|
||||
// For cherryin providers, extract the actual provider from the model's provider string
|
||||
// Expected format: "cherryin.{actualProvider}" (e.g., "cherryin.gemini")
|
||||
if (providerId === 'cherryin' || providerId === 'cherryin-chat') {
|
||||
const provider = params.model?.provider
|
||||
if (provider && typeof provider === 'string' && provider.includes('.')) {
|
||||
const extractedProviderId = provider.split('.')[1]
|
||||
if (extractedProviderId) {
|
||||
providerId = extractedProviderId
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
switchWebSearchTool(config, params, { ...context, providerId })
|
||||
return params
|
||||
}
|
||||
})
|
||||
|
||||
@ -55,6 +55,8 @@ export enum IpcChannel {
|
||||
Webview_SetOpenLinkExternal = 'webview:set-open-link-external',
|
||||
Webview_SetSpellCheckEnabled = 'webview:set-spell-check-enabled',
|
||||
Webview_SearchHotkey = 'webview:search-hotkey',
|
||||
Webview_PrintToPDF = 'webview:print-to-pdf',
|
||||
Webview_SaveAsHTML = 'webview:save-as-html',
|
||||
|
||||
// Open
|
||||
Open_Path = 'open:path',
|
||||
@ -90,6 +92,8 @@ export enum IpcChannel {
|
||||
Mcp_AbortTool = 'mcp:abort-tool',
|
||||
Mcp_GetServerVersion = 'mcp:get-server-version',
|
||||
Mcp_Progress = 'mcp:progress',
|
||||
Mcp_GetServerLogs = 'mcp:get-server-logs',
|
||||
Mcp_ServerLog = 'mcp:server-log',
|
||||
// Python
|
||||
Python_Execute = 'python:execute',
|
||||
|
||||
@ -255,6 +259,8 @@ export enum IpcChannel {
|
||||
System_GetHostname = 'system:getHostname',
|
||||
System_GetCpuName = 'system:getCpuName',
|
||||
System_CheckGitBash = 'system:checkGitBash',
|
||||
System_GetGitBashPath = 'system:getGitBashPath',
|
||||
System_SetGitBashPath = 'system:setGitBashPath',
|
||||
|
||||
// DevTools
|
||||
System_ToggleDevTools = 'system:toggleDevTools',
|
||||
|
||||
@ -88,16 +88,11 @@ export function getSdkClient(
|
||||
}
|
||||
})
|
||||
}
|
||||
let baseURL =
|
||||
const baseURL =
|
||||
provider.type === 'anthropic'
|
||||
? provider.apiHost
|
||||
: (provider.anthropicApiHost && provider.anthropicApiHost.trim()) || provider.apiHost
|
||||
|
||||
// Anthropic SDK automatically appends /v1 to all endpoints (like /v1/messages, /v1/models)
|
||||
// We need to strip api version from baseURL to avoid duplication (e.g., /v3/v1/models)
|
||||
// formatProviderApiHost adds /v1 for AI SDK compatibility, but Anthropic SDK needs it removed
|
||||
baseURL = baseURL.replace(/\/v\d+(?:alpha|beta)?(?=\/|$)/i, '')
|
||||
|
||||
logger.debug('Anthropic API baseURL', { baseURL, providerId: provider.id })
|
||||
|
||||
if (provider.id === 'aihubmix') {
|
||||
|
||||
@ -306,7 +306,7 @@ export const SEARCH_SUMMARY_PROMPT_KNOWLEDGE_ONLY = `
|
||||
**Use user's language to rephrase the question.**
|
||||
Follow these guidelines:
|
||||
1. If the question is a simple writing task, greeting (e.g., Hi, Hello, How are you), or does not require searching for information (unless the greeting contains a follow-up question), return 'not_needed' in the 'question' XML block. This indicates that no search is required.
|
||||
2. For knowledge, You need rewrite user query into 'rewrite' XML block with one alternative version while preserving the original intent and meaning. Also include the original question in the 'question' block.
|
||||
2. For knowledge, You need rewrite user query into 'rewrite' XML block with one alternative version while preserving the original intent and meaning. Also include the rephrased or decomposed question(s) in the 'question' block.
|
||||
3. Always return the rephrased question inside the 'question' XML block.
|
||||
4. Always wrap the rephrased question in the appropriate XML blocks: use <knowledge></knowledge> for queries that can be answered from a pre-existing knowledge base. Ensure that the rephrased question is always contained within a <question></question> block inside the wrapper.
|
||||
5. *use knowledge to rephrase the question*
|
||||
|
||||
@ -23,6 +23,14 @@ export type MCPProgressEvent = {
|
||||
progress: number // 0-1 range
|
||||
}
|
||||
|
||||
export type MCPServerLogEntry = {
|
||||
timestamp: number
|
||||
level: 'debug' | 'info' | 'warn' | 'error' | 'stderr' | 'stdout'
|
||||
message: string
|
||||
data?: any
|
||||
source?: string
|
||||
}
|
||||
|
||||
export type WebviewKeyEvent = {
|
||||
webviewId: number
|
||||
key: string
|
||||
|
||||
@ -11,7 +11,7 @@ const OVMS_EX_URL = 'https://gitcode.com/gcw_ggDjjkY3/kjfile/releases/download/d
|
||||
|
||||
/**
|
||||
* error code:
|
||||
* 101: Unsupported CPU (not Intel Ultra)
|
||||
* 101: Unsupported CPU (not Intel)
|
||||
* 102: Unsupported platform (not Windows)
|
||||
* 103: Download failed
|
||||
* 104: Installation failed
|
||||
@ -213,8 +213,8 @@ async function installOvms() {
|
||||
console.log(`CPU Name: ${cpuName}`)
|
||||
|
||||
// Check if CPU name contains "Ultra"
|
||||
if (!cpuName.toLowerCase().includes('intel') || !cpuName.toLowerCase().includes('ultra')) {
|
||||
console.error('OVMS installation requires an Intel(R) Core(TM) Ultra CPU.')
|
||||
if (!cpuName.toLowerCase().includes('intel')) {
|
||||
console.error('OVMS installation requires an Intel CPU.')
|
||||
return 101
|
||||
}
|
||||
|
||||
|
||||
@ -5,9 +5,17 @@ exports.default = async function (configuration) {
|
||||
const { path } = configuration
|
||||
if (configuration.path) {
|
||||
try {
|
||||
const certPath = process.env.CHERRY_CERT_PATH
|
||||
const keyContainer = process.env.CHERRY_CERT_KEY
|
||||
const csp = process.env.CHERRY_CERT_CSP
|
||||
|
||||
if (!certPath || !keyContainer || !csp) {
|
||||
throw new Error('CHERRY_CERT_PATH, CHERRY_CERT_KEY or CHERRY_CERT_CSP is not set')
|
||||
}
|
||||
|
||||
console.log('Start code signing...')
|
||||
console.log('Signing file:', path)
|
||||
const signCommand = `signtool sign /tr http://timestamp.comodoca.com /td sha256 /fd sha256 /a /v "${path}"`
|
||||
const signCommand = `signtool sign /tr http://timestamp.comodoca.com /td sha256 /fd sha256 /v /f "${certPath}" /csp "${csp}" /k "${keyContainer}" "${path}"`
|
||||
execSync(signCommand, { stdio: 'inherit' })
|
||||
console.log('Code signing completed')
|
||||
} catch (error) {
|
||||
|
||||
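The signing hook change above replaces automatic certificate-store selection (signtool's /a flag) with an explicit certificate file, CSP, and key container taken from the CHERRY_CERT_* environment variables. A quick spot check of the signed output on the build machine, sketched here under the assumption that signtool is on PATH; the installer filename is a placeholder:

    # Verify the Authenticode signature and timestamp of a built installer
    signtool verify /pa /v "dist/Cherry-Studio-1.7.3-x64-setup.exe"
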
@ -20,8 +20,8 @@ import { registerIpc } from './ipc'
|
||||
import { agentService } from './services/agents'
|
||||
import { apiServerService } from './services/ApiServerService'
|
||||
import { appMenuService } from './services/AppMenuService'
|
||||
import mcpService from './services/MCPService'
|
||||
import { nodeTraceService } from './services/NodeTraceService'
|
||||
import mcpService from './services/MCPService'
|
||||
import powerMonitorService from './services/PowerMonitorService'
|
||||
import {
|
||||
CHERRY_STUDIO_PROTOCOL,
|
||||
|
||||
@ -8,7 +8,7 @@ import { loggerService } from '@logger'
|
||||
import { isLinux, isMac, isPortable, isWin } from '@main/constant'
|
||||
import { generateSignature } from '@main/integration/cherryai'
|
||||
import anthropicService from '@main/services/AnthropicService'
|
||||
import { findGitBash, getBinaryPath, isBinaryExists, runInstallScript } from '@main/utils/process'
|
||||
import { findGitBash, getBinaryPath, isBinaryExists, runInstallScript, validateGitBashPath } from '@main/utils/process'
|
||||
import { handleZoomFactor } from '@main/utils/zoom'
|
||||
import type { SpanEntity, TokenUsage } from '@mcp-trace/trace-core'
|
||||
import { MIN_WINDOW_HEIGHT, MIN_WINDOW_WIDTH } from '@shared/config/constant'
|
||||
@ -36,7 +36,7 @@ import appService from './services/AppService'
|
||||
import AppUpdater from './services/AppUpdater'
|
||||
import BackupManager from './services/BackupManager'
|
||||
import { codeToolsService } from './services/CodeToolsService'
|
||||
import { configManager } from './services/ConfigManager'
|
||||
import { ConfigKeys, configManager } from './services/ConfigManager'
|
||||
import CopilotService from './services/CopilotService'
|
||||
import DxtService from './services/DxtService'
|
||||
import { ExportService } from './services/ExportService'
|
||||
@ -500,7 +500,8 @@ export function registerIpc(mainWindow: BrowserWindow, app: Electron.App) {
|
||||
}
|
||||
|
||||
try {
|
||||
const bashPath = findGitBash()
|
||||
const customPath = configManager.get(ConfigKeys.GitBashPath) as string | undefined
|
||||
const bashPath = findGitBash(customPath)
|
||||
|
||||
if (bashPath) {
|
||||
logger.info('Git Bash is available', { path: bashPath })
|
||||
@ -514,6 +515,35 @@ export function registerIpc(mainWindow: BrowserWindow, app: Electron.App) {
|
||||
return false
|
||||
}
|
||||
})
|
||||
|
||||
ipcMain.handle(IpcChannel.System_GetGitBashPath, () => {
|
||||
if (!isWin) {
|
||||
return null
|
||||
}
|
||||
|
||||
const customPath = configManager.get(ConfigKeys.GitBashPath) as string | undefined
|
||||
return customPath ?? null
|
||||
})
|
||||
|
||||
ipcMain.handle(IpcChannel.System_SetGitBashPath, (_, newPath: string | null) => {
|
||||
if (!isWin) {
|
||||
return false
|
||||
}
|
||||
|
||||
if (!newPath) {
|
||||
configManager.set(ConfigKeys.GitBashPath, null)
|
||||
return true
|
||||
}
|
||||
|
||||
const validated = validateGitBashPath(newPath)
|
||||
if (!validated) {
|
||||
return false
|
||||
}
|
||||
|
||||
configManager.set(ConfigKeys.GitBashPath, validated)
|
||||
return true
|
||||
})
|
||||
|
||||
ipcMain.handle(IpcChannel.System_ToggleDevTools, (e) => {
|
||||
const win = BrowserWindow.fromWebContents(e.sender)
|
||||
win && win.webContents.toggleDevTools()
|
||||
@ -766,6 +796,7 @@ export function registerIpc(mainWindow: BrowserWindow, app: Electron.App) {
|
||||
ipcMain.handle(IpcChannel.Mcp_CheckConnectivity, mcpService.checkMcpConnectivity)
|
||||
ipcMain.handle(IpcChannel.Mcp_AbortTool, mcpService.abortTool)
|
||||
ipcMain.handle(IpcChannel.Mcp_GetServerVersion, mcpService.getServerVersion)
|
||||
ipcMain.handle(IpcChannel.Mcp_GetServerLogs, mcpService.getServerLogs)
|
||||
|
||||
// DXT upload handler
|
||||
ipcMain.handle(IpcChannel.Mcp_UploadDxt, async (event, fileBuffer: ArrayBuffer, fileName: string) => {
|
||||
@ -844,6 +875,17 @@ export function registerIpc(mainWindow: BrowserWindow, app: Electron.App) {
|
||||
webview.session.setSpellCheckerEnabled(isEnable)
|
||||
})
|
||||
|
||||
// Webview print and save handlers
|
||||
ipcMain.handle(IpcChannel.Webview_PrintToPDF, async (_, webviewId: number) => {
|
||||
const { printWebviewToPDF } = await import('./services/WebviewService')
|
||||
return await printWebviewToPDF(webviewId)
|
||||
})
|
||||
|
||||
ipcMain.handle(IpcChannel.Webview_SaveAsHTML, async (_, webviewId: number) => {
|
||||
const { saveWebviewAsHTML } = await import('./services/WebviewService')
|
||||
return await saveWebviewAsHTML(webviewId)
|
||||
})
|
||||
|
||||
// store sync
|
||||
storeSyncService.registerIpcHandler()
|
||||
|
||||
|
||||
@ -26,7 +26,8 @@ export enum ConfigKeys {
|
||||
DisableHardwareAcceleration = 'disableHardwareAcceleration',
|
||||
Proxy = 'proxy',
|
||||
EnableDeveloperMode = 'enableDeveloperMode',
|
||||
ClientId = 'clientId'
|
||||
ClientId = 'clientId',
|
||||
GitBashPath = 'gitBashPath'
|
||||
}
|
||||
|
||||
export class ConfigManager {
|
||||
|
||||
@ -34,6 +34,7 @@ import {
|
||||
import { nanoid } from '@reduxjs/toolkit'
|
||||
import { HOME_CHERRY_DIR } from '@shared/config/constant'
|
||||
import type { MCPProgressEvent } from '@shared/config/types'
|
||||
import type { MCPServerLogEntry } from '@shared/config/types'
|
||||
import { IpcChannel } from '@shared/IpcChannel'
|
||||
import { defaultAppHeaders } from '@shared/utils'
|
||||
import {
|
||||
@ -56,6 +57,7 @@ import * as z from 'zod'
|
||||
import DxtService from './DxtService'
|
||||
import { CallBackServer } from './mcp/oauth/callback'
|
||||
import { McpOAuthClientProvider } from './mcp/oauth/provider'
|
||||
import { ServerLogBuffer } from './mcp/ServerLogBuffer'
|
||||
import { windowService } from './WindowService'
|
||||
|
||||
// Generic type for caching wrapped functions
|
||||
@ -142,6 +144,7 @@ class McpService {
|
||||
private pendingClients: Map<string, Promise<Client>> = new Map()
|
||||
private dxtService = new DxtService()
|
||||
private activeToolCalls: Map<string, AbortController> = new Map()
|
||||
private serverLogs = new ServerLogBuffer(200)
|
||||
|
||||
constructor() {
|
||||
this.initClient = this.initClient.bind(this)
|
||||
@ -159,6 +162,7 @@ class McpService {
|
||||
this.cleanup = this.cleanup.bind(this)
|
||||
this.checkMcpConnectivity = this.checkMcpConnectivity.bind(this)
|
||||
this.getServerVersion = this.getServerVersion.bind(this)
|
||||
this.getServerLogs = this.getServerLogs.bind(this)
|
||||
}
|
||||
|
||||
private getServerKey(server: MCPServer): string {
|
||||
@ -172,6 +176,19 @@ class McpService {
|
||||
})
|
||||
}
|
||||
|
||||
private emitServerLog(server: MCPServer, entry: MCPServerLogEntry) {
|
||||
const serverKey = this.getServerKey(server)
|
||||
this.serverLogs.append(serverKey, entry)
|
||||
const mainWindow = windowService.getMainWindow()
|
||||
if (mainWindow) {
|
||||
mainWindow.webContents.send(IpcChannel.Mcp_ServerLog, { ...entry, serverId: server.id })
|
||||
}
|
||||
}
|
||||
|
||||
public getServerLogs(_: Electron.IpcMainInvokeEvent, server: MCPServer): MCPServerLogEntry[] {
|
||||
return this.serverLogs.get(this.getServerKey(server))
|
||||
}
|
||||
|
||||
async initClient(server: MCPServer): Promise<Client> {
|
||||
const serverKey = this.getServerKey(server)
|
||||
|
||||
@ -366,9 +383,18 @@ class McpService {
|
||||
}
|
||||
|
||||
const stdioTransport = new StdioClientTransport(transportOptions)
|
||||
stdioTransport.stderr?.on('data', (data) =>
|
||||
getServerLogger(server).debug(`Stdio stderr`, { data: data.toString() })
|
||||
)
|
||||
stdioTransport.stderr?.on('data', (data) => {
|
||||
const msg = data.toString()
|
||||
getServerLogger(server).debug(`Stdio stderr`, { data: msg })
|
||||
this.emitServerLog(server, {
|
||||
timestamp: Date.now(),
|
||||
level: 'stderr',
|
||||
message: msg.trim(),
|
||||
source: 'stdio'
|
||||
})
|
||||
})
|
||||
// StdioClientTransport does not expose stdout as a readable stream for raw logging
|
||||
// (stdout is reserved for JSON-RPC). Avoid attaching a listener that would never fire.
|
||||
return stdioTransport
|
||||
} else {
|
||||
throw new Error('Either baseUrl or command must be provided')
|
||||
@ -436,6 +462,13 @@ class McpService {
|
||||
}
|
||||
}
|
||||
|
||||
this.emitServerLog(server, {
|
||||
timestamp: Date.now(),
|
||||
level: 'info',
|
||||
message: 'Server connected',
|
||||
source: 'client'
|
||||
})
|
||||
|
||||
// Store the new client in the cache
|
||||
this.clients.set(serverKey, client)
|
||||
|
||||
@ -446,9 +479,22 @@ class McpService {
|
||||
this.clearServerCache(serverKey)
|
||||
|
||||
logger.debug(`Activated server: ${server.name}`)
|
||||
this.emitServerLog(server, {
|
||||
timestamp: Date.now(),
|
||||
level: 'info',
|
||||
message: 'Server activated',
|
||||
source: 'client'
|
||||
})
|
||||
return client
|
||||
} catch (error) {
|
||||
getServerLogger(server).error(`Error activating server ${server.name}`, error as Error)
|
||||
this.emitServerLog(server, {
|
||||
timestamp: Date.now(),
|
||||
level: 'error',
|
||||
message: `Error activating server: ${(error as Error)?.message}`,
|
||||
data: redactSensitive(error),
|
||||
source: 'client'
|
||||
})
|
||||
throw error
|
||||
}
|
||||
} finally {
|
||||
@ -506,6 +552,16 @@ class McpService {
|
||||
// Set up logging message notification handler
|
||||
client.setNotificationHandler(LoggingMessageNotificationSchema, async (notification) => {
|
||||
logger.debug(`Message from server ${server.name}:`, notification.params)
|
||||
const msg = notification.params?.message
|
||||
if (msg) {
|
||||
this.emitServerLog(server, {
|
||||
timestamp: Date.now(),
|
||||
level: (notification.params?.level as MCPServerLogEntry['level']) || 'info',
|
||||
message: typeof msg === 'string' ? msg : JSON.stringify(msg),
|
||||
data: redactSensitive(notification.params?.data),
|
||||
source: notification.params?.logger || 'server'
|
||||
})
|
||||
}
|
||||
})
|
||||
|
||||
getServerLogger(server).debug(`Set up notification handlers`)
|
||||
@ -540,6 +596,7 @@ class McpService {
|
||||
this.clients.delete(serverKey)
|
||||
// Clear all caches for this server
|
||||
this.clearServerCache(serverKey)
|
||||
this.serverLogs.remove(serverKey)
|
||||
} else {
|
||||
logger.warn(`No client found for server`, { serverKey })
|
||||
}
|
||||
@ -548,6 +605,12 @@ class McpService {
|
||||
async stopServer(_: Electron.IpcMainInvokeEvent, server: MCPServer) {
|
||||
const serverKey = this.getServerKey(server)
|
||||
getServerLogger(server).debug(`Stopping server`)
|
||||
this.emitServerLog(server, {
|
||||
timestamp: Date.now(),
|
||||
level: 'info',
|
||||
message: 'Stopping server',
|
||||
source: 'client'
|
||||
})
|
||||
await this.closeClient(serverKey)
|
||||
}
|
||||
|
||||
@ -574,6 +637,12 @@ class McpService {
|
||||
async restartServer(_: Electron.IpcMainInvokeEvent, server: MCPServer) {
|
||||
getServerLogger(server).debug(`Restarting server`)
|
||||
const serverKey = this.getServerKey(server)
|
||||
this.emitServerLog(server, {
|
||||
timestamp: Date.now(),
|
||||
level: 'info',
|
||||
message: 'Restarting server',
|
||||
source: 'client'
|
||||
})
|
||||
await this.closeClient(serverKey)
|
||||
// Clear cache before restarting to ensure fresh data
|
||||
this.clearServerCache(serverKey)
|
||||
@ -606,9 +675,22 @@ class McpService {
|
||||
// Attempt to list tools as a way to check connectivity
|
||||
await client.listTools()
|
||||
getServerLogger(server).debug(`Connectivity check successful`)
|
||||
this.emitServerLog(server, {
|
||||
timestamp: Date.now(),
|
||||
level: 'info',
|
||||
message: 'Connectivity check successful',
|
||||
source: 'connectivity'
|
||||
})
|
||||
return true
|
||||
} catch (error) {
|
||||
getServerLogger(server).error(`Connectivity check failed`, error as Error)
|
||||
this.emitServerLog(server, {
|
||||
timestamp: Date.now(),
|
||||
level: 'error',
|
||||
message: `Connectivity check failed: ${(error as Error).message}`,
|
||||
data: redactSensitive(error),
|
||||
source: 'connectivity'
|
||||
})
|
||||
// Close the client if connectivity check fails to ensure a clean state for the next attempt
|
||||
const serverKey = this.getServerKey(server)
|
||||
await this.closeClient(serverKey)
|
||||
|
||||
@ -1,5 +1,6 @@
|
||||
import { IpcChannel } from '@shared/IpcChannel'
|
||||
import { app, session, shell, webContents } from 'electron'
|
||||
import { app, dialog, session, shell, webContents } from 'electron'
|
||||
import { promises as fs } from 'fs'
|
||||
|
||||
/**
|
||||
* init the useragent of the webview session
|
||||
@ -53,11 +54,17 @@ const attachKeyboardHandler = (contents: Electron.WebContents) => {
|
||||
return
|
||||
}
|
||||
|
||||
const isFindShortcut = (input.control || input.meta) && key === 'f'
|
||||
const isEscape = key === 'escape'
|
||||
const isEnter = key === 'enter'
|
||||
// Helper to check if this is a shortcut we handle
|
||||
const isHandledShortcut = (k: string) => {
|
||||
const isFindShortcut = (input.control || input.meta) && k === 'f'
|
||||
const isPrintShortcut = (input.control || input.meta) && k === 'p'
|
||||
const isSaveShortcut = (input.control || input.meta) && k === 's'
|
||||
const isEscape = k === 'escape'
|
||||
const isEnter = k === 'enter'
|
||||
return isFindShortcut || isPrintShortcut || isSaveShortcut || isEscape || isEnter
|
||||
}
|
||||
|
||||
if (!isFindShortcut && !isEscape && !isEnter) {
|
||||
if (!isHandledShortcut(key)) {
|
||||
return
|
||||
}
|
||||
|
||||
@ -66,11 +73,20 @@ const attachKeyboardHandler = (contents: Electron.WebContents) => {
|
||||
return
|
||||
}
|
||||
|
||||
const isFindShortcut = (input.control || input.meta) && key === 'f'
|
||||
const isPrintShortcut = (input.control || input.meta) && key === 'p'
|
||||
const isSaveShortcut = (input.control || input.meta) && key === 's'
|
||||
|
||||
// Always prevent Cmd/Ctrl+F to override the guest page's native find dialog
|
||||
if (isFindShortcut) {
|
||||
event.preventDefault()
|
||||
}
|
||||
|
||||
// Prevent default print/save dialogs and handle them with custom logic
|
||||
if (isPrintShortcut || isSaveShortcut) {
|
||||
event.preventDefault()
|
||||
}
|
||||
|
||||
// Send the hotkey event to the renderer
|
||||
// The renderer will decide whether to preventDefault for Escape and Enter
|
||||
// based on whether the search bar is visible
|
||||
@ -100,3 +116,130 @@ export function initWebviewHotkeys() {
|
||||
attachKeyboardHandler(contents)
|
||||
})
|
||||
}
|
||||
|
||||
/**
|
||||
* Print webview content to PDF
|
||||
* @param webviewId The webview webContents id
|
||||
* @returns Path to saved PDF file or null if user cancelled
|
||||
*/
|
||||
export async function printWebviewToPDF(webviewId: number): Promise<string | null> {
|
||||
const webview = webContents.fromId(webviewId)
|
||||
if (!webview) {
|
||||
throw new Error('Webview not found')
|
||||
}
|
||||
|
||||
try {
|
||||
// Get the page title for default filename
|
||||
const pageTitle = await webview.executeJavaScript('document.title || "webpage"').catch(() => 'webpage')
|
||||
// Sanitize filename by removing invalid characters
|
||||
const sanitizedTitle = pageTitle.replace(/[<>:"/\\|?*]/g, '-').substring(0, 100)
|
||||
const defaultFilename = sanitizedTitle ? `${sanitizedTitle}.pdf` : `webpage-${Date.now()}.pdf`
|
||||
|
||||
// Show save dialog
|
||||
const { canceled, filePath } = await dialog.showSaveDialog({
|
||||
title: 'Save as PDF',
|
||||
defaultPath: defaultFilename,
|
||||
filters: [{ name: 'PDF Files', extensions: ['pdf'] }]
|
||||
})
|
||||
|
||||
if (canceled || !filePath) {
|
||||
return null
|
||||
}
|
||||
|
||||
// Generate PDF with settings to capture full page
|
||||
const pdfData = await webview.printToPDF({
|
||||
margins: {
|
||||
marginType: 'default'
|
||||
},
|
||||
printBackground: true,
|
||||
landscape: false,
|
||||
pageSize: 'A4',
|
||||
preferCSSPageSize: true
|
||||
})
|
||||
|
||||
// Save PDF to file
|
||||
await fs.writeFile(filePath, pdfData)
|
||||
|
||||
return filePath
|
||||
} catch (error) {
|
||||
throw new Error(`Failed to print to PDF: ${(error as Error).message}`)
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Save webview content as HTML
|
||||
* @param webviewId The webview webContents id
|
||||
* @returns Path to saved HTML file or null if user cancelled
|
||||
*/
|
||||
export async function saveWebviewAsHTML(webviewId: number): Promise<string | null> {
|
||||
const webview = webContents.fromId(webviewId)
|
||||
if (!webview) {
|
||||
throw new Error('Webview not found')
|
||||
}
|
||||
|
||||
try {
|
||||
// Get the page title for default filename
|
||||
const pageTitle = await webview.executeJavaScript('document.title || "webpage"').catch(() => 'webpage')
|
||||
// Sanitize filename by removing invalid characters
|
||||
const sanitizedTitle = pageTitle.replace(/[<>:"/\\|?*]/g, '-').substring(0, 100)
|
||||
const defaultFilename = sanitizedTitle ? `${sanitizedTitle}.html` : `webpage-${Date.now()}.html`
|
||||
|
||||
// Show save dialog
|
||||
const { canceled, filePath } = await dialog.showSaveDialog({
|
||||
title: 'Save as HTML',
|
||||
defaultPath: defaultFilename,
|
||||
filters: [
|
||||
{ name: 'HTML Files', extensions: ['html', 'htm'] },
|
||||
{ name: 'All Files', extensions: ['*'] }
|
||||
]
|
||||
})
|
||||
|
||||
if (canceled || !filePath) {
|
||||
return null
|
||||
}
|
||||
|
||||
// Get the HTML content with safe error handling
|
||||
const html = await webview.executeJavaScript(`
|
||||
(() => {
|
||||
try {
|
||||
// Build complete DOCTYPE string if present
|
||||
let doctype = '';
|
||||
if (document.doctype) {
|
||||
const dt = document.doctype;
|
||||
doctype = '<!DOCTYPE ' + (dt.name || 'html');
|
||||
|
||||
// Add PUBLIC identifier if publicId is present
|
||||
if (dt.publicId) {
|
||||
// Escape single quotes in publicId
|
||||
const escapedPublicId = String(dt.publicId).replace(/'/g, "\\'");
|
||||
doctype += " PUBLIC '" + escapedPublicId + "'";
|
||||
|
||||
// Add systemId if present (required when publicId is present)
|
||||
if (dt.systemId) {
|
||||
const escapedSystemId = String(dt.systemId).replace(/'/g, "\\'");
|
||||
doctype += " '" + escapedSystemId + "'";
|
||||
}
|
||||
} else if (dt.systemId) {
|
||||
// SYSTEM identifier (without PUBLIC)
|
||||
const escapedSystemId = String(dt.systemId).replace(/'/g, "\\'");
|
||||
doctype += " SYSTEM '" + escapedSystemId + "'";
|
||||
}
|
||||
|
||||
doctype += '>';
|
||||
}
|
||||
return doctype + (document.documentElement?.outerHTML || '');
|
||||
} catch (error) {
|
||||
// Fallback: just return the HTML without DOCTYPE if there's an error
|
||||
return document.documentElement?.outerHTML || '';
|
||||
}
|
||||
})()
|
||||
`)
|
||||
|
||||
// Save HTML to file
|
||||
await fs.writeFile(filePath, html, 'utf-8')
|
||||
|
||||
return filePath
|
||||
} catch (error) {
|
||||
throw new Error(`Failed to save as HTML: ${(error as Error).message}`)
|
||||
}
|
||||
}
|
||||
|
||||
29
src/main/services/__tests__/ServerLogBuffer.test.ts
Normal file
29
src/main/services/__tests__/ServerLogBuffer.test.ts
Normal file
@ -0,0 +1,29 @@
|
||||
import { describe, expect, it } from 'vitest'
|
||||
|
||||
import { ServerLogBuffer } from '../mcp/ServerLogBuffer'
|
||||
|
||||
describe('ServerLogBuffer', () => {
|
||||
it('keeps a bounded number of entries per server', () => {
|
||||
const buffer = new ServerLogBuffer(3)
|
||||
const key = 'srv'
|
||||
|
||||
buffer.append(key, { timestamp: 1, level: 'info', message: 'a' })
|
||||
buffer.append(key, { timestamp: 2, level: 'info', message: 'b' })
|
||||
buffer.append(key, { timestamp: 3, level: 'info', message: 'c' })
|
||||
buffer.append(key, { timestamp: 4, level: 'info', message: 'd' })
|
||||
|
||||
const logs = buffer.get(key)
|
||||
expect(logs).toHaveLength(3)
|
||||
expect(logs[0].message).toBe('b')
|
||||
expect(logs[2].message).toBe('d')
|
||||
})
|
||||
|
||||
it('isolates entries by server key', () => {
|
||||
const buffer = new ServerLogBuffer(5)
|
||||
buffer.append('one', { timestamp: 1, level: 'info', message: 'a' })
|
||||
buffer.append('two', { timestamp: 2, level: 'info', message: 'b' })
|
||||
|
||||
expect(buffer.get('one')).toHaveLength(1)
|
||||
expect(buffer.get('two')).toHaveLength(1)
|
||||
})
|
||||
})
|
||||
@ -15,6 +15,8 @@ import { query } from '@anthropic-ai/claude-agent-sdk'
|
||||
import { preferenceService } from '@data/PreferenceService'
|
||||
import { loggerService } from '@logger'
|
||||
import { validateModelId } from '@main/apiServer/utils'
|
||||
import { ConfigKeys, configManager } from '@main/services/ConfigManager'
|
||||
import { validateGitBashPath } from '@main/utils/process'
|
||||
import getLoginShellEnvironment from '@main/utils/shell-env'
|
||||
import { app } from 'electron'
|
||||
|
||||
@ -111,6 +113,8 @@ class ClaudeCodeService implements AgentServiceInterface {
|
||||
Object.entries(loginShellEnv).filter(([key]) => !key.toLowerCase().endsWith('_proxy'))
|
||||
) as Record<string, string>
|
||||
|
||||
const customGitBashPath = validateGitBashPath(configManager.get(ConfigKeys.GitBashPath) as string | undefined)
|
||||
|
||||
const env = {
|
||||
...loginShellEnvWithoutProxies,
|
||||
// TODO: fix the proxy api server
|
||||
@ -130,7 +134,8 @@ class ClaudeCodeService implements AgentServiceInterface {
|
||||
// Set CLAUDE_CONFIG_DIR to app's userData directory to avoid path encoding issues
|
||||
// on Windows when the username contains non-ASCII characters (e.g., Chinese characters)
|
||||
// This prevents the SDK from using the user's home directory which may have encoding problems
|
||||
CLAUDE_CONFIG_DIR: path.join(app.getPath('userData'), '.claude')
|
||||
CLAUDE_CONFIG_DIR: path.join(app.getPath('userData'), '.claude'),
|
||||
...(customGitBashPath ? { CLAUDE_CODE_GIT_BASH_PATH: customGitBashPath } : {})
|
||||
}
|
||||
|
||||
const errorChunks: string[] = []
|
||||
|
||||
36
src/main/services/mcp/ServerLogBuffer.ts
Normal file
36
src/main/services/mcp/ServerLogBuffer.ts
Normal file
@ -0,0 +1,36 @@
|
||||
export type MCPServerLogEntry = {
|
||||
timestamp: number
|
||||
level: 'debug' | 'info' | 'warn' | 'error' | 'stderr' | 'stdout'
|
||||
message: string
|
||||
data?: any
|
||||
source?: string
|
||||
}
|
||||
|
||||
/**
|
||||
* Lightweight ring buffer for per-server MCP logs.
|
||||
*/
|
||||
export class ServerLogBuffer {
|
||||
private maxEntries: number
|
||||
private logs: Map<string, MCPServerLogEntry[]> = new Map()
|
||||
|
||||
constructor(maxEntries = 200) {
|
||||
this.maxEntries = maxEntries
|
||||
}
|
||||
|
||||
append(serverKey: string, entry: MCPServerLogEntry) {
|
||||
const list = this.logs.get(serverKey) ?? []
|
||||
list.push(entry)
|
||||
if (list.length > this.maxEntries) {
|
||||
list.splice(0, list.length - this.maxEntries)
|
||||
}
|
||||
this.logs.set(serverKey, list)
|
||||
}
|
||||
|
||||
get(serverKey: string): MCPServerLogEntry[] {
|
||||
return [...(this.logs.get(serverKey) ?? [])]
|
||||
}
|
||||
|
||||
remove(serverKey: string) {
|
||||
this.logs.delete(serverKey)
|
||||
}
|
||||
}
|
||||
@ -3,7 +3,7 @@ import fs from 'fs'
|
||||
import path from 'path'
|
||||
import { beforeEach, describe, expect, it, vi } from 'vitest'
|
||||
|
||||
import { findExecutable, findGitBash } from '../process'
|
||||
import { findExecutable, findGitBash, validateGitBashPath } from '../process'
|
||||
|
||||
// Mock dependencies
|
||||
vi.mock('child_process')
|
||||
@ -289,7 +289,133 @@ describe.skipIf(process.platform !== 'win32')('process utilities', () => {
|
||||
})
|
||||
})
|
||||
|
||||
describe('validateGitBashPath', () => {
|
||||
it('returns null when path is null', () => {
|
||||
const result = validateGitBashPath(null)
|
||||
|
||||
expect(result).toBeNull()
|
||||
})
|
||||
|
||||
it('returns null when path is undefined', () => {
|
||||
const result = validateGitBashPath(undefined)
|
||||
|
||||
expect(result).toBeNull()
|
||||
})
|
||||
|
||||
it('returns normalized path when valid bash.exe exists', () => {
|
||||
const customPath = 'C:\\PortableGit\\bin\\bash.exe'
|
||||
vi.mocked(fs.existsSync).mockImplementation((p) => p === 'C:\\PortableGit\\bin\\bash.exe')
|
||||
|
||||
const result = validateGitBashPath(customPath)
|
||||
|
||||
expect(result).toBe('C:\\PortableGit\\bin\\bash.exe')
|
||||
})
|
||||
|
||||
it('returns null when file does not exist', () => {
|
||||
vi.mocked(fs.existsSync).mockReturnValue(false)
|
||||
|
||||
const result = validateGitBashPath('C:\\missing\\bash.exe')
|
||||
|
||||
expect(result).toBeNull()
|
||||
})
|
||||
|
||||
it('returns null when path is not bash.exe', () => {
|
||||
const customPath = 'C:\\PortableGit\\bin\\git.exe'
|
||||
vi.mocked(fs.existsSync).mockReturnValue(true)
|
||||
|
||||
const result = validateGitBashPath(customPath)
|
||||
|
||||
expect(result).toBeNull()
|
||||
})
|
||||
})
|
||||
|
||||
describe('findGitBash', () => {
|
||||
describe('customPath parameter', () => {
|
||||
beforeEach(() => {
|
||||
delete process.env.CLAUDE_CODE_GIT_BASH_PATH
|
||||
})
|
||||
|
||||
it('uses customPath when valid', () => {
|
||||
const customPath = 'C:\\CustomGit\\bin\\bash.exe'
|
||||
vi.mocked(fs.existsSync).mockImplementation((p) => p === customPath)
|
||||
|
||||
const result = findGitBash(customPath)
|
||||
|
||||
expect(result).toBe(customPath)
|
||||
expect(execFileSync).not.toHaveBeenCalled()
|
||||
})
|
||||
|
||||
it('falls back when customPath is invalid', () => {
|
||||
const customPath = 'C:\\Invalid\\bash.exe'
|
||||
const gitPath = 'C:\\Program Files\\Git\\cmd\\git.exe'
|
||||
const bashPath = 'C:\\Program Files\\Git\\bin\\bash.exe'
|
||||
|
||||
vi.mocked(fs.existsSync).mockImplementation((p) => {
|
||||
if (p === customPath) return false
|
||||
if (p === gitPath) return true
|
||||
if (p === bashPath) return true
|
||||
return false
|
||||
})
|
||||
|
||||
vi.mocked(execFileSync).mockReturnValue(gitPath)
|
||||
|
||||
const result = findGitBash(customPath)
|
||||
|
||||
expect(result).toBe(bashPath)
|
||||
})
|
||||
|
||||
it('prioritizes customPath over env override', () => {
|
||||
const customPath = 'C:\\CustomGit\\bin\\bash.exe'
|
||||
const envPath = 'C:\\EnvGit\\bin\\bash.exe'
|
||||
process.env.CLAUDE_CODE_GIT_BASH_PATH = envPath
|
||||
|
||||
vi.mocked(fs.existsSync).mockImplementation((p) => p === customPath || p === envPath)
|
||||
|
||||
const result = findGitBash(customPath)
|
||||
|
||||
expect(result).toBe(customPath)
|
||||
})
|
||||
})
|
||||
|
||||
describe('env override', () => {
|
||||
beforeEach(() => {
|
||||
delete process.env.CLAUDE_CODE_GIT_BASH_PATH
|
||||
})
|
||||
|
||||
it('uses CLAUDE_CODE_GIT_BASH_PATH when valid', () => {
|
||||
const envPath = 'C:\\OverrideGit\\bin\\bash.exe'
|
||||
process.env.CLAUDE_CODE_GIT_BASH_PATH = envPath
|
||||
|
||||
vi.mocked(fs.existsSync).mockImplementation((p) => p === envPath)
|
||||
|
||||
const result = findGitBash()
|
||||
|
||||
expect(result).toBe(envPath)
|
||||
expect(execFileSync).not.toHaveBeenCalled()
|
||||
})
|
||||
|
||||
it('falls back when CLAUDE_CODE_GIT_BASH_PATH is invalid', () => {
|
||||
const envPath = 'C:\\Invalid\\bash.exe'
|
||||
const gitPath = 'C:\\Program Files\\Git\\cmd\\git.exe'
|
||||
const bashPath = 'C:\\Program Files\\Git\\bin\\bash.exe'
|
||||
|
||||
process.env.CLAUDE_CODE_GIT_BASH_PATH = envPath
|
||||
|
||||
vi.mocked(fs.existsSync).mockImplementation((p) => {
|
||||
if (p === envPath) return false
|
||||
if (p === gitPath) return true
|
||||
if (p === bashPath) return true
|
||||
return false
|
||||
})
|
||||
|
||||
vi.mocked(execFileSync).mockReturnValue(gitPath)
|
||||
|
||||
const result = findGitBash()
|
||||
|
||||
expect(result).toBe(bashPath)
|
||||
})
|
||||
})
|
||||
|
||||
describe('git.exe path derivation', () => {
|
||||
it('should derive bash.exe from standard Git installation (Git/cmd/git.exe)', () => {
|
||||
const gitPath = 'C:\\Program Files\\Git\\cmd\\git.exe'
|
||||
|
||||
@ -131,15 +131,37 @@ export function findExecutable(name: string): string | null {
|
||||
|
||||
/**
|
||||
* Find Git Bash executable on Windows
|
||||
* @param customPath - Optional custom path from config
|
||||
* @returns Full path to bash.exe or null if not found
|
||||
*/
|
||||
export function findGitBash(): string | null {
|
||||
export function findGitBash(customPath?: string | null): string | null {
|
||||
// Git Bash is Windows-only
|
||||
if (!isWin) {
|
||||
return null
|
||||
}
|
||||
|
||||
// 1. Find git.exe and derive bash.exe path
|
||||
// 1. Check custom path from config first
|
||||
if (customPath) {
|
||||
const validated = validateGitBashPath(customPath)
|
||||
if (validated) {
|
||||
logger.debug('Using custom Git Bash path from config', { path: validated })
|
||||
return validated
|
||||
}
|
||||
logger.warn('Custom Git Bash path provided but invalid', { path: customPath })
|
||||
}
|
||||
|
||||
// 2. Check environment variable override
|
||||
const envOverride = process.env.CLAUDE_CODE_GIT_BASH_PATH
|
||||
if (envOverride) {
|
||||
const validated = validateGitBashPath(envOverride)
|
||||
if (validated) {
|
||||
logger.debug('Using CLAUDE_CODE_GIT_BASH_PATH override for bash.exe', { path: validated })
|
||||
return validated
|
||||
}
|
||||
logger.warn('CLAUDE_CODE_GIT_BASH_PATH provided but path is invalid', { path: envOverride })
|
||||
}
|
||||
|
||||
// 3. Find git.exe and derive bash.exe path
|
||||
const gitPath = findExecutable('git')
|
||||
if (gitPath) {
|
||||
// Try multiple possible locations for bash.exe relative to git.exe
|
||||
@ -164,7 +186,7 @@ export function findGitBash(): string | null {
|
||||
})
|
||||
}
|
||||
|
||||
// 2. Fallback: check common Git Bash paths directly
|
||||
// 4. Fallback: check common Git Bash paths directly
|
||||
const commonBashPaths = [
|
||||
path.join(process.env.ProgramFiles || 'C:\\Program Files', 'Git', 'bin', 'bash.exe'),
|
||||
path.join(process.env['ProgramFiles(x86)'] || 'C:\\Program Files (x86)', 'Git', 'bin', 'bash.exe'),
|
||||
@ -181,3 +203,25 @@ export function findGitBash(): string | null {
|
||||
logger.debug('Git Bash not found - checked git derivation and common paths')
|
||||
return null
|
||||
}
|
||||
|
||||
export function validateGitBashPath(customPath?: string | null): string | null {
|
||||
if (!customPath) {
|
||||
return null
|
||||
}
|
||||
|
||||
const resolved = path.resolve(customPath)
|
||||
|
||||
if (!fs.existsSync(resolved)) {
|
||||
logger.warn('Custom Git Bash path does not exist', { path: resolved })
|
||||
return null
|
||||
}
|
||||
|
||||
const isExe = resolved.toLowerCase().endsWith('bash.exe')
|
||||
if (!isExe) {
|
||||
logger.warn('Custom Git Bash path is not bash.exe', { path: resolved })
|
||||
return null
|
||||
}
|
||||
|
||||
logger.debug('Validated custom Git Bash path', { path: resolved })
|
||||
return resolved
|
||||
}
|
||||
|
||||
@ -5,6 +5,7 @@ import type { SpanContext } from '@opentelemetry/api'
|
||||
import type { TerminalConfig } from '@shared/config/constant'
|
||||
import type { LogLevel, LogSourceWithContext } from '@shared/config/logger'
|
||||
import type { FileChangeEvent, WebviewKeyEvent } from '@shared/config/types'
|
||||
import type { MCPServerLogEntry } from '@shared/config/types'
|
||||
import type { CacheSyncMessage } from '@shared/data/cache/cacheTypes'
|
||||
import type {
|
||||
PreferenceDefaultScopeType,
|
||||
@ -129,7 +130,10 @@ const api = {
|
||||
getDeviceType: () => ipcRenderer.invoke(IpcChannel.System_GetDeviceType),
|
||||
getHostname: () => ipcRenderer.invoke(IpcChannel.System_GetHostname),
|
||||
getCpuName: () => ipcRenderer.invoke(IpcChannel.System_GetCpuName),
|
||||
checkGitBash: (): Promise<boolean> => ipcRenderer.invoke(IpcChannel.System_CheckGitBash)
|
||||
checkGitBash: (): Promise<boolean> => ipcRenderer.invoke(IpcChannel.System_CheckGitBash),
|
||||
getGitBashPath: (): Promise<string | null> => ipcRenderer.invoke(IpcChannel.System_GetGitBashPath),
|
||||
setGitBashPath: (newPath: string | null): Promise<boolean> =>
|
||||
ipcRenderer.invoke(IpcChannel.System_SetGitBashPath, newPath)
|
||||
},
|
||||
devTools: {
|
||||
toggle: () => ipcRenderer.invoke(IpcChannel.System_ToggleDevTools)
|
||||
@ -378,7 +382,16 @@ const api = {
|
||||
},
|
||||
abortTool: (callId: string) => ipcRenderer.invoke(IpcChannel.Mcp_AbortTool, callId),
|
||||
getServerVersion: (server: MCPServer): Promise<string | null> =>
|
||||
ipcRenderer.invoke(IpcChannel.Mcp_GetServerVersion, server)
|
||||
ipcRenderer.invoke(IpcChannel.Mcp_GetServerVersion, server),
|
||||
getServerLogs: (server: MCPServer): Promise<MCPServerLogEntry[]> =>
|
||||
ipcRenderer.invoke(IpcChannel.Mcp_GetServerLogs, server),
|
||||
onServerLog: (callback: (log: MCPServerLogEntry & { serverId?: string }) => void) => {
|
||||
const listener = (_event: Electron.IpcRendererEvent, log: MCPServerLogEntry & { serverId?: string }) => {
|
||||
callback(log)
|
||||
}
|
||||
ipcRenderer.on(IpcChannel.Mcp_ServerLog, listener)
|
||||
return () => ipcRenderer.off(IpcChannel.Mcp_ServerLog, listener)
|
||||
}
|
||||
},
|
||||
python: {
|
||||
execute: (script: string, context?: Record<string, any>, timeout?: number) =>
|
||||
@ -430,6 +443,8 @@ const api = {
|
||||
ipcRenderer.invoke(IpcChannel.Webview_SetOpenLinkExternal, webviewId, isExternal),
|
||||
setSpellCheckEnabled: (webviewId: number, isEnable: boolean) =>
|
||||
ipcRenderer.invoke(IpcChannel.Webview_SetSpellCheckEnabled, webviewId, isEnable),
|
||||
printToPDF: (webviewId: number) => ipcRenderer.invoke(IpcChannel.Webview_PrintToPDF, webviewId),
|
||||
saveAsHTML: (webviewId: number) => ipcRenderer.invoke(IpcChannel.Webview_SaveAsHTML, webviewId),
|
||||
onFindShortcut: (callback: (payload: WebviewKeyEvent) => void) => {
|
||||
const listener = (_event: Electron.IpcRendererEvent, payload: WebviewKeyEvent) => {
|
||||
callback(payload)
|
||||
|
||||
@ -91,7 +91,9 @@ export default class ModernAiProvider {
|
||||
if (this.isModel(modelOrProvider)) {
|
||||
// 传入的是 Model
|
||||
this.model = modelOrProvider
|
||||
this.actualProvider = provider ? adaptProvider({ provider }) : getActualProvider(modelOrProvider)
|
||||
this.actualProvider = provider
|
||||
? adaptProvider({ provider, model: modelOrProvider })
|
||||
: getActualProvider(modelOrProvider)
|
||||
// 只保存配置,不预先创建executor
|
||||
this.config = providerToAiSdkConfig(this.actualProvider, modelOrProvider)
|
||||
} else {
|
||||
|
||||
@ -124,7 +124,8 @@ export class AnthropicAPIClient extends BaseApiClient<
|
||||
|
||||
override async listModels(): Promise<Anthropic.ModelInfo[]> {
|
||||
const sdk = (await this.getSdkInstance()) as Anthropic
|
||||
const response = await sdk.models.list()
|
||||
// prevent auto appended /v1. It's included in baseUrl.
|
||||
const response = await sdk.models.list({ path: '/models' })
|
||||
return response.data
|
||||
}
|
||||
|
||||
|
||||
@ -173,13 +173,15 @@ export class GeminiAPIClient extends BaseApiClient<
|
||||
return this.sdkInstance
|
||||
}
|
||||
|
||||
const apiVersion = this.getApiVersion()
|
||||
|
||||
this.sdkInstance = new GoogleGenAI({
|
||||
vertexai: false,
|
||||
apiKey: this.apiKey,
|
||||
apiVersion: this.getApiVersion(),
|
||||
apiVersion,
|
||||
httpOptions: {
|
||||
baseUrl: this.getBaseURL(),
|
||||
apiVersion: this.getApiVersion(),
|
||||
apiVersion,
|
||||
headers: {
|
||||
...this.provider.extra_headers
|
||||
}
|
||||
@ -200,7 +202,7 @@ export class GeminiAPIClient extends BaseApiClient<
|
||||
return trailingVersion
|
||||
}
|
||||
|
||||
return 'v1beta'
|
||||
return ''
|
||||
}
|
||||
|
||||
/**
|
||||
|
||||
@ -25,7 +25,7 @@ import type {
|
||||
OpenAISdkRawOutput,
|
||||
ReasoningEffortOptionalParams
|
||||
} from '@renderer/types/sdk'
|
||||
import { formatApiHost, withoutTrailingSlash } from '@renderer/utils/api'
|
||||
import { withoutTrailingSlash } from '@renderer/utils/api'
|
||||
import { isOllamaProvider } from '@renderer/utils/provider'
|
||||
|
||||
import { BaseApiClient } from '../BaseApiClient'
|
||||
@ -49,8 +49,9 @@ export abstract class OpenAIBaseClient<
|
||||
}
|
||||
|
||||
// 仅适用于openai
|
||||
override getBaseURL(isSupportedAPIVerion: boolean = true): string {
|
||||
return formatApiHost(this.provider.apiHost, isSupportedAPIVerion)
|
||||
override getBaseURL(): string {
|
||||
// apiHost is formatted when called by AiProvider
|
||||
return this.provider.apiHost
|
||||
}
|
||||
|
||||
override async generateImage({
|
||||
@ -100,6 +101,17 @@ export abstract class OpenAIBaseClient<
|
||||
override async listModels(): Promise<OpenAI.Models.Model[]> {
|
||||
try {
|
||||
const sdk = await this.getSdkInstance()
|
||||
if (this.provider.id === 'openrouter') {
|
||||
// https://openrouter.ai/docs/api/api-reference/embeddings/list-embeddings-models
|
||||
const embedBaseUrl = 'https://openrouter.ai/api/v1/embeddings'
|
||||
const embedSdk = sdk.withOptions({ baseURL: embedBaseUrl })
|
||||
const modelPromise = sdk.models.list()
|
||||
const embedModelPromise = embedSdk.models.list()
|
||||
const [modelResponse, embedModelResponse] = await Promise.all([modelPromise, embedModelPromise])
|
||||
const models = [...modelResponse.data, ...embedModelResponse.data]
|
||||
const uniqueModels = Array.from(new Map(models.map((model) => [model.id, model])).values())
|
||||
return uniqueModels.filter(isSupportedModel)
|
||||
}
|
||||
if (this.provider.id === 'github') {
|
||||
// GitHub Models 其 models 和 chat completions 两个接口的 baseUrl 不一样
|
||||
const baseUrl = 'https://models.github.ai/catalog/'
|
||||
@ -118,7 +130,7 @@ export abstract class OpenAIBaseClient<
|
||||
}
|
||||
|
||||
if (isOllamaProvider(this.provider)) {
|
||||
const baseUrl = withoutTrailingSlash(this.getBaseURL(false))
|
||||
const baseUrl = withoutTrailingSlash(this.getBaseURL())
|
||||
.replace(/\/v1$/, '')
|
||||
.replace(/\/api$/, '')
|
||||
const response = await fetch(`${baseUrl}/api/tags`, {
|
||||
@ -173,6 +185,7 @@ export abstract class OpenAIBaseClient<
|
||||
|
||||
let apiKeyForSdkInstance = this.apiKey
|
||||
let baseURLForSdkInstance = this.getBaseURL()
|
||||
logger.debug('baseURLForSdkInstance', { baseURLForSdkInstance })
|
||||
let headersForSdkInstance = {
|
||||
...this.defaultHeaders(),
|
||||
...this.provider.extra_headers
|
||||
@ -184,7 +197,7 @@ export abstract class OpenAIBaseClient<
|
||||
// this.provider.apiKey不允许修改
|
||||
// this.provider.apiKey = token
|
||||
apiKeyForSdkInstance = token
|
||||
baseURLForSdkInstance = this.getBaseURL(false)
|
||||
baseURLForSdkInstance = this.getBaseURL()
|
||||
headersForSdkInstance = {
|
||||
...headersForSdkInstance,
|
||||
...COPILOT_DEFAULT_HEADERS
|
||||
|
||||
@ -122,6 +122,7 @@ export class OpenAIResponseAPIClient extends OpenAIBaseClient<
|
||||
if (this.sdkInstance) {
|
||||
return this.sdkInstance
|
||||
}
|
||||
const baseUrl = this.getBaseURL()
|
||||
|
||||
if (this.provider.id === 'azure-openai' || this.provider.type === 'azure-openai') {
|
||||
return new AzureOpenAI({
|
||||
@ -134,7 +135,7 @@ export class OpenAIResponseAPIClient extends OpenAIBaseClient<
|
||||
return new OpenAI({
|
||||
dangerouslyAllowBrowser: true,
|
||||
apiKey: this.apiKey,
|
||||
baseURL: this.getBaseURL(),
|
||||
baseURL: baseUrl,
|
||||
defaultHeaders: {
|
||||
...this.defaultHeaders(),
|
||||
...this.provider.extra_headers
|
||||
|
||||
@ -2,7 +2,6 @@ import { loggerService } from '@logger'
|
||||
import { ApiClientFactory } from '@renderer/aiCore/legacy/clients/ApiClientFactory'
|
||||
import type { BaseApiClient } from '@renderer/aiCore/legacy/clients/BaseApiClient'
|
||||
import { isDedicatedImageGenerationModel, isFunctionCallingModel } from '@renderer/config/models'
|
||||
import { getProviderByModel } from '@renderer/services/AssistantService'
|
||||
import { withSpanResult } from '@renderer/services/SpanManagerService'
|
||||
import type { StartSpanParams } from '@renderer/trace/types/ModelSpanEntity'
|
||||
import type { GenerateImageParams, Model, Provider } from '@renderer/types'
|
||||
@ -160,9 +159,6 @@ export default class AiProvider {
|
||||
public async getEmbeddingDimensions(model: Model): Promise<number> {
|
||||
try {
|
||||
// Use the SDK instance to test embedding capabilities
|
||||
if (this.apiClient instanceof OpenAIResponseAPIClient && getProviderByModel(model).type === 'azure-openai') {
|
||||
this.apiClient = this.apiClient.getClient(model) as BaseApiClient
|
||||
}
|
||||
const dimensions = await this.apiClient.getEmbeddingDimensions(model)
|
||||
return dimensions
|
||||
} catch (error) {
|
||||
|
||||
@ -137,6 +137,73 @@ describe('messageConverter', () => {
|
||||
})
|
||||
})
|
||||
|
||||
it('extracts base64 data from data URLs and preserves mediaType', async () => {
|
||||
const model = createModel()
|
||||
const message = createMessage('user')
|
||||
message.__mockContent = 'Check this image'
|
||||
message.__mockImageBlocks = [createImageBlock(message.id, { url: 'data:image/png;base64,iVBORw0KGgoAAAANS' })]
|
||||
|
||||
const result = await convertMessageToSdkParam(message, true, model)
|
||||
|
||||
expect(result).toEqual({
|
||||
role: 'user',
|
||||
content: [
|
||||
{ type: 'text', text: 'Check this image' },
|
||||
{ type: 'image', image: 'iVBORw0KGgoAAAANS', mediaType: 'image/png' }
|
||||
]
|
||||
})
|
||||
})
|
||||
|
||||
it('handles data URLs without mediaType gracefully', async () => {
|
||||
const model = createModel()
|
||||
const message = createMessage('user')
|
||||
message.__mockContent = 'Check this'
|
||||
message.__mockImageBlocks = [createImageBlock(message.id, { url: 'data:;base64,AAABBBCCC' })]
|
||||
|
||||
const result = await convertMessageToSdkParam(message, true, model)
|
||||
|
||||
expect(result).toEqual({
|
||||
role: 'user',
|
||||
content: [
|
||||
{ type: 'text', text: 'Check this' },
|
||||
{ type: 'image', image: 'AAABBBCCC' }
|
||||
]
|
||||
})
|
||||
})
|
||||
|
||||
it('skips malformed data URLs without comma separator', async () => {
|
||||
const model = createModel()
|
||||
const message = createMessage('user')
|
||||
message.__mockContent = 'Malformed data url'
|
||||
message.__mockImageBlocks = [createImageBlock(message.id, { url: 'data:image/pngAAABBB' })]
|
||||
|
||||
const result = await convertMessageToSdkParam(message, true, model)
|
||||
|
||||
expect(result).toEqual({
|
||||
role: 'user',
|
||||
content: [
|
||||
{ type: 'text', text: 'Malformed data url' }
|
||||
// Malformed data URL is excluded from the content
|
||||
]
|
||||
})
|
||||
})
|
||||
|
||||
it('handles multiple large base64 images without stack overflow', async () => {
|
||||
const model = createModel()
|
||||
const message = createMessage('user')
|
||||
// Create large base64 strings (~500KB each) to simulate real-world large images
|
||||
const largeBase64 = 'A'.repeat(500_000)
|
||||
message.__mockContent = 'Check these images'
|
||||
message.__mockImageBlocks = [
|
||||
createImageBlock(message.id, { url: `data:image/png;base64,${largeBase64}` }),
|
||||
createImageBlock(message.id, { url: `data:image/png;base64,${largeBase64}` }),
|
||||
createImageBlock(message.id, { url: `data:image/png;base64,${largeBase64}` })
|
||||
]
|
||||
|
||||
// Should not throw RangeError: Maximum call stack size exceeded
|
||||
await expect(convertMessageToSdkParam(message, true, model)).resolves.toBeDefined()
|
||||
})
|
||||
|
||||
it('returns file instructions as a system message when native uploads succeed', async () => {
|
||||
const model = createModel()
|
||||
const message = createMessage('user')
|
||||
@ -165,7 +232,7 @@ describe('messageConverter', () => {
|
||||
})
|
||||
|
||||
describe('convertMessagesToSdkMessages', () => {
|
||||
it('appends assistant images to the final user message for image enhancement models', async () => {
|
||||
it('collapses to [system?, user(image)] for image enhancement models', async () => {
|
||||
const model = createModel({ id: 'qwen-image-edit', name: 'Qwen Image Edit', provider: 'qwen', group: 'qwen' })
|
||||
const initialUser = createMessage('user')
|
||||
initialUser.__mockContent = 'Start editing'
|
||||
@ -180,14 +247,6 @@ describe('messageConverter', () => {
|
||||
const result = await convertMessagesToSdkMessages([initialUser, assistant, finalUser], model)
|
||||
|
||||
expect(result).toEqual([
|
||||
{
|
||||
role: 'user',
|
||||
content: [{ type: 'text', text: 'Start editing' }]
|
||||
},
|
||||
{
|
||||
role: 'assistant',
|
||||
content: [{ type: 'text', text: 'Here is the current preview' }]
|
||||
},
|
||||
{
|
||||
role: 'user',
|
||||
content: [
|
||||
@ -198,7 +257,7 @@ describe('messageConverter', () => {
|
||||
])
|
||||
})
|
||||
|
||||
it('preserves preceding system instructions when building enhancement payloads', async () => {
|
||||
it('preserves system messages and collapses others for enhancement payloads', async () => {
|
||||
const model = createModel({ id: 'qwen-image-edit', name: 'Qwen Image Edit', provider: 'qwen', group: 'qwen' })
|
||||
const fileUser = createMessage('user')
|
||||
fileUser.__mockContent = 'Use this document as inspiration'
|
||||
@ -221,11 +280,6 @@ describe('messageConverter', () => {
|
||||
|
||||
expect(result).toEqual([
|
||||
{ role: 'system', content: 'fileid://reference' },
|
||||
{ role: 'user', content: [{ type: 'text', text: 'Use this document as inspiration' }] },
|
||||
{
|
||||
role: 'assistant',
|
||||
content: [{ type: 'text', text: 'Generated previews ready' }]
|
||||
},
|
||||
{
|
||||
role: 'user',
|
||||
content: [
|
||||
@ -235,5 +289,120 @@ describe('messageConverter', () => {
|
||||
}
|
||||
])
|
||||
})
|
||||
|
||||
it('handles no previous assistant message with images', async () => {
|
||||
const model = createModel({ id: 'qwen-image-edit', name: 'Qwen Image Edit', provider: 'qwen', group: 'qwen' })
|
||||
const user1 = createMessage('user')
|
||||
user1.__mockContent = 'Start'
|
||||
|
||||
const user2 = createMessage('user')
|
||||
user2.__mockContent = 'Continue without images'
|
||||
|
||||
const result = await convertMessagesToSdkMessages([user1, user2], model)
|
||||
|
||||
expect(result).toEqual([
|
||||
{
|
||||
role: 'user',
|
||||
content: [{ type: 'text', text: 'Continue without images' }]
|
||||
}
|
||||
])
|
||||
})
|
||||
|
||||
it('handles assistant message without images', async () => {
|
||||
const model = createModel({ id: 'qwen-image-edit', name: 'Qwen Image Edit', provider: 'qwen', group: 'qwen' })
|
||||
const user1 = createMessage('user')
|
||||
user1.__mockContent = 'Start'
|
||||
|
||||
const assistant = createMessage('assistant')
|
||||
assistant.__mockContent = 'Text only response'
|
||||
assistant.__mockImageBlocks = []
|
||||
|
||||
const user2 = createMessage('user')
|
||||
user2.__mockContent = 'Follow up'
|
||||
|
||||
const result = await convertMessagesToSdkMessages([user1, assistant, user2], model)
|
||||
|
||||
expect(result).toEqual([
|
||||
{
|
||||
role: 'user',
|
||||
content: [{ type: 'text', text: 'Follow up' }]
|
||||
}
|
||||
])
|
||||
})
|
||||
|
||||
it('handles multiple assistant messages by using the most recent one', async () => {
|
||||
const model = createModel({ id: 'qwen-image-edit', name: 'Qwen Image Edit', provider: 'qwen', group: 'qwen' })
|
||||
const user1 = createMessage('user')
|
||||
user1.__mockContent = 'Start'
|
||||
|
||||
const assistant1 = createMessage('assistant')
|
||||
assistant1.__mockContent = 'First response'
|
||||
assistant1.__mockImageBlocks = [createImageBlock(assistant1.id, { url: 'https://example.com/old.png' })]
|
||||
|
||||
const user2 = createMessage('user')
|
||||
user2.__mockContent = 'Continue'
|
||||
|
||||
const assistant2 = createMessage('assistant')
|
||||
assistant2.__mockContent = 'Second response'
|
||||
assistant2.__mockImageBlocks = [createImageBlock(assistant2.id, { url: 'https://example.com/new.png' })]
|
||||
|
||||
const user3 = createMessage('user')
|
||||
user3.__mockContent = 'Final request'
|
||||
|
||||
const result = await convertMessagesToSdkMessages([user1, assistant1, user2, assistant2, user3], model)
|
||||
|
||||
expect(result).toEqual([
|
||||
{
|
||||
role: 'user',
|
||||
content: [
|
||||
{ type: 'text', text: 'Final request' },
|
||||
{ type: 'image', image: 'https://example.com/new.png' }
|
||||
]
|
||||
}
|
||||
])
|
||||
})
|
||||
|
||||
it('handles conversation ending with assistant message', async () => {
|
||||
const model = createModel({ id: 'qwen-image-edit', name: 'Qwen Image Edit', provider: 'qwen', group: 'qwen' })
|
||||
const user = createMessage('user')
|
||||
user.__mockContent = 'Start'
|
||||
|
||||
const assistant = createMessage('assistant')
|
||||
assistant.__mockContent = 'Response with image'
|
||||
assistant.__mockImageBlocks = [createImageBlock(assistant.id, { url: 'https://example.com/image.png' })]
|
||||
|
||||
const result = await convertMessagesToSdkMessages([user, assistant], model)
|
||||
|
||||
// The user message is the last user message, but since the assistant comes after,
|
||||
// there's no "previous" assistant message (search starts from messages.length-2 backwards)
|
||||
expect(result).toEqual([
|
||||
{
|
||||
role: 'user',
|
||||
content: [{ type: 'text', text: 'Start' }]
|
||||
}
|
||||
])
|
||||
})
|
||||
|
||||
it('handles empty content in last user message', async () => {
|
||||
const model = createModel({ id: 'qwen-image-edit', name: 'Qwen Image Edit', provider: 'qwen', group: 'qwen' })
|
||||
const user1 = createMessage('user')
|
||||
user1.__mockContent = 'Start'
|
||||
|
||||
const assistant = createMessage('assistant')
|
||||
assistant.__mockContent = 'Here is the preview'
|
||||
assistant.__mockImageBlocks = [createImageBlock(assistant.id, { url: 'https://example.com/preview.png' })]
|
||||
|
||||
const user2 = createMessage('user')
|
||||
user2.__mockContent = ''
|
||||
|
||||
const result = await convertMessagesToSdkMessages([user1, assistant, user2], model)
|
||||
|
||||
expect(result).toEqual([
|
||||
{
|
||||
role: 'user',
|
||||
content: [{ type: 'image', image: 'https://example.com/preview.png' }]
|
||||
}
|
||||
])
|
||||
})
|
||||
})
|
||||
})
|
||||
|
||||
@ -7,6 +7,7 @@ import { loggerService } from '@logger'
|
||||
import { isImageEnhancementModel, isVisionModel } from '@renderer/config/models'
|
||||
import type { Message, Model } from '@renderer/types'
|
||||
import type { FileMessageBlock, ImageMessageBlock, ThinkingMessageBlock } from '@renderer/types/newMessage'
|
||||
import { parseDataUrlMediaType } from '@renderer/utils/image'
|
||||
import {
|
||||
findFileBlocks,
|
||||
findImageBlocks,
|
||||
@ -59,23 +60,29 @@ async function convertImageBlockToImagePart(imageBlocks: ImageMessageBlock[]): P
|
||||
mediaType: image.mime
|
||||
})
|
||||
} catch (error) {
|
||||
logger.warn('Failed to load image:', error as Error)
|
||||
logger.error('Failed to load image file, image will be excluded from message:', {
|
||||
fileId: imageBlock.file.id,
|
||||
fileName: imageBlock.file.origin_name,
|
||||
error: error as Error
|
||||
})
|
||||
}
|
||||
} else if (imageBlock.url) {
|
||||
const isBase64 = imageBlock.url.startsWith('data:')
|
||||
if (isBase64) {
|
||||
const base64 = imageBlock.url.match(/^data:[^;]*;base64,(.+)$/)![1]
|
||||
const mimeMatch = imageBlock.url.match(/^data:([^;]+)/)
|
||||
parts.push({
|
||||
type: 'image',
|
||||
image: base64,
|
||||
mediaType: mimeMatch ? mimeMatch[1] : 'image/png'
|
||||
})
|
||||
const url = imageBlock.url
|
||||
const isDataUrl = url.startsWith('data:')
|
||||
if (isDataUrl) {
|
||||
const { mediaType } = parseDataUrlMediaType(url)
|
||||
const commaIndex = url.indexOf(',')
|
||||
if (commaIndex === -1) {
|
||||
logger.error('Malformed data URL detected (missing comma separator), image will be excluded:', {
|
||||
urlPrefix: url.slice(0, 50) + '...'
|
||||
})
|
||||
continue
|
||||
}
|
||||
const base64Data = url.slice(commaIndex + 1)
|
||||
parts.push({ type: 'image', image: base64Data, ...(mediaType ? { mediaType } : {}) })
|
||||
} else {
|
||||
parts.push({
|
||||
type: 'image',
|
||||
image: imageBlock.url
|
||||
})
|
||||
// For remote URLs we keep payload minimal to match existing expectations.
|
||||
parts.push({ type: 'image', image: url })
|
||||
}
|
||||
}
|
||||
}
|
||||
@ -194,17 +201,20 @@ async function convertMessageToAssistantModelMessage(
|
||||
* This function processes messages and transforms them into the format required by the SDK.
|
||||
* It handles special cases for vision models and image enhancement models.
|
||||
*
|
||||
* @param messages - Array of messages to convert. Must contain at least 3 messages when using image enhancement models for special handling.
|
||||
* @param messages - Array of messages to convert.
|
||||
* @param model - The model configuration that determines conversion behavior
|
||||
*
|
||||
* @returns A promise that resolves to an array of SDK-compatible model messages
|
||||
*
|
||||
* @remarks
|
||||
* For image enhancement models with 3+ messages:
|
||||
* - Examines the last 2 messages to find an assistant message containing image blocks
|
||||
* - If found, extracts images from the assistant message and appends them to the last user message content
|
||||
* - Returns all converted messages (not just the last two) with the images merged into the user message
|
||||
* - Typical pattern: [system?, assistant(image), user] -> [system?, assistant, user(image)]
|
||||
* For image enhancement models:
|
||||
* - Collapses the conversation into [system?, user(image)] format
|
||||
* - Searches backwards through all messages to find the most recent assistant message with images
|
||||
* - Preserves all system messages (including ones generated from file uploads like 'fileid://...')
|
||||
* - Extracts the last user message content and merges images from the previous assistant message
|
||||
* - Returns only the collapsed messages: system messages (if any) followed by a single user message
|
||||
* - If no user message is found, returns only system messages
|
||||
* - Typical pattern: [system?, user, assistant(image), user] -> [system?, user(image)]
|
||||
*
|
||||
* For other models:
|
||||
* - Returns all converted messages in order without special image handling
|
||||
@ -220,25 +230,66 @@ export async function convertMessagesToSdkMessages(messages: Message[], model: M
|
||||
sdkMessages.push(...(Array.isArray(sdkMessage) ? sdkMessage : [sdkMessage]))
|
||||
}
|
||||
// Special handling for image enhancement models
|
||||
// Only merge images into the user message
|
||||
// [system?, assistant(image), user] -> [system?, assistant, user(image)]
|
||||
if (isImageEnhancementModel(model) && messages.length >= 3) {
|
||||
const needUpdatedMessages = messages.slice(-2)
|
||||
const assistantMessage = needUpdatedMessages.find((m) => m.role === 'assistant')
|
||||
const userSdkMessage = sdkMessages[sdkMessages.length - 1]
|
||||
// Target behavior: Collapse the conversation into [system?, user(image)].
|
||||
// Explanation of why we don't simply use slice:
|
||||
// 1) We need to preserve all system messages: During the convertMessageToSdkParam process, native file uploads may insert `system(fileid://...)`.
|
||||
// Directly slicing the original messages or already converted sdkMessages could easily result in missing these system instructions.
|
||||
// Therefore, we first perform a full conversion and then aggregate the system messages afterward.
|
||||
// 2) The conversion process may split messages: A single user message might be broken into two SDK messages—[system, user].
|
||||
// Slicing either side could lead to obtaining semantically incorrect fragments (e.g., only the split-out system message).
|
||||
// 3) The “previous assistant message” is not necessarily the second-to-last one: There might be system messages or other message blocks inserted in between,
|
||||
// making a simple slice(-2) assumption too rigid. Here, we trace back from the end of the original messages to locate the most recent assistant message, which better aligns with business semantics.
|
||||
// 4) This is a “collapse” rather than a simple “slice”: Ultimately, we need to synthesize a new user message
|
||||
// (with text from the last user message and images from the previous assistant message). Using slice can only extract subarrays,
|
||||
// which still require reassembly; constructing directly according to the target structure is clearer and more reliable.
|
||||
if (isImageEnhancementModel(model)) {
|
||||
// Collect all system messages (including ones generated from file uploads)
|
||||
const systemMessages = sdkMessages.filter((m): m is SystemModelMessage => m.role === 'system')
|
||||
|
||||
if (assistantMessage && userSdkMessage?.role === 'user') {
|
||||
const imageBlocks = findImageBlocks(assistantMessage)
|
||||
const imageParts = await convertImageBlockToImagePart(imageBlocks)
|
||||
// Find the last user message (SDK converted)
|
||||
const lastUserSdkIndex = (() => {
|
||||
for (let i = sdkMessages.length - 1; i >= 0; i--) {
|
||||
if (sdkMessages[i].role === 'user') return i
|
||||
}
|
||||
return -1
|
||||
})()
|
||||
|
||||
if (imageParts.length > 0) {
|
||||
if (typeof userSdkMessage.content === 'string') {
|
||||
userSdkMessage.content = [{ type: 'text', text: userSdkMessage.content }, ...imageParts]
|
||||
} else if (Array.isArray(userSdkMessage.content)) {
|
||||
userSdkMessage.content.push(...imageParts)
|
||||
}
|
||||
const lastUserSdk = lastUserSdkIndex >= 0 ? (sdkMessages[lastUserSdkIndex] as UserModelMessage) : null
|
||||
|
||||
// Find the nearest preceding assistant message in original messages
|
||||
let prevAssistant: Message | null = null
|
||||
for (let i = messages.length - 2; i >= 0; i--) {
|
||||
if (messages[i].role === 'assistant') {
|
||||
prevAssistant = messages[i]
|
||||
break
|
||||
}
|
||||
}
|
||||
|
||||
// Build the final user content parts
|
||||
let finalUserParts: Array<TextPart | FilePart | ImagePart> = []
|
||||
if (lastUserSdk) {
|
||||
if (typeof lastUserSdk.content === 'string') {
|
||||
finalUserParts.push({ type: 'text', text: lastUserSdk.content })
|
||||
} else if (Array.isArray(lastUserSdk.content)) {
|
||||
finalUserParts = [...lastUserSdk.content]
|
||||
}
|
||||
}
|
||||
|
||||
// Append images from the previous assistant message if any
|
||||
if (prevAssistant) {
|
||||
const imageBlocks = findImageBlocks(prevAssistant)
|
||||
const imageParts = await convertImageBlockToImagePart(imageBlocks)
|
||||
if (imageParts.length > 0) {
|
||||
finalUserParts.push(...imageParts)
|
||||
}
|
||||
}
|
||||
|
||||
// If we couldn't find a last user message, fall back to returning collected system messages only
|
||||
if (!lastUserSdk) {
|
||||
return systemMessages
|
||||
}
|
||||
|
||||
return [...systemMessages, { role: 'user', content: finalUserParts }]
|
||||
}
|
||||
|
||||
return sdkMessages
|
||||
|
||||
@ -28,13 +28,14 @@ import { getAnthropicThinkingBudget } from '../utils/reasoning'
|
||||
* - Disabled for models that do not support temperature.
|
||||
* - Disabled for Claude 4.5 reasoning models when TopP is enabled and temperature is disabled.
|
||||
* Otherwise, returns the temperature value if the assistant has temperature enabled.
|
||||
|
||||
*/
|
||||
export function getTemperature(assistant: Assistant, model: Model): number | undefined {
|
||||
if (assistant.settings?.reasoning_effort && isClaudeReasoningModel(model)) {
|
||||
return undefined
|
||||
}
|
||||
|
||||
if (!isSupportTemperatureModel(model)) {
|
||||
if (!isSupportTemperatureModel(model, assistant)) {
|
||||
return undefined
|
||||
}
|
||||
|
||||
@ -46,6 +47,10 @@ export function getTemperature(assistant: Assistant, model: Model): number | und
|
||||
return undefined
|
||||
}
|
||||
|
||||
return getTemperatureValue(assistant, model)
|
||||
}
|
||||
|
||||
function getTemperatureValue(assistant: Assistant, model: Model): number | undefined {
|
||||
const assistantSettings = getAssistantSettings(assistant)
|
||||
let temperature = assistantSettings?.temperature
|
||||
if (temperature && isMaxTemperatureOneModel(model)) {
|
||||
@ -68,13 +73,17 @@ export function getTopP(assistant: Assistant, model: Model): number | undefined
|
||||
if (assistant.settings?.reasoning_effort && isClaudeReasoningModel(model)) {
|
||||
return undefined
|
||||
}
|
||||
if (!isSupportTopPModel(model)) {
|
||||
if (!isSupportTopPModel(model, assistant)) {
|
||||
return undefined
|
||||
}
|
||||
if (isTemperatureTopPMutuallyExclusiveModel(model) && assistant.settings?.enableTemperature) {
|
||||
return undefined
|
||||
}
|
||||
|
||||
return getTopPValue(assistant)
|
||||
}
|
||||
|
||||
function getTopPValue(assistant: Assistant): number | undefined {
|
||||
const assistantSettings = getAssistantSettings(assistant)
|
||||
// FIXME: assistant.settings.enableTopP should be always a boolean value.
|
||||
const enableTopP = assistantSettings.enableTopP ?? DEFAULT_ASSISTANT_SETTINGS.enableTopP
|
||||
|
||||
@ -42,7 +42,8 @@ vi.mock('@renderer/utils/api', () => ({
|
||||
routeToEndpoint: vi.fn((host) => ({
|
||||
baseURL: host,
|
||||
endpoint: '/chat/completions'
|
||||
}))
|
||||
})),
|
||||
isWithTrailingSharp: vi.fn((host) => host?.endsWith('#') || false)
|
||||
}))
|
||||
|
||||
vi.mock('@renderer/utils/provider', async (importOriginal) => {
|
||||
@ -227,12 +228,19 @@ describe('CherryAI provider configuration', () => {
|
||||
// Mock the functions to simulate non-CherryAI provider
|
||||
vi.mocked(isCherryAIProvider).mockReturnValue(false)
|
||||
vi.mocked(getProviderByModel).mockReturnValue(provider)
|
||||
// Mock isWithTrailingSharp to return false for this test
|
||||
vi.mocked(formatApiHost as any).mockImplementation((host, isSupportedAPIVersion = true) => {
|
||||
if (isSupportedAPIVersion === false) {
|
||||
return host
|
||||
}
|
||||
return `${host}/v1`
|
||||
})
|
||||
|
||||
// Call getActualProvider
|
||||
const actualProvider = getActualProvider(model)
|
||||
|
||||
// Verify that formatApiHost was called with default parameters (true)
|
||||
expect(formatApiHost).toHaveBeenCalledWith('https://api.openai.com')
|
||||
// Verify that formatApiHost was called with appendApiVersion parameter
|
||||
expect(formatApiHost).toHaveBeenCalledWith('https://api.openai.com', true)
|
||||
expect(actualProvider.apiHost).toBe('https://api.openai.com/v1')
|
||||
})
|
||||
|
||||
@ -303,12 +311,19 @@ describe('Perplexity provider configuration', () => {
|
||||
vi.mocked(isCherryAIProvider).mockReturnValue(false)
|
||||
vi.mocked(isPerplexityProvider).mockReturnValue(false)
|
||||
vi.mocked(getProviderByModel).mockReturnValue(provider)
|
||||
// Mock isWithTrailingSharp to return false for this test
|
||||
vi.mocked(formatApiHost as any).mockImplementation((host, isSupportedAPIVersion = true) => {
|
||||
if (isSupportedAPIVersion === false) {
|
||||
return host
|
||||
}
|
||||
return `${host}/v1`
|
||||
})
|
||||
|
||||
// Call getActualProvider
|
||||
const actualProvider = getActualProvider(model)
|
||||
|
||||
// Verify that formatApiHost was called with default parameters (true)
|
||||
expect(formatApiHost).toHaveBeenCalledWith('https://api.openai.com')
|
||||
// Verify that formatApiHost was called with appendApiVersion parameter
|
||||
expect(formatApiHost).toHaveBeenCalledWith('https://api.openai.com', true)
|
||||
expect(actualProvider.apiHost).toBe('https://api.openai.com/v1')
|
||||
})
|
||||
|
||||
|
||||
@ -9,6 +9,7 @@ import {
|
||||
} from '@renderer/hooks/useAwsBedrock'
|
||||
import { createVertexProvider, isVertexAIConfigured } from '@renderer/hooks/useVertexAI'
|
||||
import { getProviderByModel } from '@renderer/services/AssistantService'
|
||||
import { getProviderById } from '@renderer/services/ProviderService'
|
||||
import store from '@renderer/store'
|
||||
import { isSystemProvider, type Model, type Provider, SystemProviderIds } from '@renderer/types'
|
||||
import type { OpenAICompletionsStreamOptions } from '@renderer/types/aiCoreTypes'
|
||||
@ -17,6 +18,7 @@ import {
|
||||
formatAzureOpenAIApiHost,
|
||||
formatOllamaApiHost,
|
||||
formatVertexApiHost,
|
||||
isWithTrailingSharp,
|
||||
routeToEndpoint
|
||||
} from '@renderer/utils/api'
|
||||
import {
|
||||
@ -69,14 +71,15 @@ function handleSpecialProviders(model: Model, provider: Provider): Provider {
|
||||
*/
|
||||
export function formatProviderApiHost(provider: Provider): Provider {
|
||||
const formatted = { ...provider }
|
||||
const appendApiVersion = !isWithTrailingSharp(provider.apiHost)
|
||||
if (formatted.anthropicApiHost) {
|
||||
formatted.anthropicApiHost = formatApiHost(formatted.anthropicApiHost)
|
||||
formatted.anthropicApiHost = formatApiHost(formatted.anthropicApiHost, appendApiVersion)
|
||||
}
|
||||
|
||||
if (isAnthropicProvider(provider)) {
|
||||
const baseHost = formatted.anthropicApiHost || formatted.apiHost
|
||||
// AI SDK needs /v1 in baseURL, Anthropic SDK will strip it in getSdkClient
|
||||
formatted.apiHost = formatApiHost(baseHost)
|
||||
formatted.apiHost = formatApiHost(baseHost, appendApiVersion)
|
||||
if (!formatted.anthropicApiHost) {
|
||||
formatted.anthropicApiHost = formatted.apiHost
|
||||
}
|
||||
@ -85,7 +88,7 @@ export function formatProviderApiHost(provider: Provider): Provider {
|
||||
} else if (isOllamaProvider(formatted)) {
|
||||
formatted.apiHost = formatOllamaApiHost(formatted.apiHost)
|
||||
} else if (isGeminiProvider(formatted)) {
|
||||
formatted.apiHost = formatApiHost(formatted.apiHost, true, 'v1beta')
|
||||
formatted.apiHost = formatApiHost(formatted.apiHost, appendApiVersion, 'v1beta')
|
||||
} else if (isAzureOpenAIProvider(formatted)) {
|
||||
formatted.apiHost = formatAzureOpenAIApiHost(formatted.apiHost)
|
||||
} else if (isVertexProvider(formatted)) {
|
||||
@ -95,7 +98,7 @@ export function formatProviderApiHost(provider: Provider): Provider {
|
||||
} else if (isPerplexityProvider(formatted)) {
|
||||
formatted.apiHost = formatApiHost(formatted.apiHost, false)
|
||||
} else {
|
||||
formatted.apiHost = formatApiHost(formatted.apiHost)
|
||||
formatted.apiHost = formatApiHost(formatted.apiHost, appendApiVersion)
|
||||
}
|
||||
return formatted
|
||||
}
|
||||
@ -248,6 +251,12 @@ export function providerToAiSdkConfig(actualProvider: Provider, model: Model): A
|
||||
if (model.endpoint_type) {
|
||||
extraOptions.endpointType = model.endpoint_type
|
||||
}
|
||||
// CherryIN API Host
|
||||
const cherryinProvider = getProviderById(SystemProviderIds.cherryin)
|
||||
if (cherryinProvider) {
|
||||
extraOptions.anthropicBaseURL = cherryinProvider.anthropicApiHost + '/v1'
|
||||
extraOptions.geminiBaseURL = cherryinProvider.apiHost + '/v1beta/models'
|
||||
}
|
||||
}
|
||||
|
||||
if (hasProviderConfig(aiSdkProviderId) && aiSdkProviderId !== 'openai-compatible') {
|
||||
|
||||
@@ -754,7 +754,8 @@ describe('reasoning utils', () => {
      const result = getGeminiReasoningParams(assistant, model)
      expect(result).toEqual({
        thinkingConfig: {
          includeThoughts: true
          includeThoughts: true,
          thinkingBudget: -1
        }
      })
    })

@@ -11,6 +11,7 @@ import {
  isGeminiModel,
  isGrokModel,
  isOpenAIModel,
  isOpenAIOpenWeightModel,
  isQwenMTModel,
  isSupportFlexServiceTierModel,
  isSupportVerbosityModel
@@ -244,7 +245,7 @@ export function buildProviderOptions(
      providerSpecificOptions = buildOpenAIProviderOptions(assistant, model, capabilities, serviceTier)
      break
    case SystemProviderIds.ollama:
      providerSpecificOptions = buildOllamaProviderOptions(assistant, capabilities)
      providerSpecificOptions = buildOllamaProviderOptions(assistant, model, capabilities)
      break
    case SystemProviderIds.gateway:
      providerSpecificOptions = buildAIGatewayOptions(assistant, model, capabilities, serviceTier, textVerbosity)
@@ -564,6 +565,7 @@ function buildBedrockProviderOptions(

function buildOllamaProviderOptions(
  assistant: Assistant,
  model: Model,
  capabilities: {
    enableReasoning: boolean
    enableWebSearch: boolean
@@ -574,7 +576,12 @@ function buildOllamaProviderOptions(
  const providerOptions: OllamaCompletionProviderOptions = {}
  const reasoningEffort = assistant.settings?.reasoning_effort
  if (enableReasoning) {
    providerOptions.think = !['none', undefined].includes(reasoningEffort)
    if (isOpenAIOpenWeightModel(model)) {
      // @ts-ignore upstream type error
      providerOptions.think = reasoningEffort as any
    } else {
      providerOptions.think = !['none', undefined].includes(reasoningEffort)
    }
  }
  return {
    ollama: providerOptions
@@ -13,11 +13,11 @@ import {
  isDoubaoSeedAfter251015,
  isDoubaoThinkingAutoModel,
  isGemini3ThinkingTokenModel,
  isGPT5SeriesModel,
  isGPT51SeriesModel,
  isGrok4FastReasoningModel,
  isOpenAIDeepResearchModel,
  isOpenAIModel,
  isOpenAIReasoningModel,
  isQwenAlwaysThinkModel,
  isQwenReasoningModel,
  isReasoningModel,
@@ -134,8 +134,7 @@ export function getReasoningEffort(assistant: Assistant, model: Model): Reasonin
  // https://creator.poe.com/docs/external-applications/openai-compatible-api#additional-considerations
  // Poe provider - supports custom bot parameters via extra_body
  if (provider.id === SystemProviderIds.poe) {
    // GPT-5 series models use reasoning_effort parameter in extra_body
    if (isGPT5SeriesModel(model) || isGPT51SeriesModel(model)) {
    if (isOpenAIReasoningModel(model)) {
      return {
        extra_body: {
          reasoning_effort: reasoningEffort === 'auto' ? 'medium' : reasoningEffort
@@ -589,6 +588,7 @@ export function getGeminiReasoningParams(
  if (effortRatio > 1) {
    return {
      thinkingConfig: {
        thinkingBudget: -1,
        includeThoughts: true
      }
    }
@@ -634,6 +634,8 @@ export function getXAIReasoningParams(assistant: Assistant, model: Model): Pick<
    case 'low':
    case 'high':
      return { reasoningEffort }
    case 'xhigh':
      return { reasoningEffort: 'high' }
  }
}

@@ -29,7 +29,7 @@ const HtmlArtifactsPopup: React.FC<HtmlArtifactsPopupProps> = ({ open, title, ht
  const [fontSize] = usePreference('chat.message.font_size')
  const { activeCmTheme } = useCodeStyle()
  const [viewMode, setViewMode] = useState<ViewMode>('split')
  const [isFullscreen, setIsFullscreen] = useState(false)
  const [isFullscreen, setIsFullscreen] = useState(true)
  const [saved, setSaved] = useTemporaryValue(false, 2000)
  const codeEditorRef = useRef<CodeEditorHandles>(null)
  const previewFrameRef = useRef<HTMLIFrameElement>(null)
@@ -82,7 +82,7 @@ const HtmlArtifactsPopup: React.FC<HtmlArtifactsPopupProps> = ({ open, title, ht
        </HeaderLeft>

        <HeaderCenter>
          <ViewControls onDoubleClick={(e) => e.stopPropagation()}>
          <ViewControls onDoubleClick={(e) => e.stopPropagation()} className="nodrag">
            <ViewButton
              size="sm"
              variant={viewMode === 'split' ? 'default' : 'secondary'}

@@ -1,6 +1,5 @@
import { Button, Tooltip } from '@cherrystudio/ui'
import { loggerService } from '@logger'
import AiProvider from '@renderer/aiCore'
import { RefreshIcon } from '@renderer/components/Icons'
import { useProvider } from '@renderer/hooks/useProvider'
import type { Model } from '@renderer/types'
@@ -9,6 +8,8 @@ import { InputNumber, Space } from 'antd'
import { memo, useCallback, useMemo, useState } from 'react'
import { useTranslation } from 'react-i18next'

import AiProviderNew from '../aiCore/index_new'

const logger = loggerService.withContext('DimensionsInput')

interface InputEmbeddingDimensionProps {
@@ -48,7 +49,7 @@ const InputEmbeddingDimension = ({

    setLoading(true)
    try {
      const aiProvider = new AiProvider(provider)
      const aiProvider = new AiProviderNew(provider)
      const dimension = await aiProvider.getEmbeddingDimensions(model)
      // for controlled input
      if (ref?.current) {

@@ -107,6 +107,51 @@ const WebviewContainer = memo(
      // eslint-disable-next-line react-hooks/exhaustive-deps
    }, [appid, url])

    // Setup keyboard shortcuts handler for print and save
    useEffect(() => {
      if (!webviewRef.current) return

      const unsubscribe = window.api?.webview?.onFindShortcut?.(async (payload) => {
        // Get webviewId when event is triggered
        const webviewId = webviewRef.current?.getWebContentsId()

        // Only handle events for this webview
        if (!webviewId || payload.webviewId !== webviewId) return

        const key = payload.key?.toLowerCase()
        const isModifier = payload.control || payload.meta

        if (!isModifier || !key) return

        try {
          if (key === 'p') {
            // Print to PDF
            logger.info(`Printing webview ${appid} to PDF`)
            const filePath = await window.api.webview.printToPDF(webviewId)
            if (filePath) {
              window.toast?.success?.(`PDF saved to: ${filePath}`)
              logger.info(`PDF saved to: ${filePath}`)
            }
          } else if (key === 's') {
            // Save as HTML
            logger.info(`Saving webview ${appid} as HTML`)
            const filePath = await window.api.webview.saveAsHTML(webviewId)
            if (filePath) {
              window.toast?.success?.(`HTML saved to: ${filePath}`)
              logger.info(`HTML saved to: ${filePath}`)
            }
          }
        } catch (error) {
          logger.error(`Failed to handle shortcut for webview ${appid}:`, error as Error)
          window.toast?.error?.(`Failed: ${(error as Error).message}`)
        }
      })

      return () => {
        unsubscribe?.()
      }
    }, [appid])

    // Update webview settings when they change
    useEffect(() => {
      if (!webviewRef.current) return

@@ -60,6 +60,7 @@ const PopupContainer: React.FC<Props> = ({ agent, afterSubmit, resolve }) => {

  const [form, setForm] = useState<BaseAgentForm>(() => buildAgentForm(agent))
  const [hasGitBash, setHasGitBash] = useState<boolean>(true)
  const [customGitBashPath, setCustomGitBashPath] = useState<string>('')

  useEffect(() => {
    if (open) {
@@ -70,7 +71,11 @@ const PopupContainer: React.FC<Props> = ({ agent, afterSubmit, resolve }) => {
  const checkGitBash = useCallback(
    async (showToast = false) => {
      try {
        const gitBashInstalled = await window.api.system.checkGitBash()
        const [gitBashInstalled, savedPath] = await Promise.all([
          window.api.system.checkGitBash(),
          window.api.system.getGitBashPath().catch(() => null)
        ])
        setCustomGitBashPath(savedPath ?? '')
        setHasGitBash(gitBashInstalled)
        if (showToast) {
          if (gitBashInstalled) {
@@ -93,6 +98,46 @@ const PopupContainer: React.FC<Props> = ({ agent, afterSubmit, resolve }) => {

  const selectedPermissionMode = form.configuration?.permission_mode ?? 'default'

  const handlePickGitBash = useCallback(async () => {
    try {
      const selected = await window.api.file.select({
        title: t('agent.gitBash.pick.title', 'Select Git Bash executable'),
        filters: [{ name: 'Executable', extensions: ['exe'] }],
        properties: ['openFile']
      })

      if (!selected || selected.length === 0) {
        return
      }

      const pickedPath = selected[0].path
      const ok = await window.api.system.setGitBashPath(pickedPath)
      if (!ok) {
        window.toast.error(
          t('agent.gitBash.pick.invalidPath', 'Selected file is not a valid Git Bash executable (bash.exe).')
        )
        return
      }

      setCustomGitBashPath(pickedPath)
      await checkGitBash(true)
    } catch (error) {
      logger.error('Failed to pick Git Bash path', error as Error)
      window.toast.error(t('agent.gitBash.pick.failed', 'Failed to set Git Bash path'))
    }
  }, [checkGitBash, t])

  const handleClearGitBash = useCallback(async () => {
    try {
      await window.api.system.setGitBashPath(null)
      setCustomGitBashPath('')
      await checkGitBash(true)
    } catch (error) {
      logger.error('Failed to clear Git Bash path', error as Error)
      window.toast.error(t('agent.gitBash.pick.failed', 'Failed to set Git Bash path'))
    }
  }, [checkGitBash, t])

  const onPermissionModeChange = useCallback((value: PermissionMode) => {
    setForm((prev) => {
      const parsedConfiguration = AgentConfigurationSchema.parse(prev.configuration ?? {})
@@ -324,6 +369,9 @@ const PopupContainer: React.FC<Props> = ({ agent, afterSubmit, resolve }) => {
                <Button size="small" onClick={() => checkGitBash(true)}>
                  {t('agent.gitBash.error.recheck', 'Recheck Git Bash Installation')}
                </Button>
                <Button size="small" style={{ marginLeft: 8 }} onClick={handlePickGitBash}>
                  {t('agent.gitBash.pick.button', 'Select Git Bash Path')}
                </Button>
              </div>
            }
            type="error"
@@ -331,6 +379,33 @@ const PopupContainer: React.FC<Props> = ({ agent, afterSubmit, resolve }) => {
            style={{ marginBottom: 16 }}
          />
        )}

        {hasGitBash && customGitBashPath && (
          <Alert
            message={t('agent.gitBash.found.title', 'Git Bash configured')}
            description={
              <div style={{ display: 'flex', flexDirection: 'column', gap: 8 }}>
                <div>
                  {t('agent.gitBash.customPath', {
                    defaultValue: 'Using custom path: {{path}}',
                    path: customGitBashPath
                  })}
                </div>
                <div style={{ display: 'flex', gap: 8 }}>
                  <Button size="small" onClick={handlePickGitBash}>
                    {t('agent.gitBash.pick.button', 'Select Git Bash Path')}
                  </Button>
                  <Button size="small" onClick={handleClearGitBash}>
                    {t('agent.gitBash.clear.button', 'Clear custom path')}
                  </Button>
                </div>
              </div>
            }
            type="success"
            showIcon
            style={{ marginBottom: 16 }}
          />
        )}
        <FormRow>
          <FormItem style={{ flex: 1 }}>
            <Label>

@@ -86,7 +86,7 @@ vi.mock('@cherrystudio/ui', () => ({
}))

// Mock dependencies
vi.mock('@renderer/aiCore', () => ({
vi.mock('@renderer/aiCore/index_new', () => ({
  default: vi.fn().mockImplementation(() => ({
    getEmbeddingDimensions: mocks.aiCore.getEmbeddingDimensions
  }))

@@ -88,10 +88,7 @@ const Sidebar: FC = () => {
        )}
      </MainMenusContainer>
      <Menus>
        <Tooltip
          placement="right"
          content={t('settings.theme.title') + ': ' + getThemeModeLabel(settedTheme)}
          delay={800}>
        <Tooltip placement="right" content={t('settings.theme.title') + ': ' + getThemeModeLabel(settedTheme)}>
          <Icon theme={theme} onClick={toggleTheme}>
            {settedTheme === ThemeMode.dark ? (
              <Moon size={20} className="icon" />

@@ -193,7 +193,7 @@ export function getModelLogoById(modelId: string): string | undefined {
    'gpt-5.1': GPT51ModelLogo,
    'gpt-5': GPT5ModelLogo,
    gpts: isLight ? ChatGPT4ModelLogo : ChatGPT4ModelLogoDark,
    'gpt-oss(?:-[\\w-]+)': isLight ? ChatGptModelLogo : ChatGptModelLogoDark,
    'gpt-oss(?::|-[\\w-]+)': isLight ? ChatGptModelLogo : ChatGptModelLogoDark,
    'text-moderation': isLight ? ChatGptModelLogo : ChatGptModelLogoDark,
    'babbage-': isLight ? ChatGptModelLogo : ChatGptModelLogoDark,
    '(sora-|sora_)': isLight ? ChatGptModelLogo : ChatGptModelLogoDark,

@@ -35,6 +35,16 @@ export const isGPT5ProModel = (model: Model) => {
  return modelId.includes('gpt-5-pro')
}

export const isGPT52ProModel = (model: Model) => {
  const modelId = getLowerBaseModelName(model.id)
  return modelId.includes('gpt-5.2-pro')
}

export const isGPT51CodexMaxModel = (model: Model) => {
  const modelId = getLowerBaseModelName(model.id)
  return modelId.includes('gpt-5.1-codex-max')
}

export const isOpenAIOpenWeightModel = (model: Model) => {
  const modelId = getLowerBaseModelName(model.id)
  return modelId.includes('gpt-oss')
@@ -42,7 +52,7 @@ export const isOpenAIOpenWeightModel = (model: Model) => {

export const isGPT5SeriesModel = (model: Model) => {
  const modelId = getLowerBaseModelName(model.id)
  return modelId.includes('gpt-5') && !modelId.includes('gpt-5.1')
  return modelId.includes('gpt-5') && !modelId.includes('gpt-5.1') && !modelId.includes('gpt-5.2')
}

export const isGPT5SeriesReasoningModel = (model: Model) => {
@@ -55,6 +65,11 @@ export const isGPT51SeriesModel = (model: Model) => {
  return modelId.includes('gpt-5.1')
}

export const isGPT52SeriesModel = (model: Model) => {
  const modelId = getLowerBaseModelName(model.id)
  return modelId.includes('gpt-5.2')
}

export function isSupportVerbosityModel(model: Model): boolean {
  const modelId = getLowerBaseModelName(model.id)
  return (isGPT5SeriesModel(model) || isGPT51SeriesModel(model)) && !modelId.includes('chat')
@@ -86,7 +101,7 @@ export function isSupportedReasoningEffortOpenAIModel(model: Model): boolean {
    modelId.includes('o3') ||
    modelId.includes('o4') ||
    modelId.includes('gpt-oss') ||
    ((isGPT5SeriesModel(model) || isGPT51SeriesModel(model)) && !modelId.includes('chat'))
    ((isGPT5SeriesModel(model) || isGPT51SeriesModel(model) || isGPT52SeriesModel(model)) && !modelId.includes('chat'))
  )
}

@@ -11,7 +11,10 @@ import { isEmbeddingModel, isRerankModel } from './embedding'
import {
  isGPT5ProModel,
  isGPT5SeriesModel,
  isGPT51CodexMaxModel,
  isGPT51SeriesModel,
  isGPT52ProModel,
  isGPT52SeriesModel,
  isOpenAIDeepResearchModel,
  isOpenAIReasoningModel,
  isSupportedReasoningEffortOpenAIModel
@@ -33,7 +36,10 @@ export const MODEL_SUPPORTED_REASONING_EFFORT: ReasoningEffortConfig = {
  gpt5_codex: ['low', 'medium', 'high'] as const,
  gpt5_1: ['none', 'low', 'medium', 'high'] as const,
  gpt5_1_codex: ['none', 'medium', 'high'] as const,
  gpt5_1_codex_max: ['none', 'medium', 'high', 'xhigh'] as const,
  gpt5_2: ['none', 'low', 'medium', 'high', 'xhigh'] as const,
  gpt5pro: ['high'] as const,
  gpt52pro: ['medium', 'high', 'xhigh'] as const,
  grok: ['low', 'high'] as const,
  grok4_fast: ['auto'] as const,
  gemini: ['low', 'medium', 'high', 'auto'] as const,
@@ -60,6 +66,9 @@ export const MODEL_SUPPORTED_OPTIONS: ThinkingOptionConfig = {
  gpt5_codex: MODEL_SUPPORTED_REASONING_EFFORT.gpt5_codex,
  gpt5_1: MODEL_SUPPORTED_REASONING_EFFORT.gpt5_1,
  gpt5_1_codex: MODEL_SUPPORTED_REASONING_EFFORT.gpt5_1_codex,
  gpt5_2: MODEL_SUPPORTED_REASONING_EFFORT.gpt5_2,
  gpt5_1_codex_max: MODEL_SUPPORTED_REASONING_EFFORT.gpt5_1_codex_max,
  gpt52pro: MODEL_SUPPORTED_REASONING_EFFORT.gpt52pro,
  grok: MODEL_SUPPORTED_REASONING_EFFORT.grok,
  grok4_fast: ['none', ...MODEL_SUPPORTED_REASONING_EFFORT.grok4_fast] as const,
  gemini: ['none', ...MODEL_SUPPORTED_REASONING_EFFORT.gemini] as const,
@@ -84,6 +93,7 @@ const withModelIdAndNameAsId = <T>(model: Model, fn: (model: Model) => T): { idR
  }
}

// TODO: add ut
const _getThinkModelType = (model: Model): ThinkingModelType => {
  let thinkingModelType: ThinkingModelType = 'default'
  const modelId = getLowerBaseModelName(model.id)
@@ -93,9 +103,17 @@ const _getThinkModelType = (model: Model): ThinkingModelType => {
  if (isGPT51SeriesModel(model)) {
    if (modelId.includes('codex')) {
      thinkingModelType = 'gpt5_1_codex'
      if (isGPT51CodexMaxModel(model)) {
        thinkingModelType = 'gpt5_1_codex_max'
      }
    } else {
      thinkingModelType = 'gpt5_1'
    }
  } else if (isGPT52SeriesModel(model)) {
    thinkingModelType = 'gpt5_2'
    if (isGPT52ProModel(model)) {
      thinkingModelType = 'gpt52pro'
    }
  } else if (isGPT5SeriesModel(model)) {
    if (modelId.includes('codex')) {
      thinkingModelType = 'gpt5_codex'

@@ -1,5 +1,6 @@
import type OpenAI from '@cherrystudio/openai'
import { isEmbeddingModel, isRerankModel } from '@renderer/config/models/embedding'
import type { Assistant } from '@renderer/types'
import { type Model, SystemProviderIds } from '@renderer/types'
import type { OpenAIVerbosity, ValidOpenAIVerbosity } from '@renderer/types/aiCoreTypes'
import { getLowerBaseModelName } from '@renderer/utils'
@@ -8,6 +9,7 @@ import {
  isGPT5ProModel,
  isGPT5SeriesModel,
  isGPT51SeriesModel,
  isGPT52SeriesModel,
  isOpenAIChatCompletionOnlyModel,
  isOpenAIOpenWeightModel,
  isOpenAIReasoningModel,
@@ -48,13 +50,16 @@ export function isSupportedModel(model: OpenAI.Models.Model): boolean {
 * @param model - The model to check
 * @returns true if the model supports temperature parameter
 */
export function isSupportTemperatureModel(model: Model | undefined | null): boolean {
export function isSupportTemperatureModel(model: Model | undefined | null, assistant?: Assistant): boolean {
  if (!model) {
    return false
  }

  // OpenAI reasoning models (except open weight) don't support temperature
  if (isOpenAIReasoningModel(model) && !isOpenAIOpenWeightModel(model)) {
    if (isGPT52SeriesModel(model) && assistant?.settings?.reasoning_effort === 'none') {
      return true
    }
    return false
  }

@@ -76,13 +81,16 @@ export function isSupportTemperatureModel(model: Model | undefined | null): bool
 * @param model - The model to check
 * @returns true if the model supports top_p parameter
 */
export function isSupportTopPModel(model: Model | undefined | null): boolean {
export function isSupportTopPModel(model: Model | undefined | null, assistant?: Assistant): boolean {
  if (!model) {
    return false
  }

  // OpenAI reasoning models (except open weight) don't support top_p
  if (isOpenAIReasoningModel(model) && !isOpenAIOpenWeightModel(model)) {
    if (isGPT52SeriesModel(model) && assistant?.settings?.reasoning_effort === 'none') {
      return true
    }
    return false
  }

@@ -163,6 +163,7 @@ export const useKnowledge = (baseId: string) => {
      processingProgress: 0,
      processingError: '',
      uniqueId: undefined,
      retryCount: 0,
      updated_at: Date.now()
    })
    checkAllBases()
@@ -182,6 +183,7 @@ export const useKnowledge = (baseId: string) => {
      processingProgress: 0,
      processingError: '',
      uniqueId: undefined,
      retryCount: 0,
      updated_at: Date.now()
    })
    setTimeout(() => KnowledgeQueue.checkAllBases(), 0)

@@ -316,7 +316,8 @@ const reasoningEffortOptionsKeyMap: Record<ThinkingOption, string> = {
  high: 'assistants.settings.reasoning_effort.high',
  low: 'assistants.settings.reasoning_effort.low',
  medium: 'assistants.settings.reasoning_effort.medium',
  auto: 'assistants.settings.reasoning_effort.default'
  auto: 'assistants.settings.reasoning_effort.default',
  xhigh: 'assistants.settings.reasoning_effort.xhigh'
} as const

export const getReasoningEffortOptionsLabel = (key: string): string => {

@ -31,12 +31,26 @@
|
||||
}
|
||||
},
|
||||
"gitBash": {
|
||||
"autoDetected": "Using auto-detected Git Bash",
|
||||
"clear": {
|
||||
"button": "Clear custom path"
|
||||
},
|
||||
"customPath": "Using custom path: {{path}}",
|
||||
"error": {
|
||||
"description": "Git Bash is required to run agents on Windows. The agent cannot function without it. Please install Git for Windows from",
|
||||
"recheck": "Recheck Git Bash Installation",
|
||||
"title": "Git Bash Required"
|
||||
},
|
||||
"found": {
|
||||
"title": "Git Bash configured"
|
||||
},
|
||||
"notFound": "Git Bash not found. Please install it first.",
|
||||
"pick": {
|
||||
"button": "Select Git Bash Path",
|
||||
"failed": "Failed to set Git Bash path",
|
||||
"invalidPath": "Selected file is not a valid Git Bash executable (bash.exe).",
|
||||
"title": "Select Git Bash executable"
|
||||
},
|
||||
"success": "Git Bash detected successfully!"
|
||||
},
|
||||
"input": {
|
||||
@ -471,6 +485,14 @@
|
||||
"url_placeholder": "Enter JSON URL"
|
||||
},
|
||||
"manage": {
|
||||
"batch_delete": {
|
||||
"button": "Batch Delete",
|
||||
"confirm": "Are you sure you want to delete the selected {{count}} assistants?"
|
||||
},
|
||||
"mode": {
|
||||
"delete": "Delete",
|
||||
"sort": "Sort"
|
||||
},
|
||||
"title": "Manage Assistants"
|
||||
},
|
||||
"my_agents": "My Assistants",
|
||||
@ -524,7 +546,8 @@
|
||||
"low": "Low",
|
||||
"medium": "Medium",
|
||||
"minimal": "Minimal",
|
||||
"off": "Off"
|
||||
"off": "Off",
|
||||
"xhigh": "Extra High"
|
||||
},
|
||||
"regular_phrases": {
|
||||
"add": "Add Phrase",
|
||||
@ -1185,6 +1208,7 @@
|
||||
"saved": "Saved",
|
||||
"search": "Search",
|
||||
"select": "Select",
|
||||
"select_all": "Select All",
|
||||
"selected": "Selected",
|
||||
"selectedItems": "Selected {{count}} items",
|
||||
"selectedMessages": "Selected {{count}} messages",
|
||||
@ -2303,7 +2327,7 @@
|
||||
"failed": {
|
||||
"install": "Install OVMS failed:",
|
||||
"install_code_100": "Unknown Error",
|
||||
"install_code_101": "Only supports Intel(R) Core(TM) Ultra CPU",
|
||||
"install_code_101": "Only supports Intel(R) CPU",
|
||||
"install_code_102": "Only supports Windows",
|
||||
"install_code_103": "Download OVMS runtime failed",
|
||||
"install_code_104": "Failed to install OVMS runtime",
|
||||
@ -3912,6 +3936,7 @@
|
||||
"jsonSaveError": "Failed to save JSON configuration.",
|
||||
"jsonSaveSuccess": "JSON configuration has been saved.",
|
||||
"logoUrl": "Logo URL",
|
||||
"logs": "Logs",
|
||||
"longRunning": "Long Running Mode",
|
||||
"longRunningTooltip": "When enabled, the server supports long-running tasks. When receiving progress notifications, the timeout will be reset and the maximum execution time will be extended to 10 minutes.",
|
||||
"marketplaces": "Marketplaces",
|
||||
@ -3931,6 +3956,7 @@
|
||||
"name": "Name",
|
||||
"newServer": "MCP Server",
|
||||
"noDescriptionAvailable": "No description available",
|
||||
"noLogs": "No logs yet",
|
||||
"noServers": "No servers configured",
|
||||
"not_support": "Model not supported",
|
||||
"npx_list": {
|
||||
|
||||
@ -31,12 +31,26 @@
|
||||
}
|
||||
},
|
||||
"gitBash": {
|
||||
"autoDetected": "使用自动检测的 Git Bash",
|
||||
"clear": {
|
||||
"button": "清除自定义路径"
|
||||
},
|
||||
"customPath": "使用自定义路径:{{path}}",
|
||||
"error": {
|
||||
"description": "在 Windows 上运行智能体需要 Git Bash。没有它智能体无法运行。请从以下地址安装 Git for Windows",
|
||||
"recheck": "重新检测 Git Bash 安装",
|
||||
"title": "需要 Git Bash"
|
||||
},
|
||||
"found": {
|
||||
"title": "已配置 Git Bash"
|
||||
},
|
||||
"notFound": "未找到 Git Bash。请先安装。",
|
||||
"pick": {
|
||||
"button": "选择 Git Bash 路径",
|
||||
"failed": "设置 Git Bash 路径失败",
|
||||
"invalidPath": "选择的文件不是有效的 Git Bash 可执行文件(bash.exe)。",
|
||||
"title": "选择 Git Bash 可执行文件"
|
||||
},
|
||||
"success": "成功检测到 Git Bash!"
|
||||
},
|
||||
"input": {
|
||||
@ -471,6 +485,14 @@
|
||||
"url_placeholder": "输入 JSON URL"
|
||||
},
|
||||
"manage": {
|
||||
"batch_delete": {
|
||||
"button": "批量删除",
|
||||
"confirm": "确定要删除选中的 {{count}} 个助手吗?"
|
||||
},
|
||||
"mode": {
|
||||
"delete": "删除",
|
||||
"sort": "排序"
|
||||
},
|
||||
"title": "管理助手"
|
||||
},
|
||||
"my_agents": "我的助手",
|
||||
@ -524,7 +546,8 @@
|
||||
"low": "浮想",
|
||||
"medium": "斟酌",
|
||||
"minimal": "微念",
|
||||
"off": "关闭"
|
||||
"off": "关闭",
|
||||
"xhigh": "穷究"
|
||||
},
|
||||
"regular_phrases": {
|
||||
"add": "添加短语",
|
||||
@ -1185,6 +1208,7 @@
|
||||
"saved": "已保存",
|
||||
"search": "搜索",
|
||||
"select": "选择",
|
||||
"select_all": "全选",
|
||||
"selected": "已选择",
|
||||
"selectedItems": "已选择 {{count}} 项",
|
||||
"selectedMessages": "选中 {{count}} 条消息",
|
||||
@ -2303,7 +2327,7 @@
|
||||
"failed": {
|
||||
"install": "安装 OVMS 失败:",
|
||||
"install_code_100": "未知错误",
|
||||
"install_code_101": "仅支持 Intel(R) Core(TM) Ultra CPU",
|
||||
"install_code_101": "仅支持 Intel(R) CPU",
|
||||
"install_code_102": "仅支持 Windows",
|
||||
"install_code_103": "下载 OVMS runtime 失败",
|
||||
"install_code_104": "安装 OVMS runtime 失败",
|
||||
@ -3912,6 +3936,7 @@
|
||||
"jsonSaveError": "保存 JSON 配置失败",
|
||||
"jsonSaveSuccess": "JSON 配置已保存",
|
||||
"logoUrl": "标志网址",
|
||||
"logs": "日志",
|
||||
"longRunning": "长时间运行模式",
|
||||
"longRunningTooltip": "启用后,服务器支持长时间任务,接收到进度通知时会重置超时计时器,并延长最大超时时间至10分钟",
|
||||
"marketplaces": "市场",
|
||||
@ -3931,6 +3956,7 @@
|
||||
"name": "名称",
|
||||
"newServer": "MCP 服务器",
|
||||
"noDescriptionAvailable": "暂无描述",
|
||||
"noLogs": "暂无日志",
|
||||
"noServers": "未配置服务器",
|
||||
"not_support": "模型不支持",
|
||||
"npx_list": {
|
||||
|
||||
@ -31,12 +31,26 @@
|
||||
}
|
||||
},
|
||||
"gitBash": {
|
||||
"autoDetected": "使用自動偵測的 Git Bash",
|
||||
"clear": {
|
||||
"button": "清除自訂路徑"
|
||||
},
|
||||
"customPath": "使用自訂路徑:{{path}}",
|
||||
"error": {
|
||||
"description": "在 Windows 上執行代理程式需要 Git Bash。沒有它代理程式無法運作。請從以下地址安裝 Git for Windows",
|
||||
"recheck": "重新檢測 Git Bash 安裝",
|
||||
"title": "需要 Git Bash"
|
||||
},
|
||||
"found": {
|
||||
"title": "已配置 Git Bash"
|
||||
},
|
||||
"notFound": "找不到 Git Bash。請先安裝。",
|
||||
"pick": {
|
||||
"button": "選擇 Git Bash 路徑",
|
||||
"failed": "設定 Git Bash 路徑失敗",
|
||||
"invalidPath": "選擇的檔案不是有效的 Git Bash 可執行檔(bash.exe)。",
|
||||
"title": "選擇 Git Bash 可執行檔"
|
||||
},
|
||||
"success": "成功偵測到 Git Bash!"
|
||||
},
|
||||
"input": {
|
||||
@ -471,6 +485,14 @@
|
||||
"url_placeholder": "輸入 JSON URL"
|
||||
},
|
||||
"manage": {
|
||||
"batch_delete": {
|
||||
"button": "批次刪除",
|
||||
"confirm": "您確定要刪除所選的 {{count}} 個助理嗎?"
|
||||
},
|
||||
"mode": {
|
||||
"delete": "刪除",
|
||||
"sort": "排序"
|
||||
},
|
||||
"title": "管理助手"
|
||||
},
|
||||
"my_agents": "我的助手",
|
||||
@ -524,7 +546,8 @@
|
||||
"low": "稍微思考",
|
||||
"medium": "正常思考",
|
||||
"minimal": "最少思考",
|
||||
"off": "關閉"
|
||||
"off": "關閉",
|
||||
"xhigh": "極力思考"
|
||||
},
|
||||
"regular_phrases": {
|
||||
"add": "添加短语",
|
||||
@ -1185,6 +1208,7 @@
|
||||
"saved": "已儲存",
|
||||
"search": "搜尋",
|
||||
"select": "選擇",
|
||||
"select_all": "全選",
|
||||
"selected": "已選擇",
|
||||
"selectedItems": "已選擇 {{count}} 項",
|
||||
"selectedMessages": "選中 {{count}} 條訊息",
|
||||
@ -2303,7 +2327,7 @@
|
||||
"failed": {
|
||||
"install": "安裝 OVMS 失敗:",
|
||||
"install_code_100": "未知錯誤",
|
||||
"install_code_101": "僅支援 Intel(R) Core(TM) Ultra CPU",
|
||||
"install_code_101": "僅支援 Intel(R) CPU",
|
||||
"install_code_102": "僅支援 Windows",
|
||||
"install_code_103": "下載 OVMS runtime 失敗",
|
||||
"install_code_104": "安裝 OVMS runtime 失敗",
|
||||
@ -3912,6 +3936,7 @@
|
||||
"jsonSaveError": "保存 JSON 配置失敗",
|
||||
"jsonSaveSuccess": "JSON 配置已儲存",
|
||||
"logoUrl": "標誌網址",
|
||||
"logs": "日誌",
|
||||
"longRunning": "長時間運行模式",
|
||||
"longRunningTooltip": "啟用後,伺服器支援長時間任務,接收到進度通知時會重置超時計時器,並延長最大超時時間至10分鐘",
|
||||
"marketplaces": "市場",
|
||||
@ -3931,6 +3956,7 @@
|
||||
"name": "名稱",
|
||||
"newServer": "MCP 伺服器",
|
||||
"noDescriptionAvailable": "描述不存在",
|
||||
"noLogs": "暫無日誌",
|
||||
"noServers": "未設定伺服器",
|
||||
"not_support": "不支援此模型",
|
||||
"npx_list": {
|
||||
|
||||
@ -31,12 +31,26 @@
|
||||
}
|
||||
},
|
||||
"gitBash": {
|
||||
"autoDetected": "Automatisch ermitteltes Git Bash wird verwendet",
|
||||
"clear": {
|
||||
"button": "Benutzerdefinierten Pfad löschen"
|
||||
},
|
||||
"customPath": "Benutzerdefinierter Pfad: {{path}}",
|
||||
"error": {
|
||||
"description": "Git Bash ist erforderlich, um Agents unter Windows auszuführen. Der Agent kann ohne es nicht funktionieren. Bitte installieren Sie Git für Windows von",
|
||||
"recheck": "Überprüfe die Git Bash-Installation erneut",
|
||||
"title": "Git Bash erforderlich"
|
||||
},
|
||||
"found": {
|
||||
"title": "Git Bash konfiguriert"
|
||||
},
|
||||
"notFound": "Git Bash nicht gefunden. Bitte installieren Sie es zuerst.",
|
||||
"pick": {
|
||||
"button": "Git Bash Pfad auswählen",
|
||||
"failed": "Git Bash Pfad konnte nicht gesetzt werden",
|
||||
"invalidPath": "Die ausgewählte Datei ist keine gültige Git Bash ausführbare Datei (bash.exe).",
|
||||
"title": "Git Bash ausführbare Datei auswählen"
|
||||
},
|
||||
"success": "Git Bash erfolgreich erkannt!"
|
||||
},
|
||||
"input": {
|
||||
@ -471,6 +485,14 @@
|
||||
"url_placeholder": "JSON-URL eingeben"
|
||||
},
|
||||
"manage": {
|
||||
"batch_delete": {
|
||||
"button": "Stapel löschen",
|
||||
"confirm": "Sind Sie sicher, dass Sie die ausgewählten {{count}} Assistenten löschen möchten?"
|
||||
},
|
||||
"mode": {
|
||||
"delete": "Löschen",
|
||||
"sort": "Sortieren"
|
||||
},
|
||||
"title": "Assistenten verwalten"
|
||||
},
|
||||
"my_agents": "Meine Assistenten",
|
||||
@ -524,7 +546,8 @@
|
||||
"low": "Spontan",
|
||||
"medium": "Überlegt",
|
||||
"minimal": "Minimal",
|
||||
"off": "Aus"
|
||||
"off": "Aus",
|
||||
"xhigh": "Extra hoch"
|
||||
},
|
||||
"regular_phrases": {
|
||||
"add": "Phrase hinzufügen",
|
||||
@ -1185,6 +1208,7 @@
|
||||
"saved": "Gespeichert",
|
||||
"search": "Suchen",
|
||||
"select": "Auswählen",
|
||||
"select_all": "Alle auswählen",
|
||||
"selected": "Ausgewählt",
|
||||
"selectedItems": "{{count}} Elemente ausgewählt",
|
||||
"selectedMessages": "{{count}} Nachrichten ausgewählt",
|
||||
@ -3912,6 +3936,7 @@
|
||||
"jsonSaveError": "JSON-Konfiguration speichern fehlgeschlagen",
|
||||
"jsonSaveSuccess": "JSON-Konfiguration erfolgreich gespeichert",
|
||||
"logoUrl": "Logo-URL",
|
||||
"logs": "Protokolle",
|
||||
"longRunning": "Lang laufender Modus",
|
||||
"longRunningTooltip": "Nach Aktivierung unterstützt der Server lange Aufgaben. Wenn ein Fortschrittsbenachrichtigung empfangen wird, wird der Timeout-Timer zurückgesetzt und die maximale Timeout-Zeit auf 10 Minuten verlängert",
|
||||
"marketplaces": "Marktplätze",
|
||||
@ -3931,6 +3956,7 @@
|
||||
"name": "Name",
|
||||
"newServer": "MCP-Server",
|
||||
"noDescriptionAvailable": "Keine Beschreibung",
|
||||
"noLogs": "Noch keine Protokolle",
|
||||
"noServers": "Server nicht konfiguriert",
|
||||
"not_support": "Modell nicht unterstützt",
|
||||
"npx_list": {
|
||||
|
||||
@ -31,12 +31,26 @@
|
||||
}
|
||||
},
|
||||
"gitBash": {
|
||||
"autoDetected": "[to be translated]:Using auto-detected Git Bash",
|
||||
"clear": {
|
||||
"button": "[to be translated]:Clear custom path"
|
||||
},
|
||||
"customPath": "[to be translated]:Using custom path: {{path}}",
|
||||
"error": {
|
||||
"description": "Το Git Bash απαιτείται για την εκτέλεση πρακτόρων στα Windows. Ο πράκτορας δεν μπορεί να λειτουργήσει χωρίς αυτό. Παρακαλούμε εγκαταστήστε το Git για Windows από",
|
||||
"recheck": "Επανέλεγχος Εγκατάστασης του Git Bash",
|
||||
"title": "Απαιτείται Git Bash"
|
||||
},
|
||||
"found": {
|
||||
"title": "[to be translated]:Git Bash configured"
|
||||
},
|
||||
"notFound": "Το Git Bash δεν βρέθηκε. Παρακαλώ εγκαταστήστε το πρώτα.",
|
||||
"pick": {
|
||||
"button": "[to be translated]:Select Git Bash Path",
|
||||
"failed": "[to be translated]:Failed to set Git Bash path",
|
||||
"invalidPath": "[to be translated]:Selected file is not a valid Git Bash executable (bash.exe).",
|
||||
"title": "[to be translated]:Select Git Bash executable"
|
||||
},
|
||||
"success": "Το Git Bash εντοπίστηκε με επιτυχία!"
|
||||
},
|
||||
"input": {
|
||||
@ -471,6 +485,14 @@
|
||||
"url_placeholder": "Εισάγετε JSON URL"
|
||||
},
|
||||
"manage": {
|
||||
"batch_delete": {
|
||||
"button": "Μαζική Διαγραφή",
|
||||
"confirm": "Είστε βέβαιοι ότι θέλετε να διαγράψετε τους επιλεγμένους {{count}} βοηθούς;"
|
||||
},
|
||||
"mode": {
|
||||
"delete": "Διαγραφή",
|
||||
"sort": "Ταξινόμηση"
|
||||
},
|
||||
"title": "Διαχείριση βοηθών"
|
||||
},
|
||||
"my_agents": "Οι βοηθοί μου",
|
||||
@ -524,7 +546,8 @@
|
||||
"low": "Μικρό",
|
||||
"medium": "Μεσαίο",
|
||||
"minimal": "ελάχιστος",
|
||||
"off": "Απενεργοποίηση"
|
||||
"off": "Απενεργοποίηση",
|
||||
"xhigh": "[to be translated]:Extra High"
|
||||
},
|
||||
"regular_phrases": {
|
||||
"add": "Προσθήκη φράσης",
|
||||
@ -1185,6 +1208,7 @@
|
||||
"saved": "Αποθηκεύτηκε",
|
||||
"search": "Αναζήτηση",
|
||||
"select": "Επιλογή",
|
||||
"select_all": "Επιλογή Όλων",
|
||||
"selected": "Επιλεγμένο",
|
||||
"selectedItems": "Επιλέχθηκαν {{count}} αντικείμενα",
|
||||
"selectedMessages": "Επιλέχθηκαν {{count}} μηνύματα",
|
||||
@ -3912,6 +3936,7 @@
|
||||
"jsonSaveError": "Αποτυχία αποθήκευσης της διαμορφωτικής ρύθμισης JSON",
|
||||
"jsonSaveSuccess": "Η διαμορφωτική ρύθμιση JSON αποθηκεύτηκε επιτυχώς",
|
||||
"logoUrl": "URL Λογότυπου",
|
||||
"logs": "Αρχεία καταγραφής",
|
||||
"longRunning": "Μακροχρόνια λειτουργία",
|
||||
"longRunningTooltip": "Όταν ενεργοποιηθεί, ο διακομιστής υποστηρίζει μακροχρόνιες εργασίες, επαναφέρει το χρονικό όριο μετά από λήψη ειδοποίησης προόδου και επεκτείνει το μέγιστο χρονικό όριο σε 10 λεπτά.",
|
||||
"marketplaces": "Αγορές",
|
||||
@ -3931,6 +3956,7 @@
|
||||
"name": "Όνομα",
|
||||
"newServer": "Διακομιστής MCP",
|
||||
"noDescriptionAvailable": "Δεν υπάρχει διαθέσιμη περιγραφή",
|
||||
"noLogs": "Δεν υπάρχουν αρχεία καταγραφής ακόμα",
|
||||
"noServers": "Δεν έχουν ρυθμιστεί διακομιστές",
|
||||
"not_support": "Το μοντέλο δεν υποστηρίζεται",
|
||||
"npx_list": {
|
||||
|
||||
@ -31,12 +31,26 @@
|
||||
}
|
||||
},
|
||||
"gitBash": {
|
||||
"autoDetected": "Usando Git Bash detectado automáticamente",
|
||||
"clear": {
|
||||
"button": "Borrar ruta personalizada"
|
||||
},
|
||||
"customPath": "Usando ruta personalizada: {{path}}",
|
||||
"error": {
|
||||
"description": "Se requiere Git Bash para ejecutar agentes en Windows. El agente no puede funcionar sin él. Instale Git para Windows desde",
|
||||
"recheck": "Volver a verificar la instalación de Git Bash",
|
||||
"title": "Git Bash Requerido"
|
||||
},
|
||||
"found": {
|
||||
"title": "Git Bash configurado"
|
||||
},
|
||||
"notFound": "Git Bash no encontrado. Por favor, instálalo primero.",
|
||||
"pick": {
|
||||
"button": "Seleccionar ruta de Git Bash",
|
||||
"failed": "No se pudo configurar la ruta de Git Bash",
|
||||
"invalidPath": "El archivo seleccionado no es un ejecutable válido de Git Bash (bash.exe).",
|
||||
"title": "Seleccionar ejecutable de Git Bash"
|
||||
},
|
||||
"success": "¡Git Bash detectado con éxito!"
|
||||
},
|
||||
"input": {
|
||||
@ -471,6 +485,14 @@
|
||||
"url_placeholder": "Introducir URL JSON"
|
||||
},
|
||||
"manage": {
|
||||
"batch_delete": {
|
||||
"button": "Eliminación por lotes",
|
||||
"confirm": "¿Estás seguro de que quieres eliminar los {{count}} asistentes seleccionados?"
|
||||
},
|
||||
"mode": {
|
||||
"delete": "Eliminar",
|
||||
"sort": "Ordenar"
|
||||
},
|
||||
"title": "Gestionar asistentes"
|
||||
},
|
||||
"my_agents": "Mis asistentes",
|
||||
@ -524,7 +546,8 @@
|
||||
"low": "Corto",
|
||||
"medium": "Medio",
|
||||
"minimal": "minimal",
|
||||
"off": "Apagado"
|
||||
"off": "Apagado",
|
||||
"xhigh": "[to be translated]:Extra High"
|
||||
},
|
||||
"regular_phrases": {
|
||||
"add": "Agregar frase",
|
||||
@ -1185,6 +1208,7 @@
|
||||
"saved": "Guardado",
|
||||
"search": "Buscar",
|
||||
"select": "Seleccionar",
|
||||
"select_all": "Seleccionar todo",
|
||||
"selected": "Seleccionado",
|
||||
"selectedItems": "{{count}} elementos seleccionados",
|
||||
"selectedMessages": "{{count}} mensajes seleccionados",
|
||||
@ -3912,6 +3936,7 @@
|
||||
"jsonSaveError": "Fallo al guardar la configuración JSON",
|
||||
"jsonSaveSuccess": "Configuración JSON guardada exitosamente",
|
||||
"logoUrl": "URL del logotipo",
|
||||
"logs": "Registros",
|
||||
"longRunning": "Modo de ejecución prolongada",
|
||||
"longRunningTooltip": "Una vez habilitado, el servidor admite tareas de larga duración, reinicia el temporizador de tiempo de espera al recibir notificaciones de progreso y amplía el tiempo máximo de espera hasta 10 minutos.",
|
||||
"marketplaces": "Mercados",
|
||||
@ -3931,6 +3956,7 @@
|
||||
"name": "Nombre",
|
||||
"newServer": "Servidor MCP",
|
||||
"noDescriptionAvailable": "Sin descripción disponible por ahora",
|
||||
"noLogs": "Aún no hay registros",
|
||||
"noServers": "No se han configurado servidores",
|
||||
"not_support": "El modelo no es compatible",
|
||||
"npx_list": {
|
||||
|
||||
@ -31,12 +31,26 @@
|
||||
}
|
||||
},
|
||||
"gitBash": {
|
||||
"autoDetected": "Utilisation de Git Bash détecté automatiquement",
|
||||
"clear": {
|
||||
"button": "Effacer le chemin personnalisé"
|
||||
},
|
||||
"customPath": "Utilisation du chemin personnalisé : {{path}}",
|
||||
"error": {
|
||||
"description": "Git Bash est requis pour exécuter des agents sur Windows. L'agent ne peut pas fonctionner sans. Veuillez installer Git pour Windows depuis",
|
||||
"recheck": "Revérifier l'installation de Git Bash",
|
||||
"title": "Git Bash requis"
|
||||
},
|
||||
"notFound": "Git Bash introuvable. Veuillez l’installer d’abord.",
|
||||
"found": {
|
||||
"title": "Git Bash configuré"
|
||||
},
|
||||
"notFound": "Git Bash non trouvé. Veuillez l'installer d'abord.",
|
||||
"pick": {
|
||||
"button": "Sélectionner le chemin Git Bash",
|
||||
"failed": "Échec de la configuration du chemin Git Bash",
|
||||
"invalidPath": "Le fichier sélectionné n'est pas un exécutable Git Bash valide (bash.exe).",
|
||||
"title": "Sélectionner l'exécutable Git Bash"
|
||||
},
|
||||
"success": "Git Bash détecté avec succès !"
|
||||
},
|
||||
"input": {
|
||||
@ -471,6 +485,14 @@
|
||||
"url_placeholder": "Saisir l'URL JSON"
|
||||
},
|
||||
"manage": {
|
||||
"batch_delete": {
|
||||
"button": "Suppression par lot",
|
||||
"confirm": "Êtes-vous sûr de vouloir supprimer les {{count}} assistants sélectionnés ?"
|
||||
},
|
||||
"mode": {
|
||||
"delete": "Supprimer",
|
||||
"sort": "Trier"
|
||||
},
|
||||
"title": "Gérer les assistants"
|
||||
},
|
||||
"my_agents": "Mes assistants",
|
||||
@ -524,7 +546,8 @@
|
||||
"low": "Court",
|
||||
"medium": "Moyen",
|
||||
"minimal": "minimal",
|
||||
"off": "Off"
|
||||
"off": "Off",
|
||||
"xhigh": "[to be translated]:Extra High"
|
||||
},
|
||||
"regular_phrases": {
|
||||
"add": "Добавить фразу",
|
||||
@ -1185,6 +1208,7 @@
|
||||
"saved": "enregistré",
|
||||
"search": "Rechercher",
|
||||
"select": "Sélectionner",
|
||||
"select_all": "Tout sélectionner",
|
||||
"selected": "Sélectionné",
|
||||
"selectedItems": "{{count}} éléments sélectionnés",
|
||||
"selectedMessages": "{{count}} messages sélectionnés",
|
||||
@ -3912,6 +3936,7 @@
|
||||
"jsonSaveError": "Échec de la sauvegarde de la configuration JSON",
|
||||
"jsonSaveSuccess": "Configuration JSON sauvegardée",
|
||||
"logoUrl": "Адрес логотипа",
|
||||
"logs": "Journaux",
|
||||
"longRunning": "Mode d'exécution prolongée",
|
||||
"longRunningTooltip": "Une fois activé, le serveur prend en charge les tâches de longue durée, réinitialise le minuteur de temporisation à la réception des notifications de progression, et prolonge le délai d'expiration maximal à 10 minutes.",
|
||||
"marketplaces": "Places de marché",
|
||||
@ -3931,6 +3956,7 @@
|
||||
"name": "Nom",
|
||||
"newServer": "Сервер MCP",
|
||||
"noDescriptionAvailable": "Aucune description disponible pour le moment",
|
||||
"noLogs": "Aucun journal pour le moment",
|
||||
"noServers": "Aucun serveur configuré",
|
||||
"not_support": "Модель не поддерживается",
|
||||
"npx_list": {
|
||||
|
||||
@ -31,12 +31,26 @@
|
||||
}
|
||||
},
|
||||
"gitBash": {
|
||||
"autoDetected": "[to be translated]:Using auto-detected Git Bash",
|
||||
"clear": {
|
||||
"button": "[to be translated]:Clear custom path"
|
||||
},
|
||||
"customPath": "[to be translated]:Using custom path: {{path}}",
|
||||
"error": {
|
||||
"description": "Windowsでエージェントを実行するにはGit Bashが必要です。これがないとエージェントは動作しません。以下からGit for Windowsをインストールしてください。",
|
||||
"recheck": "Git Bashのインストールを再確認してください",
|
||||
"title": "Git Bashが必要です"
|
||||
},
|
||||
"found": {
|
||||
"title": "[to be translated]:Git Bash configured"
|
||||
},
|
||||
"notFound": "Git Bash が見つかりません。先にインストールしてください。",
|
||||
"pick": {
|
||||
"button": "[to be translated]:Select Git Bash Path",
|
||||
"failed": "[to be translated]:Failed to set Git Bash path",
|
||||
"invalidPath": "[to be translated]:Selected file is not a valid Git Bash executable (bash.exe).",
|
||||
"title": "[to be translated]:Select Git Bash executable"
|
||||
},
|
||||
"success": "Git Bashが正常に検出されました!"
|
||||
},
|
||||
"input": {
|
||||
@ -471,6 +485,14 @@
|
||||
"url_placeholder": "JSON URLを入力"
|
||||
},
|
||||
"manage": {
|
||||
"batch_delete": {
|
||||
"button": "バッチ削除",
|
||||
"confirm": "選択した{{count}}件のアシスタントを削除してもよろしいですか?"
|
||||
},
|
||||
"mode": {
|
||||
"delete": "削除",
|
||||
"sort": "並べ替え"
|
||||
},
|
||||
"title": "アシスタントを管理"
|
||||
},
|
||||
"my_agents": "マイアシスタント",
|
||||
@ -524,7 +546,8 @@
|
||||
"low": "少しの思考",
|
||||
"medium": "普通の思考",
|
||||
"minimal": "最小限の思考",
|
||||
"off": "オフ"
|
||||
"off": "オフ",
|
||||
"xhigh": "[to be translated]:Extra High"
|
||||
},
|
||||
"regular_phrases": {
|
||||
"add": "プロンプトを追加",
|
||||
@ -1185,6 +1208,7 @@
|
||||
"saved": "保存されました",
|
||||
"search": "検索",
|
||||
"select": "選択",
|
||||
"select_all": "すべて選択",
|
||||
"selected": "選択済み",
|
||||
"selectedItems": "{{count}}件の項目を選択しました",
|
||||
"selectedMessages": "{{count}}件のメッセージを選択しました",
|
||||
@ -3912,6 +3936,7 @@
|
||||
"jsonSaveError": "JSON設定の保存に失敗しました",
|
||||
"jsonSaveSuccess": "JSON設定が保存されました。",
|
||||
"logoUrl": "ロゴURL",
|
||||
"logs": "ログ",
|
||||
"longRunning": "長時間運行モード",
|
||||
"longRunningTooltip": "このオプションを有効にすると、サーバーは長時間のタスクをサポートします。進行状況通知を受信すると、タイムアウトがリセットされ、最大実行時間が10分に延長されます。",
|
||||
"marketplaces": "マーケットプレイス",
|
||||
@ -3931,6 +3956,7 @@
|
||||
"name": "名前",
|
||||
"newServer": "MCP サーバー",
|
||||
"noDescriptionAvailable": "説明がありません",
|
||||
"noLogs": "ログはまだありません",
|
||||
"noServers": "サーバーが設定されていません",
|
||||
"not_support": "モデルはサポートされていません",
|
||||
"npx_list": {
|
||||
|
||||
@ -31,12 +31,26 @@
|
||||
}
|
||||
},
|
||||
"gitBash": {
|
||||
"autoDetected": "Usando Git Bash detectado automaticamente",
|
||||
"clear": {
|
||||
"button": "Limpar caminho personalizado"
|
||||
},
|
||||
"customPath": "Usando caminho personalizado: {{path}}",
|
||||
"error": {
|
||||
"description": "O Git Bash é necessário para executar agentes no Windows. O agente não pode funcionar sem ele. Por favor, instale o Git para Windows a partir de",
|
||||
"recheck": "Reverificar a Instalação do Git Bash",
|
||||
"title": "Git Bash Necessário"
|
||||
},
|
||||
"found": {
|
||||
"title": "Git Bash configurado"
|
||||
},
|
||||
"notFound": "Git Bash não encontrado. Por favor, instale-o primeiro.",
|
||||
"pick": {
|
||||
"button": "Selecionar caminho do Git Bash",
|
||||
"failed": "Falha ao configurar o caminho do Git Bash",
|
||||
"invalidPath": "O arquivo selecionado não é um executável válido do Git Bash (bash.exe).",
|
||||
"title": "Selecionar executável do Git Bash"
|
||||
},
|
||||
"success": "Git Bash detectado com sucesso!"
|
||||
},
|
||||
"input": {
|
||||
@ -471,6 +485,14 @@
|
||||
"url_placeholder": "Inserir URL JSON"
|
||||
},
|
||||
"manage": {
|
||||
"batch_delete": {
|
||||
"button": "Exclusão em Lote",
|
||||
"confirm": "Tem certeza de que deseja excluir os {{count}} assistentes selecionados?"
|
||||
},
|
||||
"mode": {
|
||||
"delete": "Excluir",
|
||||
"sort": "Ordenar"
|
||||
},
|
||||
"title": "Gerir assistentes"
|
||||
},
|
||||
"my_agents": "Os meus assistentes",
|
||||
@ -524,7 +546,8 @@
|
||||
"low": "Curto",
|
||||
"medium": "Médio",
|
||||
"minimal": "mínimo",
|
||||
"off": "Desligado"
|
||||
"off": "Desligado",
|
||||
"xhigh": "[to be translated]:Extra High"
|
||||
},
|
||||
"regular_phrases": {
|
||||
"add": "Adicionar Frase",
|
||||
@ -1185,6 +1208,7 @@
|
||||
"saved": "Guardado",
|
||||
"search": "Pesquisar",
|
||||
"select": "Selecionar",
|
||||
"select_all": "Selecionar Tudo",
|
||||
"selected": "Selecionado",
|
||||
"selectedItems": "{{count}} itens selecionados",
|
||||
"selectedMessages": "{{count}} mensagens selecionadas",
|
||||
@ -3912,6 +3936,7 @@
|
||||
"jsonSaveError": "Falha ao salvar configuração JSON",
|
||||
"jsonSaveSuccess": "Configuração JSON salva com sucesso",
|
||||
"logoUrl": "URL do Logotipo",
|
||||
"logs": "Registros",
|
||||
"longRunning": "Modo de execução prolongada",
|
||||
"longRunningTooltip": "Quando ativado, o servidor suporta tarefas de longa duração, redefinindo o temporizador de tempo limite ao receber notificações de progresso e estendendo o tempo máximo de tempo limite para 10 minutos.",
|
||||
"marketplaces": "Mercados",
|
||||
@ -3931,6 +3956,7 @@
|
||||
"name": "Nome",
|
||||
"newServer": "Servidor MCP",
|
||||
"noDescriptionAvailable": "Nenhuma descrição disponível no momento",
|
||||
"noLogs": "Ainda sem registos",
|
||||
"noServers": "Nenhum servidor configurado",
|
||||
"not_support": "Modelo Não Suportado",
|
||||
"npx_list": {
|
||||
|
||||
@ -31,12 +31,26 @@
|
||||
}
|
||||
},
|
||||
"gitBash": {
|
||||
"autoDetected": "Используется автоматически обнаруженный Git Bash",
|
||||
"clear": {
|
||||
"button": "Очистить пользовательский путь"
|
||||
},
|
||||
"customPath": "Используется пользовательский путь: {{path}}",
|
||||
"error": {
|
||||
"description": "Для запуска агентов в Windows требуется Git Bash. Без него агент не может работать. Пожалуйста, установите Git для Windows с",
|
||||
"recheck": "Повторная проверка установки Git Bash",
|
||||
"title": "Требуется Git Bash"
|
||||
},
|
||||
"found": {
|
||||
"title": "Git Bash настроен"
|
||||
},
|
||||
"notFound": "Git Bash не найден. Пожалуйста, сначала установите его.",
|
||||
"pick": {
|
||||
"button": "Выбрать путь Git Bash",
|
||||
"failed": "Не удалось настроить путь Git Bash",
|
||||
"invalidPath": "Выбранный файл не является допустимым исполняемым файлом Git Bash (bash.exe).",
|
||||
"title": "Выберите исполняемый файл Git Bash"
|
||||
},
|
||||
"success": "Git Bash успешно обнаружен!"
|
||||
},
|
||||
"input": {
|
||||
@ -471,6 +485,14 @@
|
||||
"url_placeholder": "Введите JSON URL"
|
||||
},
|
||||
"manage": {
|
||||
"batch_delete": {
|
||||
"button": "Массовое удаление",
|
||||
"confirm": "Вы уверены, что хотите удалить выбранных {{count}} ассистентов?"
|
||||
},
|
||||
"mode": {
|
||||
"delete": "Удалить",
|
||||
"sort": "Сортировать"
|
||||
},
|
||||
"title": "Управление помощниками"
|
||||
},
|
||||
"my_agents": "Мои помощники",
|
||||
@ -524,7 +546,8 @@
|
||||
"low": "Меньше думать",
|
||||
"medium": "Среднее",
|
||||
"minimal": "минимальный",
|
||||
"off": "Выключить"
|
||||
"off": "Выключить",
|
||||
"xhigh": "[to be translated]:Extra High"
|
||||
},
|
||||
"regular_phrases": {
|
||||
"add": "Добавить подсказку",
|
||||
@ -1185,6 +1208,7 @@
|
||||
"saved": "Сохранено",
|
||||
"search": "Поиск",
|
||||
"select": "Выбрать",
|
||||
"select_all": "Выбрать все",
|
||||
"selected": "Выбрано",
|
||||
"selectedItems": "Выбрано {{count}} элементов",
|
||||
"selectedMessages": "Выбрано {{count}} сообщений",
|
||||
@ -3912,6 +3936,7 @@
|
||||
"jsonSaveError": "Не удалось сохранить конфигурацию JSON",
|
||||
"jsonSaveSuccess": "JSON конфигурация сохранена",
|
||||
"logoUrl": "URL логотипа",
|
||||
"logs": "Журналы",
|
||||
"longRunning": "Длительный режим работы",
|
||||
"longRunningTooltip": "Включив эту опцию, сервер будет поддерживать длительные задачи. При получении уведомлений о ходе выполнения будет сброшен тайм-аут и максимальное время выполнения будет увеличено до 10 минут.",
|
||||
"marketplaces": "Торговые площадки",
|
||||
@ -3931,6 +3956,7 @@
|
||||
"name": "Имя",
|
||||
"newServer": "MCP сервер",
|
||||
"noDescriptionAvailable": "Описание отсутствует",
|
||||
"noLogs": "Логов пока нет",
|
||||
"noServers": "Серверы не настроены",
|
||||
"not_support": "Модель не поддерживается",
|
||||
"npx_list": {
|
||||
|
||||
@ -17,6 +17,7 @@ import styled from 'styled-components'
|
||||
|
||||
import AssistantsDrawer from './components/AssistantsDrawer'
|
||||
import ChatNavbarContent from './components/ChatNavbarContent'
|
||||
import SettingsButton from './components/SettingsButton'
|
||||
import UpdateAppButton from './components/UpdateAppButton'
|
||||
|
||||
interface Props {
|
||||
@ -65,14 +66,6 @@ const HeaderNavbar: FC<Props> = ({ activeAssistant, setActiveAssistant, activeTo
|
||||
})
|
||||
}
|
||||
|
||||
// const handleUpdateModel = useCallback(
|
||||
// async (model: ApiModel) => {
|
||||
// if (!activeSession || !activeAgent) return
|
||||
// return updateModel(activeSession.id, model.id, { showSuccessToast: false })
|
||||
// },
|
||||
// [activeAgent, activeSession, updateModel]
|
||||
// )
|
||||
|
||||
return (
|
||||
<NavbarHeader className="home-navbar" style={{ height: 'var(--navbar-height)' }}>
|
||||
<div className="flex h-full min-w-0 flex-1 shrink items-center overflow-auto">
|
||||
@ -107,6 +100,7 @@ const HeaderNavbar: FC<Props> = ({ activeAssistant, setActiveAssistant, activeTo
|
||||
</div>
|
||||
<RowFlex className="items-center gap-2">
|
||||
{isTopNavbar && <UpdateAppButton />}
|
||||
<SettingsButton assistant={assistant} />
|
||||
{isTopNavbar && (
|
||||
<Tooltip placement="bottom" content={t('navbar.expand')} delay={800}>
|
||||
<NarrowIcon onClick={handleNarrowModeToggle}>
|
||||
|
||||
@ -521,7 +521,8 @@ export const InputbarCore: FC<InputbarCoreProps> = ({
|
||||
const handleFocus = useCallback(() => {
|
||||
setInputFocus(true)
|
||||
setSearching(false)
|
||||
if (quickPanel.isVisible && quickPanel.triggerInfo?.type !== 'input') {
|
||||
// Don't close panel in multiple selection mode, or if triggered by input
|
||||
if (quickPanel.isVisible && quickPanel.triggerInfo?.type !== 'input' && !quickPanel.multiple) {
|
||||
quickPanel.close()
|
||||
}
|
||||
PasteService.setLastFocusedComponent('inputbar')
|
||||
|
||||
@ -92,14 +92,6 @@ const KnowledgeBaseButton: FC<Props> = ({ quickPanel, selectedBases, onSelect, d
|
||||
}
|
||||
}, [openQuickPanel, quickPanelHook])
|
||||
|
||||
// Watch selectedBases changes and keep the already-open QuickPanel list state in sync
|
||||
useEffect(() => {
|
||||
if (quickPanelHook.isVisible && quickPanelHook.symbol === QuickPanelReservedSymbol.KnowledgeBase) {
|
||||
// Use the recomputed baseItems directly, since it already carries the latest isSelected state
|
||||
quickPanelHook.updateList(baseItems)
|
||||
}
|
||||
}, [selectedBases, quickPanelHook, baseItems])
|
||||
|
||||
useEffect(() => {
|
||||
const disposeRootMenu = quickPanel.registerRootMenu([
|
||||
{
|
||||
|
||||
@ -6,7 +6,8 @@ import {
|
||||
MdiLightbulbOn,
|
||||
MdiLightbulbOn30,
|
||||
MdiLightbulbOn50,
|
||||
MdiLightbulbOn80
|
||||
MdiLightbulbOn80,
|
||||
MdiLightbulbOn90
|
||||
} from '@renderer/components/Icons/SVGIcon'
|
||||
import { QuickPanelReservedSymbol, useQuickPanel } from '@renderer/components/QuickPanel'
|
||||
import {
|
||||
@ -93,7 +94,7 @@ const ThinkingButton: FC<Props> = ({ quickPanel, model, assistantId }): ReactEle
|
||||
level: option,
|
||||
label: getReasoningEffortOptionsLabel(option),
|
||||
description: '',
|
||||
icon: ThinkingIcon(option),
|
||||
icon: ThinkingIcon({ option }),
|
||||
isSelected: currentReasoningEffort === option,
|
||||
action: () => onThinkingChange(option)
|
||||
}))
|
||||
@ -135,7 +136,7 @@ const ThinkingButton: FC<Props> = ({ quickPanel, model, assistantId }): ReactEle
|
||||
{
|
||||
label: t('assistants.settings.reasoning_effort.label'),
|
||||
description: '',
|
||||
icon: ThinkingIcon(currentReasoningEffort),
|
||||
icon: ThinkingIcon({ option: currentReasoningEffort }),
|
||||
isMenu: true,
|
||||
action: () => openQuickPanel()
|
||||
}
|
||||
@ -163,37 +164,43 @@ const ThinkingButton: FC<Props> = ({ quickPanel, model, assistantId }): ReactEle
|
||||
aria-label={ariaLabel}
|
||||
aria-pressed={currentReasoningEffort !== 'none'}
|
||||
style={isFixedReasoning ? { cursor: 'default' } : undefined}
|
||||
icon={ThinkingIcon(currentReasoningEffort)}
|
||||
icon={ThinkingIcon({ option: currentReasoningEffort, isFixedReasoning })}
|
||||
/>
|
||||
</Tooltip>
|
||||
)
|
||||
}
|
||||
|
||||
const ThinkingIcon = (option?: ThinkingOption) => {
|
||||
const ThinkingIcon = (props: { option?: ThinkingOption; isFixedReasoning?: boolean }) => {
|
||||
let IconComponent: React.FC<React.SVGProps<SVGSVGElement>> | null = null
|
||||
|
||||
switch (option) {
|
||||
case 'minimal':
|
||||
IconComponent = MdiLightbulbOn30
|
||||
break
|
||||
case 'low':
|
||||
IconComponent = MdiLightbulbOn50
|
||||
break
|
||||
case 'medium':
|
||||
IconComponent = MdiLightbulbOn80
|
||||
break
|
||||
case 'high':
|
||||
IconComponent = MdiLightbulbOn
|
||||
break
|
||||
case 'auto':
|
||||
IconComponent = MdiLightbulbAutoOutline
|
||||
break
|
||||
case 'none':
|
||||
IconComponent = MdiLightbulbOffOutline
|
||||
break
|
||||
default:
|
||||
IconComponent = MdiLightbulbOffOutline
|
||||
break
|
||||
if (props.isFixedReasoning) {
|
||||
IconComponent = MdiLightbulbAutoOutline
|
||||
} else {
|
||||
switch (props.option) {
|
||||
case 'minimal':
|
||||
IconComponent = MdiLightbulbOn30
|
||||
break
|
||||
case 'low':
|
||||
IconComponent = MdiLightbulbOn50
|
||||
break
|
||||
case 'medium':
|
||||
IconComponent = MdiLightbulbOn80
|
||||
break
|
||||
case 'high':
|
||||
IconComponent = MdiLightbulbOn90
|
||||
break
|
||||
case 'xhigh':
|
||||
IconComponent = MdiLightbulbOn
|
||||
break
|
||||
case 'auto':
|
||||
IconComponent = MdiLightbulbAutoOutline
|
||||
break
|
||||
case 'none':
|
||||
IconComponent = MdiLightbulbOffOutline
|
||||
break
|
||||
default:
|
||||
IconComponent = MdiLightbulbOffOutline
|
||||
break
|
||||
}
|
||||
}
|
||||
|
||||
return <IconComponent className="icon" width={18} height={18} style={{ marginTop: -2 }} />
|
||||
|
||||
@ -427,7 +427,7 @@ const FileBlocksContainer = styled.div`
|
||||
display: flex;
|
||||
flex-wrap: wrap;
|
||||
gap: 8px;
|
||||
padding: 0 15px;
|
||||
padding: 0;
|
||||
margin: 8px 0;
|
||||
background: transparent;
|
||||
border-radius: 4px;
|
||||
|
||||
@@ -85,11 +85,11 @@ const MessageHeader: FC<Props> = memo(({ assistant, model, message, topic, isGro
// eslint-disable-next-line react-hooks/exhaustive-deps
}, [model?.provider, showMinappIcon])

const hideHeader = isBubbleStyle ? isUserMessage && !isMultiSelectMode : false

if (hideHeader) {
return null
}
const userNameJustifyContent = useMemo(() => {
if (!isBubbleStyle) return 'flex-start'
if (isUserMessage && !isMultiSelectMode) return 'flex-end'
return 'flex-start'
}, [isBubbleStyle, isUserMessage, isMultiSelectMode])

return (
<Container className="message-header">
@@ -123,7 +123,7 @@ const MessageHeader: FC<Props> = memo(({ assistant, model, message, topic, isGro
</>
)}
<UserWrap>
<RowFlex className="items-center">
<RowFlex className="items-center" style={{ justifyContent: userNameJustifyContent }}>
<UserName isBubbleStyle={isBubbleStyle} theme={theme}>
{username}
</UserName>

@@ -63,7 +63,10 @@ const Prompt: FC<Props> = ({ assistant, topic }) => {
}

return (
<Container className="system-prompt" onClick={() => AssistantSettingsPopup.show({ assistant })} $isDark={isDark}>
<Container
className="system-prompt"
onClick={() => AssistantSettingsPopup.show({ assistant, tab: 'prompt' })}
$isDark={isDark}>
<Text $isVisible={isVisible}>{displayText}</Text>
</Container>
)

@@ -1,45 +0,0 @@
import type { useUpdateSession } from '@renderer/hooks/agents/useUpdateSession'
import { SettingDivider } from '@renderer/pages/settings'
import { SessionSettingsPopup } from '@renderer/pages/settings/AgentSettings'
import AdvancedSettings from '@renderer/pages/settings/AgentSettings/AdvancedSettings'
import EssentialSettings from '@renderer/pages/settings/AgentSettings/EssentialSettings'
import type { GetAgentSessionResponse } from '@renderer/types'
import { Button } from 'antd'
import type { FC } from 'react'
import { useTranslation } from 'react-i18next'

interface Props {
session: GetAgentSessionResponse | undefined | null
update: ReturnType<typeof useUpdateSession>['updateSession']
}

const SessionSettingsTab: FC<Props> = ({ session, update }) => {
const { t } = useTranslation()

const onMoreSetting = () => {
if (session?.id) {
SessionSettingsPopup.show({
agentId: session.agent_id,
sessionId: session.id
})
}
}

if (!session) {
return null
}

return (
<div className="w-[var(--assistants-width)] p-2 px-3 pt-4">
<EssentialSettings agentBase={session} update={update} showModelSetting={false} />
<SettingDivider />
<AdvancedSettings agentBase={session} update={update} />
<SettingDivider />
<Button size="small" block onClick={onMoreSetting}>
{t('settings.moresetting.label')}
</Button>
</div>
)
}

export default SessionSettingsTab
@ -1,8 +1,6 @@
|
||||
import {
|
||||
Button,
|
||||
DescriptionSwitch,
|
||||
HelpTooltip,
|
||||
RowFlex,
|
||||
Select,
|
||||
SelectContent,
|
||||
SelectItem,
|
||||
@ -13,38 +11,30 @@ import {
|
||||
import { useMultiplePreferences, usePreference } from '@data/hooks/usePreference'
|
||||
import EditableNumber from '@renderer/components/EditableNumber'
|
||||
import Scrollbar from '@renderer/components/Scrollbar'
|
||||
import {
|
||||
DEFAULT_CONTEXTCOUNT,
|
||||
DEFAULT_MAX_TOKENS,
|
||||
DEFAULT_TEMPERATURE,
|
||||
MAX_CONTEXT_COUNT
|
||||
} from '@renderer/config/constant'
|
||||
import { isOpenAIModel, isSupportVerbosityModel } from '@renderer/config/models'
|
||||
import { UNKNOWN } from '@renderer/config/translate'
|
||||
import { useCodeStyle } from '@renderer/context/CodeStyleProvider'
|
||||
import { useTheme } from '@renderer/context/ThemeProvider'
|
||||
import { cacheService } from '@renderer/data/CacheService'
|
||||
import { useAssistant } from '@renderer/hooks/useAssistant'
|
||||
import { useProvider } from '@renderer/hooks/useProvider'
|
||||
import useTranslate from '@renderer/hooks/useTranslate'
|
||||
import { SettingDivider, SettingRow, SettingRowTitle } from '@renderer/pages/settings'
|
||||
import AssistantSettingsPopup from '@renderer/pages/settings/AssistantSettings'
|
||||
import { CollapsibleSettingGroup } from '@renderer/pages/settings/SettingGroup'
|
||||
import { getDefaultModel } from '@renderer/services/AssistantService'
|
||||
import type { Assistant, AssistantSettings, CodeStyleVarious, MathEngine } from '@renderer/types'
|
||||
import type { Assistant, CodeStyleVarious, MathEngine } from '@renderer/types'
|
||||
import { isGroqSystemProvider } from '@renderer/types'
|
||||
import { modalConfirm } from '@renderer/utils'
|
||||
import { getSendMessageShortcutLabel } from '@renderer/utils/input'
|
||||
import {
|
||||
isOpenAICompatibleProvider,
|
||||
isSupportServiceTierProvider,
|
||||
isSupportVerbosityProvider
|
||||
} from '@renderer/utils/provider'
|
||||
import type { MultiModelMessageStyle, SendMessageShortcut } from '@shared/data/preference/preferenceTypes'
|
||||
import type { SendMessageShortcut } from '@shared/data/preference/preferenceTypes'
|
||||
import { ThemeMode } from '@shared/data/preference/preferenceTypes'
|
||||
import { Col, InputNumber, Row, Slider } from 'antd'
|
||||
import { Settings2 } from 'lucide-react'
|
||||
import { Col, Row, Slider } from 'antd'
|
||||
import type { FC } from 'react'
|
||||
import { useCallback, useEffect, useMemo, useState } from 'react'
|
||||
import { useCallback, useMemo, useState } from 'react'
|
||||
import { useTranslation } from 'react-i18next'
|
||||
import styled from 'styled-components'
|
||||
|
||||
@ -113,20 +103,14 @@ const SettingsTab: FC<Props> = (props) => {
|
||||
})
|
||||
const [codeFancyBlock, setCodeFancyBlock] = usePreference('chat.code.fancy_block')
|
||||
|
||||
const { assistant, updateAssistantSettings } = useAssistant(props.assistant.id)
|
||||
const { assistant } = useAssistant(props.assistant.id)
|
||||
const { provider } = useProvider(assistant.model.provider)
|
||||
|
||||
const { theme } = useTheme()
|
||||
const { themeNames } = useCodeStyle()
|
||||
|
||||
// FIXME: We should use useMemo to calculate these states instead of using useEffect to sync
|
||||
const [temperature, setTemperature] = useState(assistant?.settings?.temperature ?? DEFAULT_TEMPERATURE)
|
||||
const [enableTemperature, setEnableTemperature] = useState(assistant?.settings?.enableTemperature ?? true)
|
||||
const [contextCount, setContextCount] = useState(assistant?.settings?.contextCount ?? DEFAULT_CONTEXTCOUNT)
|
||||
const [enableMaxTokens, setEnableMaxTokens] = useState(assistant?.settings?.enableMaxTokens ?? false)
|
||||
const [maxTokens, setMaxTokens] = useState(assistant?.settings?.maxTokens ?? 0)
|
||||
const [fontSizeValue, setFontSizeValue] = useState(fontSize)
|
||||
const [streamOutput, setStreamOutput] = useState(assistant?.settings?.streamOutput)
|
||||
const { translateLanguages } = useTranslate()
|
||||
|
||||
const { t } = useTranslation()
|
||||
@ -139,16 +123,6 @@ const SettingsTab: FC<Props> = (props) => {
|
||||
[t]
|
||||
)
|
||||
|
||||
const multiModelMessageStyleItems = useMemo<SelectorItem<MultiModelMessageStyle>[]>(
|
||||
() => [
|
||||
{ value: 'fold', label: t('message.message.multi_model_style.fold.label') },
|
||||
{ value: 'vertical', label: t('message.message.multi_model_style.vertical') },
|
||||
{ value: 'horizontal', label: t('message.message.multi_model_style.horizontal') },
|
||||
{ value: 'grid', label: t('message.message.multi_model_style.grid') }
|
||||
],
|
||||
[t]
|
||||
)
|
||||
|
||||
const messageNavigationItems = useMemo<SelectorItem<'none' | 'buttons' | 'anchor'>[]>(
|
||||
() => [
|
||||
{ value: 'none', label: t('settings.messages.navigation.none') },
|
||||
@ -188,28 +162,6 @@ const SettingsTab: FC<Props> = (props) => {
|
||||
[]
|
||||
)
|
||||
|
||||
const onUpdateAssistantSettings = (settings: Partial<AssistantSettings>) => {
|
||||
updateAssistantSettings(settings)
|
||||
}
|
||||
|
||||
const onTemperatureChange = (value) => {
|
||||
if (!isNaN(value as number)) {
|
||||
onUpdateAssistantSettings({ temperature: value })
|
||||
}
|
||||
}
|
||||
|
||||
const onContextCountChange = (value) => {
|
||||
if (!isNaN(value as number)) {
|
||||
onUpdateAssistantSettings({ contextCount: value })
|
||||
}
|
||||
}
|
||||
|
||||
const onMaxTokensChange = (value) => {
|
||||
if (!isNaN(value as number)) {
|
||||
onUpdateAssistantSettings({ maxTokens: value })
|
||||
}
|
||||
}
|
||||
|
||||
const codeStyle = useMemo(() => {
|
||||
return codeEditor.enabled
|
||||
? theme === ThemeMode.light
|
||||
@ -236,15 +188,6 @@ const SettingsTab: FC<Props> = (props) => {
|
||||
[theme, codeEditor.enabled, setCodeEditor, setCodeViewer]
|
||||
)
|
||||
|
||||
useEffect(() => {
|
||||
setTemperature(assistant?.settings?.temperature ?? DEFAULT_TEMPERATURE)
|
||||
setEnableTemperature(assistant?.settings?.enableTemperature ?? true)
|
||||
setContextCount(assistant?.settings?.contextCount ?? DEFAULT_CONTEXTCOUNT)
|
||||
setEnableMaxTokens(assistant?.settings?.enableMaxTokens ?? false)
|
||||
setMaxTokens(assistant?.settings?.maxTokens ?? DEFAULT_MAX_TOKENS)
|
||||
setStreamOutput(assistant?.settings?.streamOutput ?? true)
|
||||
}, [assistant])
|
||||
|
||||
const model = assistant.model || getDefaultModel()
|
||||
|
||||
const showOpenAiSettings =
|
||||
@ -253,173 +196,40 @@ const SettingsTab: FC<Props> = (props) => {
|
||||
isSupportServiceTierProvider(provider) ||
|
||||
(isSupportVerbosityModel(model) && isSupportVerbosityProvider(provider))
|
||||
|
||||
const activeTopicOrSession = cacheService.get('chat.active_view')
|
||||
const isTopicSettings = activeTopicOrSession === 'topic'
|
||||
|
||||
return (
|
||||
<Container className="settings-tab">
|
||||
{props.assistant.id !== 'fake' && (
|
||||
<CollapsibleSettingGroup
|
||||
title={t('assistants.settings.title')}
|
||||
defaultExpanded={true}
|
||||
extra={
|
||||
<RowFlex className="items-center gap-0.5">
|
||||
<Button
|
||||
variant="ghost"
|
||||
size="icon-sm"
|
||||
onClick={() => AssistantSettingsPopup.show({ assistant, tab: 'model' })}>
|
||||
<Settings2 size={16} />
|
||||
</Button>
|
||||
</RowFlex>
|
||||
}>
|
||||
<SettingGroup style={{ marginTop: 5 }}>
|
||||
<Row align="middle">
|
||||
<SettingRowTitleSmall>
|
||||
{t('chat.settings.temperature.label')}
|
||||
<HelpTooltip title={t('chat.settings.temperature.tip')} />
|
||||
</SettingRowTitleSmall>
|
||||
<Switch
|
||||
style={{ marginLeft: 'auto' }}
|
||||
checked={enableTemperature}
|
||||
onCheckedChange={(enabled) => {
|
||||
setEnableTemperature(enabled)
|
||||
onUpdateAssistantSettings({ enableTemperature: enabled })
|
||||
}}
|
||||
/>
|
||||
</Row>
|
||||
{enableTemperature ? (
|
||||
<Row align="middle" gutter={10}>
|
||||
<Col span={23}>
|
||||
<Slider
|
||||
min={0}
|
||||
max={2}
|
||||
onChange={setTemperature}
|
||||
onChangeComplete={onTemperatureChange}
|
||||
value={typeof temperature === 'number' ? temperature : 0}
|
||||
step={0.1}
|
||||
/>
|
||||
</Col>
|
||||
</Row>
|
||||
) : (
|
||||
<SettingDivider />
|
||||
)}
|
||||
<Row align="middle" gutter={10} justify="space-between">
|
||||
<SettingRowTitleSmall>
|
||||
{t('chat.settings.context_count.label')}
|
||||
<HelpTooltip title={t('chat.settings.context_count.tip')} />
|
||||
</SettingRowTitleSmall>
|
||||
<Col span={8}>
|
||||
<EditableNumber
|
||||
min={0}
|
||||
max={20}
|
||||
step={1}
|
||||
value={contextCount}
|
||||
changeOnBlur
|
||||
onChange={(value) => {
|
||||
if (value !== null && value >= 0 && value <= 20) {
|
||||
setContextCount(value)
|
||||
onContextCountChange(value)
|
||||
}
|
||||
}}
|
||||
formatter={(value) => (value === MAX_CONTEXT_COUNT ? t('chat.settings.max') : (value ?? ''))}
|
||||
style={{ width: '100%' }}
|
||||
/>
|
||||
</Col>
|
||||
</Row>
|
||||
<Row align="middle" gutter={10}>
|
||||
<Col span={24}>
|
||||
<Slider
|
||||
min={0}
|
||||
max={20}
|
||||
onChange={setContextCount}
|
||||
onChangeComplete={onContextCountChange}
|
||||
value={Math.min(contextCount, 20)}
|
||||
tooltip={{ open: false }}
|
||||
step={1}
|
||||
marks={{
|
||||
0: '0',
|
||||
10: '10',
|
||||
20: '20'
|
||||
}}
|
||||
/>
|
||||
</Col>
|
||||
</Row>
|
||||
<SettingDivider />
|
||||
<SettingRow>
|
||||
<SettingRowTitleSmall>{t('models.stream_output')}</SettingRowTitleSmall>
|
||||
<Switch
|
||||
checked={streamOutput}
|
||||
onCheckedChange={(checked) => {
|
||||
setStreamOutput(checked)
|
||||
onUpdateAssistantSettings({ streamOutput: checked })
|
||||
}}
|
||||
/>
|
||||
</SettingRow>
|
||||
<SettingDivider />
|
||||
<SettingRow>
|
||||
<Row align="middle">
|
||||
<SettingRowTitleSmall>
|
||||
{t('chat.settings.max_tokens.label')}
|
||||
<HelpTooltip title={t('chat.settings.max_tokens.tip')} />
|
||||
</SettingRowTitleSmall>
|
||||
</Row>
|
||||
<Switch
|
||||
checked={enableMaxTokens}
|
||||
onCheckedChange={async (enabled) => {
|
||||
if (enabled) {
|
||||
const confirmed = await modalConfirm({
|
||||
title: t('chat.settings.max_tokens.confirm'),
|
||||
content: t('chat.settings.max_tokens.confirm_content'),
|
||||
okButtonProps: {
|
||||
danger: true
|
||||
}
|
||||
})
|
||||
if (!confirmed) return
|
||||
}
|
||||
setEnableMaxTokens(enabled)
|
||||
onUpdateAssistantSettings({ enableMaxTokens: enabled })
|
||||
}}
|
||||
/>
|
||||
</SettingRow>
|
||||
{enableMaxTokens && (
|
||||
<Row align="middle" gutter={10} style={{ marginTop: 10 }}>
|
||||
<Col span={24}>
|
||||
<InputNumber
|
||||
disabled={!enableMaxTokens}
|
||||
min={0}
|
||||
max={10000000}
|
||||
step={100}
|
||||
value={typeof maxTokens === 'number' ? maxTokens : 0}
|
||||
changeOnBlur
|
||||
onChange={(value) => value && setMaxTokens(value)}
|
||||
onBlur={() => onMaxTokensChange(maxTokens)}
|
||||
style={{ width: '100%' }}
|
||||
/>
|
||||
</Col>
|
||||
</Row>
|
||||
)}
|
||||
<SettingDivider />
|
||||
</SettingGroup>
|
||||
</CollapsibleSettingGroup>
|
||||
)}
|
||||
{showOpenAiSettings && (
|
||||
<OpenAISettingsGroup
|
||||
model={model}
|
||||
providerId={provider.id}
|
||||
SettingGroup={SettingGroup}
|
||||
SettingRowTitleSmall={SettingRowTitleSmall}
|
||||
/>
|
||||
)}
|
||||
{isGroqSystemProvider(provider) && (
|
||||
<GroqSettingsGroup SettingGroup={SettingGroup} SettingRowTitleSmall={SettingRowTitleSmall} />
|
||||
{isTopicSettings && (
|
||||
<>
|
||||
{showOpenAiSettings && (
|
||||
<OpenAISettingsGroup
|
||||
model={model}
|
||||
providerId={provider.id}
|
||||
SettingGroup={SettingGroup}
|
||||
SettingRowTitleSmall={SettingRowTitleSmall}
|
||||
/>
|
||||
)}
|
||||
{isGroqSystemProvider(provider) && (
|
||||
<GroqSettingsGroup SettingGroup={SettingGroup} SettingRowTitleSmall={SettingRowTitleSmall} />
|
||||
)}
|
||||
</>
|
||||
)}
|
||||
<CollapsibleSettingGroup title={t('settings.messages.title')} defaultExpanded={true}>
|
||||
<SettingGroup>
|
||||
<SettingRow>
|
||||
<DescriptionSwitch
|
||||
checked={showPrompt}
|
||||
onCheckedChange={setShowPrompt}
|
||||
label={t('settings.messages.prompt')}
|
||||
/>
|
||||
</SettingRow>
|
||||
<SettingDivider />
|
||||
{isTopicSettings && (
|
||||
<>
|
||||
<SettingRow>
|
||||
<DescriptionSwitch
|
||||
checked={showPrompt}
|
||||
onCheckedChange={setShowPrompt}
|
||||
label={t('settings.messages.prompt')}
|
||||
/>
|
||||
</SettingRow>
|
||||
<SettingDivider />
|
||||
</>
|
||||
)}
|
||||
<SettingRow>
|
||||
<DescriptionSwitch
|
||||
checked={messageFont === 'serif'}
|
||||
@ -437,14 +247,19 @@ const SettingsTab: FC<Props> = (props) => {
|
||||
/>
|
||||
</SettingRow>
|
||||
<SettingDivider />
|
||||
<SettingRow>
|
||||
<DescriptionSwitch
|
||||
checked={showMessageOutline}
|
||||
onCheckedChange={setShowMessageOutline}
|
||||
label={t('settings.messages.show_message_outline')}
|
||||
/>
|
||||
</SettingRow>
|
||||
<SettingDivider />
|
||||
{isTopicSettings && (
|
||||
<>
|
||||
<SettingRow>
|
||||
<SettingRowTitleSmall>{t('settings.messages.show_message_outline')}</SettingRowTitleSmall>
|
||||
<Switch
|
||||
size="sm"
|
||||
checked={showMessageOutline}
|
||||
onCheckedChange={(checked) => setShowMessageOutline(checked)}
|
||||
/>
|
||||
</SettingRow>
|
||||
<SettingDivider />
|
||||
</>
|
||||
)}
|
||||
<SettingRow>
|
||||
<SettingRowTitleSmall>{t('message.message.style.label')}</SettingRowTitleSmall>
|
||||
<Select value={messageStyle} onValueChange={setMessageStyle}>
|
||||
@ -461,22 +276,30 @@ const SettingsTab: FC<Props> = (props) => {
|
||||
</Select>
|
||||
</SettingRow>
|
||||
<SettingDivider />
|
||||
<SettingRow>
|
||||
<SettingRowTitleSmall>{t('message.message.multi_model_style.label')}</SettingRowTitleSmall>
|
||||
<Select value={multiModelMessageStyle} onValueChange={setMultiModelMessageStyle}>
|
||||
<SelectTrigger size="sm" className="w-[180px]">
|
||||
<SelectValue />
|
||||
</SelectTrigger>
|
||||
<SelectContent>
|
||||
{multiModelMessageStyleItems.map((item) => (
|
||||
<SelectItem key={item.value} value={item.value}>
|
||||
{item.label}
|
||||
</SelectItem>
|
||||
))}
|
||||
</SelectContent>
|
||||
</Select>
|
||||
</SettingRow>
|
||||
<SettingDivider />
|
||||
{isTopicSettings && (
|
||||
<>
|
||||
<SettingRow>
|
||||
<SettingRowTitleSmall>{t('message.message.multi_model_style.label')}</SettingRowTitleSmall>
|
||||
<Select value={multiModelMessageStyle} onValueChange={setMultiModelMessageStyle}>
|
||||
<SelectContent>
|
||||
<SelectItem key="fold" value="fold">
|
||||
{t('message.message.multi_model_style.fold.label')}
|
||||
</SelectItem>
|
||||
<SelectItem key="vertical" value="vertical">
|
||||
{t('message.message.multi_model_style.vertical')}
|
||||
</SelectItem>
|
||||
<SelectItem key="horizontal" value="horizontal">
|
||||
{t('message.message.multi_model_style.horizontal')}
|
||||
</SelectItem>
|
||||
<SelectItem key="grid" value="grid">
|
||||
{t('message.message.multi_model_style.grid')}
|
||||
</SelectItem>
|
||||
</SelectContent>
|
||||
</Select>
|
||||
</SettingRow>
|
||||
<SettingDivider />
|
||||
</>
|
||||
)}
|
||||
<SettingRow>
|
||||
<SettingRowTitleSmall>{t('settings.messages.navigation.label')}</SettingRowTitleSmall>
|
||||
<Select value={messageNavigation} onValueChange={setMessageNavigation}>
|
||||
@ -682,14 +505,15 @@ const SettingsTab: FC<Props> = (props) => {
|
||||
</CollapsibleSettingGroup>
|
||||
<CollapsibleSettingGroup title={t('settings.messages.input.title')} defaultExpanded={false}>
|
||||
<SettingGroup>
|
||||
<SettingRow>
|
||||
<DescriptionSwitch
|
||||
checked={showInputEstimatedTokens}
|
||||
onCheckedChange={setShowInputEstimatedTokens}
|
||||
label={t('settings.messages.input.show_estimated_tokens')}
|
||||
/>
|
||||
</SettingRow>
|
||||
<SettingDivider />
|
||||
{isTopicSettings && (
|
||||
<>
|
||||
<SettingRow>
|
||||
<SettingRowTitleSmall>{t('settings.messages.input.show_estimated_tokens')}</SettingRowTitleSmall>
|
||||
<Switch size="sm" checked={showInputEstimatedTokens} onCheckedChange={setShowInputEstimatedTokens} />
|
||||
</SettingRow>
|
||||
<SettingDivider />
|
||||
</>
|
||||
)}
|
||||
<SettingRow>
|
||||
<DescriptionSwitch
|
||||
checked={pasteLongTextAsFile}
|
||||
|
||||
@ -1,6 +1,5 @@
|
||||
import { usePreference } from '@data/hooks/usePreference'
|
||||
import { DeleteIcon, EditIcon } from '@renderer/components/Icons'
|
||||
import { useSessions } from '@renderer/hooks/agents/useSessions'
|
||||
import AgentSettingsPopup from '@renderer/pages/settings/AgentSettings/AgentSettingsPopup'
|
||||
import { AgentLabel } from '@renderer/pages/settings/AgentSettings/shared'
|
||||
import { EVENT_NAMES, EventEmitter } from '@renderer/services/EventService'
|
||||
@ -8,7 +7,7 @@ import type { AgentEntity } from '@renderer/types'
|
||||
import { cn } from '@renderer/utils'
|
||||
import type { MenuProps } from 'antd'
|
||||
import { Dropdown, Tooltip } from 'antd'
|
||||
import { Bot } from 'lucide-react'
|
||||
import { Bot, MoreVertical } from 'lucide-react'
|
||||
import type { FC } from 'react'
|
||||
import { memo, useCallback, useMemo } from 'react'
|
||||
import { useTranslation } from 'react-i18next'
|
||||
@ -24,9 +23,9 @@ interface AgentItemProps {
|
||||
|
||||
const AgentItem: FC<AgentItemProps> = ({ agent, isActive, onDelete, onPress }) => {
|
||||
const { t } = useTranslation()
|
||||
const { sessions } = useSessions(agent.id)
|
||||
const [topicPosition] = usePreference('topic.position')
|
||||
const [clickAssistantToShowTopic] = usePreference('assistant.click_to_show_topic')
|
||||
const [assistantIconType] = usePreference('assistant.icon_type')
|
||||
|
||||
const handlePress = useCallback(() => {
|
||||
// Show session sidebar if setting is enabled (reusing the assistant setting for consistency)
|
||||
@ -38,6 +37,14 @@ const AgentItem: FC<AgentItemProps> = ({ agent, isActive, onDelete, onPress }) =
|
||||
onPress()
|
||||
}, [clickAssistantToShowTopic, topicPosition, onPress])
|
||||

const handleMoreClick = useCallback(
(e: React.MouseEvent) => {
e.stopPropagation()
agent.id && AgentSettingsPopup.show({ agentId: agent.id })
},
[agent.id]
)

const menuItems: MenuProps['items'] = useMemo(
|
||||
() => [
|
||||
{
|
||||
@ -73,14 +80,14 @@ const AgentItem: FC<AgentItemProps> = ({ agent, isActive, onDelete, onPress }) =
|
||||
<Container onClick={handlePress} isActive={isActive}>
|
||||
<AssistantNameRow className="name" title={agent.name ?? agent.id}>
|
||||
<AgentNameWrapper>
|
||||
<AgentLabel agent={agent} />
|
||||
<AgentLabel agent={agent} hideIcon={assistantIconType === 'none'} />
|
||||
</AgentNameWrapper>
|
||||
{isActive && (
|
||||
<MenuButton>
|
||||
<SessionCount>{sessions.length}</SessionCount>
|
||||
<MenuButton onClick={handleMoreClick}>
|
||||
<MoreVertical size={14} className="text-[var(--color-text-secondary)]" />
|
||||
</MenuButton>
|
||||
)}
|
||||
{!isActive && <BotIcon />}
|
||||
{!isActive && assistantIconType !== 'none' && <BotIcon />}
|
||||
</AssistantNameRow>
|
||||
</Container>
|
||||
</Dropdown>
|
||||
@ -117,7 +124,7 @@ export const AgentNameWrapper: React.FC<React.HTMLAttributes<HTMLDivElement>> =
|
||||
export const MenuButton: React.FC<React.HTMLAttributes<HTMLDivElement>> = ({ className, ...props }) => (
|
||||
<div
|
||||
className={cn(
|
||||
'flex h-5 min-h-5 w-5 flex-row items-center justify-center rounded-full border border-[var(--color-border)] bg-[var(--color-background)]',
|
||||
'flex h-[22px] min-h-[22px] min-w-[22px] flex-row items-center justify-center rounded-[11px] border-[0.5px] border-[var(--color-border)] bg-[var(--color-background)] px-[5px]',
|
||||
className
|
||||
)}
|
||||
{...props}
|
||||
|
||||
@ -22,6 +22,7 @@ import {
|
||||
ArrowUpAZ,
|
||||
BrushCleaning,
|
||||
Check,
|
||||
MoreVertical,
|
||||
Plus,
|
||||
Save,
|
||||
Settings2,
|
||||
@ -152,6 +153,14 @@ const AssistantItem: FC<AssistantItemProps> = ({
|
||||
[assistant.emoji, assistantName]
|
||||
)
|
||||
|
||||
const handleMoreClick = useCallback(
|
||||
(e: React.MouseEvent) => {
|
||||
e.stopPropagation()
|
||||
AssistantSettingsPopup.show({ assistant })
|
||||
},
|
||||
[assistant]
|
||||
)
|
||||
|
||||
return (
|
||||
<Dropdown
|
||||
menu={{ items: menuItems }}
|
||||
@ -176,8 +185,8 @@ const AssistantItem: FC<AssistantItemProps> = ({
|
||||
<AssistantName className="text-nowrap">{assistantName}</AssistantName>
|
||||
</AssistantNameRow>
|
||||
{isActive && (
|
||||
<MenuButton onClick={() => EventEmitter.emit(EVENT_NAMES.SWITCH_TOPIC_SIDEBAR)}>
|
||||
<TopicCount className="topics-count">{assistant.topics.length}</TopicCount>
|
||||
<MenuButton onClick={handleMoreClick}>
|
||||
<MoreVertical size={14} className="text-[var(--color-text-secondary)]" />
|
||||
</MenuButton>
|
||||
)}
|
||||
</Container>
|
||||
@ -449,19 +458,4 @@ const MenuButton = ({
|
||||
</div>
|
||||
)
|
||||
|
||||
const TopicCount = ({
|
||||
children,
|
||||
className,
|
||||
...props
|
||||
}: PropsWithChildren<{} & React.HTMLAttributes<HTMLDivElement>>) => (
|
||||
<div
|
||||
{...props}
|
||||
className={cn(
|
||||
'flex flex-row items-center justify-center rounded-[10px] text-[10px] text-[var(--color-text)]',
|
||||
className
|
||||
)}>
|
||||
{children}
|
||||
</div>
|
||||
)
|
||||
|
||||
export default memo(AssistantItem)
|
||||
|
||||
@ -233,12 +233,11 @@ const SessionListItem = styled.div`
|
||||
}
|
||||
|
||||
&.singlealone {
|
||||
border-radius: 0 !important;
|
||||
&:hover {
|
||||
background-color: var(--color-background-soft);
|
||||
}
|
||||
&.active {
|
||||
border-left: 2px solid var(--color-primary);
|
||||
background-color: var(--color-background-mute);
|
||||
box-shadow: none;
|
||||
}
|
||||
}
|
||||
|
||||
@ -15,8 +15,6 @@ import styled from 'styled-components'
|
||||
import AddButton from './AddButton'
|
||||
import SessionItem from './SessionItem'
|
||||
|
||||
// const logger = loggerService.withContext('SessionsTab')
|
||||
|
||||
interface SessionsProps {
|
||||
agentId: string
|
||||
}
|
||||
|
||||
@ -518,7 +518,7 @@ export const Topics: React.FC<Props> = ({ assistant: _assistant, activeTopic, se
|
||||
className="topics-tab"
|
||||
list={sortedTopics}
|
||||
onUpdate={updateTopics}
|
||||
style={{ height: '100%', padding: '11px 0 10px 10px' }}
|
||||
style={{ height: '100%', padding: '9px 0 10px 10px' }}
|
||||
itemContainerStyle={{ paddingBottom: '8px' }}
|
||||
header={
|
||||
<>
|
||||
@ -651,12 +651,11 @@ const TopicListItem = styled.div`
|
||||
}
|
||||
}
|
||||
&.singlealone {
|
||||
border-radius: 0 !important;
|
||||
&:hover {
|
||||
background-color: var(--color-background-soft);
|
||||
}
|
||||
&.active {
|
||||
border-left: 2px solid var(--color-primary);
|
||||
background-color: var(--color-background-mute);
|
||||
box-shadow: none;
|
||||
}
|
||||
}
|
||||
|
||||
@ -1,24 +1,19 @@
|
||||
import { usePreference } from '@data/hooks/usePreference'
|
||||
import AddAssistantPopup from '@renderer/components/Popups/AddAssistantPopup'
|
||||
import { useCache } from '@renderer/data/hooks/useCache'
|
||||
import { useActiveSession } from '@renderer/hooks/agents/useActiveSession'
|
||||
import { useUpdateSession } from '@renderer/hooks/agents/useUpdateSession'
|
||||
import { useAssistants, useDefaultAssistant } from '@renderer/hooks/useAssistant'
|
||||
import { useNavbarPosition } from '@renderer/hooks/useNavbar'
|
||||
import { useShowTopics } from '@renderer/hooks/useStore'
|
||||
import { EVENT_NAMES, EventEmitter } from '@renderer/services/EventService'
|
||||
import type { Assistant, Topic } from '@renderer/types'
|
||||
import type { Tab } from '@renderer/types/chat'
|
||||
import { classNames, getErrorMessage, uuid } from '@renderer/utils'
|
||||
import { Alert, Skeleton } from 'antd'
|
||||
import { classNames, uuid } from '@renderer/utils'
|
||||
import type { FC } from 'react'
|
||||
import { useEffect, useState } from 'react'
|
||||
import { useTranslation } from 'react-i18next'
|
||||
import styled from 'styled-components'
|
||||
|
||||
import Assistants from './AssistantsTab'
|
||||
import SessionSettingsTab from './SessionSettingsTab'
|
||||
import Settings from './SettingsTab'
|
||||
import Topics from './TopicsTab'
|
||||
|
||||
interface Props {
|
||||
@ -48,13 +43,8 @@ const HomeTabs: FC<Props> = ({
|
||||
const { toggleShowTopics } = useShowTopics()
|
||||
const { isLeftNavbar } = useNavbarPosition()
|
||||
const { t } = useTranslation()
|
||||
const [activeAgentId, setActiveAgentId] = useCache('agent.active_id')
|
||||
const [activeTopicOrSession, setActiveTopicOrSession] = useCache('chat.active_view')
|
||||
const { session, isLoading: isSessionLoading, error: sessionError } = useActiveSession()
|
||||
const { updateSession } = useUpdateSession(activeAgentId)
|
||||
|
||||
const isSessionView = activeTopicOrSession === 'session'
|
||||
const isTopicView = activeTopicOrSession === 'topic'
|
||||
const [, setActiveAgentId] = useCache('agent.active_id')
|
||||
const [, setActiveTopicOrSession] = useCache('chat.active_view')
|
||||
|
||||
const [tab, setTab] = useState<Tab>(position === 'left' ? _tab || 'assistants' : 'topic')
|
||||
const borderStyle = '0.5px solid var(--color-border)'
|
||||
@ -94,9 +84,6 @@ const HomeTabs: FC<Props> = ({
|
||||
EventEmitter.on(EVENT_NAMES.SHOW_TOPIC_SIDEBAR, (): any => {
|
||||
showTab && setTab('topic')
|
||||
}),
|
||||
EventEmitter.on(EVENT_NAMES.SHOW_CHAT_SETTINGS, (): any => {
|
||||
showTab && setTab('settings')
|
||||
}),
|
||||
EventEmitter.on(EVENT_NAMES.SWITCH_TOPIC_SIDEBAR, () => {
|
||||
showTab && setTab('topic')
|
||||
if (position === 'left' && topicPosition === 'right') {
|
||||
@ -111,7 +98,7 @@ const HomeTabs: FC<Props> = ({
|
||||
if (position === 'right' && topicPosition === 'right' && tab === 'assistants') {
|
||||
setTab('topic')
|
||||
}
|
||||
if (position === 'left' && topicPosition === 'right' && (tab === 'topic' || tab === 'settings')) {
|
||||
if (position === 'left' && topicPosition === 'right' && tab === 'topic') {
|
||||
setTab('assistants')
|
||||
}
|
||||
}, [position, tab, topicPosition, forceToSeeAllTab])
|
||||
@ -128,20 +115,6 @@ const HomeTabs: FC<Props> = ({
|
||||
<TabItem active={tab === 'topic'} onClick={() => setTab('topic')}>
|
||||
{t('common.topics')}
|
||||
</TabItem>
|
||||
<TabItem active={tab === 'settings'} onClick={() => setTab('settings')}>
|
||||
{t('settings.title')}
|
||||
</TabItem>
|
||||
</CustomTabs>
|
||||
)}
|
||||
|
||||
{position === 'right' && topicPosition === 'right' && (
|
||||
<CustomTabs>
|
||||
<TabItem active={tab === 'topic'} onClick={() => setTab('topic')}>
|
||||
{t('common.topics')}
|
||||
</TabItem>
|
||||
<TabItem active={tab === 'settings'} onClick={() => setTab('settings')}>
|
||||
{t('settings.title')}
|
||||
</TabItem>
|
||||
</CustomTabs>
|
||||
)}
|
||||
|
||||
@ -162,22 +135,6 @@ const HomeTabs: FC<Props> = ({
|
||||
position={position}
|
||||
/>
|
||||
)}
|
||||
{tab === 'settings' && isTopicView && <Settings assistant={activeAssistant} />}
|
||||
{tab === 'settings' && isSessionView && !sessionError && (
|
||||
<Skeleton loading={isSessionLoading} active style={{ height: '100%', padding: '16px' }}>
|
||||
<SessionSettingsTab session={session} update={updateSession} />
|
||||
</Skeleton>
|
||||
)}
|
||||
{tab === 'settings' && isSessionView && sessionError && (
|
||||
<div className="w-[var(--assistants-width)] p-2 px-3 pt-4">
|
||||
<Alert
|
||||
type="error"
|
||||
message={t('agent.session.get.error.failed')}
|
||||
description={getErrorMessage(sessionError)}
|
||||
style={{ padding: '10px 15px' }}
|
||||
/>
|
||||
</div>
|
||||
)}
|
||||
</TabContent>
|
||||
</Container>
|
||||
)
|
||||
|
||||
@ -1,14 +1,17 @@
|
||||
import EmojiIcon from '@renderer/components/EmojiIcon'
|
||||
import HorizontalScrollContainer from '@renderer/components/HorizontalScrollContainer'
|
||||
import { useCache } from '@renderer/data/hooks/useCache'
|
||||
import { useActiveAgent } from '@renderer/hooks/agents/useActiveAgent'
|
||||
import { useActiveSession } from '@renderer/hooks/agents/useActiveSession'
|
||||
import { useUpdateSession } from '@renderer/hooks/agents/useUpdateSession'
|
||||
import AssistantSettingsPopup from '@renderer/pages/settings/AssistantSettings'
|
||||
import type { AgentEntity, AgentSessionEntity, ApiModel, Assistant } from '@renderer/types'
|
||||
import { getLeadingEmoji } from '@renderer/utils'
|
||||
import { formatErrorMessageWithPrefix } from '@renderer/utils/error'
|
||||
import { t } from 'i18next'
|
||||
import { ChevronRight, Folder } from 'lucide-react'
|
||||
import type { FC, ReactNode } from 'react'
|
||||
import { useCallback } from 'react'
|
||||
import { useCallback, useMemo } from 'react'
|
||||
import { twMerge } from 'tailwind-merge'
|
||||
|
||||
import { AgentSettingsPopup, SessionSettingsPopup } from '../../settings/AgentSettings'
|
||||
@ -28,6 +31,8 @@ const ChatNavbarContent: FC<Props> = ({ assistant }) => {
|
||||
const { session: activeSession } = useActiveSession()
|
||||
const { updateModel } = useUpdateSession(activeAgent?.id ?? null)
|
||||
|
||||
const assistantName = useMemo(() => assistant.name || t('chat.default.name'), [assistant.name])
|
||||
|
||||
const handleUpdateModel = useCallback(
|
||||
async (model: ApiModel) => {
|
||||
if (!activeAgent || !activeSession) return
|
||||
@ -38,7 +43,25 @@ const ChatNavbarContent: FC<Props> = ({ assistant }) => {
|
||||
|
||||
return (
|
||||
<>
|
||||
{activeTopicOrSession === 'topic' && <SelectModelButton assistant={assistant} />}
|
||||
{activeTopicOrSession === 'topic' && (
|
||||
<HorizontalScrollContainer className="ml-2 flex-initial">
|
||||
<div className="flex flex-nowrap items-center gap-2">
|
||||
{/* Assistant Label */}
|
||||
<div
|
||||
className="flex h-full cursor-pointer items-center gap-1.5"
|
||||
onClick={() => AssistantSettingsPopup.show({ assistant })}>
|
||||
<EmojiIcon emoji={assistant.emoji || getLeadingEmoji(assistantName)} size={24} />
|
||||
<span className="max-w-40 truncate text-xs">{assistantName}</span>
|
||||
</div>
|
||||
|
||||
{/* Separator */}
|
||||
<ChevronRight className="h-4 w-4 text-gray-400" />
|
||||
|
||||
{/* Model Button */}
|
||||
<SelectModelButton assistant={assistant} />
|
||||
</div>
|
||||
</HorizontalScrollContainer>
|
||||
)}
|
||||
{activeTopicOrSession === 'session' && activeAgent && (
|
||||
<HorizontalScrollContainer className="ml-2 flex-initial">
|
||||
<div className="flex flex-nowrap items-center gap-2">
|
||||
|
||||
38
src/renderer/src/pages/home/components/SettingsButton.tsx
Normal file
@@ -0,0 +1,38 @@
import type { Assistant } from '@renderer/types'
import { Drawer, Tooltip } from 'antd'
import { t } from 'i18next'
import { Settings2 } from 'lucide-react'
import type { FC } from 'react'
import { useState } from 'react'

import { NavbarIcon } from '../ChatNavbar'
import HomeSettings from '../Tabs/SettingsTab'

interface Props {
assistant: Assistant
}

const SettingsButton: FC<Props> = ({ assistant }) => {
const [settingsOpen, setSettingsOpen] = useState(false)

return (
<>
<Tooltip title={t('settings.title')} mouseEnterDelay={0.8}>
<NavbarIcon onClick={() => setSettingsOpen(true)}>
<Settings2 size={18} />
</NavbarIcon>
</Tooltip>
<Drawer
placement="right"
open={settingsOpen}
onClose={() => setSettingsOpen(false)}
width="var(--assistants-width)"
closable={false}
styles={{ body: { padding: 0, paddingTop: 'var(--navbar-height)' } }}>
<HomeSettings assistant={assistant} />
</Drawer>
</>
)
}

export default SettingsButton
@ -119,7 +119,7 @@ const AgentSettingPopupContainer: React.FC<AgentSettingPopupParams> = ({ tab, ag
|
||||
onOk={onOk}
|
||||
onCancel={onCancel}
|
||||
afterClose={afterClose}
|
||||
maskClosable={false}
|
||||
maskClosable={menu !== 'prompt'}
|
||||
footer={null}
|
||||
title={<AgentLabel agent={agent} />}
|
||||
transitionName="animation-move-down"
|
||||
|
||||
@ -28,14 +28,15 @@ export type AgentLabelProps = {
|
||||
avatar?: string
|
||||
name?: string
|
||||
}
|
||||
hideIcon?: boolean
|
||||
}
|
||||
|
||||
export const AgentLabel: React.FC<AgentLabelProps> = ({ agent, classNames }) => {
|
||||
export const AgentLabel: React.FC<AgentLabelProps> = ({ agent, classNames, hideIcon }) => {
|
||||
const emoji = agent?.configuration?.avatar
|
||||
|
||||
return (
|
||||
<div className={cn('flex w-full items-center gap-2 truncate', classNames?.container)}>
|
||||
<EmojiIcon emoji={emoji || '⭐️'} className={classNames?.avatar} />
|
||||
{!hideIcon && <EmojiIcon emoji={emoji || '⭐️'} className={classNames?.avatar} size={24} />}
|
||||
<span className={cn('truncate', 'text-[var(--color-text)]', classNames?.name)}>
|
||||
{agent?.name ?? (agent?.type ? getAgentTypeLabel(agent.type) : '')}
|
||||
</span>
|
||||
|
||||
@ -37,7 +37,7 @@ interface Props extends AssistantSettingPopupShowParams {
|
||||
const AssistantSettingPopupContainer: React.FC<Props> = ({ resolve, tab, ...props }) => {
|
||||
const [open, setOpen] = useState(true)
|
||||
const { t } = useTranslation()
|
||||
const [menu, setMenu] = useState<AssistantSettingPopupTab>(tab || 'prompt')
|
||||
const [menu, setMenu] = useState<AssistantSettingPopupTab>(tab || 'model')
|
||||
|
||||
const _useAssistant = useAssistant(props.assistant.id)
|
||||
const _useAgent = useAssistantPreset(props.assistant.id)
|
||||
@ -64,14 +64,14 @@ const AssistantSettingPopupContainer: React.FC<Props> = ({ resolve, tab, ...prop
|
||||
}
|
||||
|
||||
const items = [
|
||||
{
|
||||
key: 'prompt',
|
||||
label: t('assistants.settings.prompt')
|
||||
},
|
||||
{
|
||||
key: 'model',
|
||||
label: t('assistants.settings.model')
|
||||
},
|
||||
{
|
||||
key: 'prompt',
|
||||
label: t('assistants.settings.prompt')
|
||||
},
|
||||
showKnowledgeIcon && {
|
||||
key: 'knowledge_base',
|
||||
label: t('assistants.settings.knowledge_base.label')
|
||||
@ -96,7 +96,7 @@ const AssistantSettingPopupContainer: React.FC<Props> = ({ resolve, tab, ...prop
|
||||
onOk={onOk}
|
||||
onCancel={onCancel}
|
||||
afterClose={afterClose}
|
||||
maskClosable={false}
|
||||
maskClosable={menu !== 'prompt'}
|
||||
footer={null}
|
||||
title={assistant.name}
|
||||
transitionName="animation-move-down"
|
||||
@ -116,22 +116,22 @@ const AssistantSettingPopupContainer: React.FC<Props> = ({ resolve, tab, ...prop
|
||||
<RowFlex>
|
||||
<LeftMenu>
|
||||
<StyledMenu
|
||||
defaultSelectedKeys={[tab || 'prompt']}
|
||||
defaultSelectedKeys={[tab || 'model']}
|
||||
mode="vertical"
|
||||
items={items}
|
||||
onSelect={({ key }) => setMenu(key as AssistantSettingPopupTab)}
|
||||
/>
|
||||
</LeftMenu>
|
||||
<Settings>
|
||||
{menu === 'prompt' && (
|
||||
<AssistantPromptSettings
|
||||
{menu === 'model' && (
|
||||
<AssistantModelSettings
|
||||
assistant={assistant}
|
||||
updateAssistant={updateAssistant}
|
||||
updateAssistantSettings={updateAssistantSettings}
|
||||
/>
|
||||
)}
|
||||
{menu === 'model' && (
|
||||
<AssistantModelSettings
|
||||
{menu === 'prompt' && (
|
||||
<AssistantPromptSettings
|
||||
assistant={assistant}
|
||||
updateAssistant={updateAssistant}
|
||||
updateAssistantSettings={updateAssistantSettings}
|
||||
|
||||
@ -11,8 +11,9 @@ import MCPDescription from '@renderer/pages/settings/MCPSettings/McpDescription'
|
||||
import type { MCPPrompt, MCPResource, MCPServer, MCPTool } from '@renderer/types'
|
||||
import { parseKeyValueString } from '@renderer/utils/env'
|
||||
import { formatMcpError } from '@renderer/utils/error'
|
||||
import type { MCPServerLogEntry } from '@shared/config/types'
|
||||
import type { TabsProps } from 'antd'
|
||||
import { Badge, Form, Input, Radio, Select, Tabs } from 'antd'
|
||||
import { Badge, Form, Input, Modal, Radio, Select, Tabs, Tag, Typography } from 'antd'
|
||||
import TextArea from 'antd/es/input/TextArea'
|
||||
import { ChevronDown, SaveIcon } from 'lucide-react'
|
||||
import React, { useCallback, useEffect, useState } from 'react'
|
||||
@ -90,8 +91,11 @@ const McpSettings: React.FC = () => {
|
||||
|
||||
const [showAdvanced, setShowAdvanced] = useState(false)
|
||||
const [serverVersion, setServerVersion] = useState<string | null>(null)
|
||||
const [logModalOpen, setLogModalOpen] = useState(false)
|
||||
const [logs, setLogs] = useState<(MCPServerLogEntry & { serverId?: string })[]>([])
|
||||
|
||||
const { theme } = useTheme()
|
||||
const { Text } = Typography
|
||||
|
||||
const navigate = useNavigate()
|
||||
|
||||
@ -236,12 +240,43 @@ const McpSettings: React.FC = () => {
|
||||
}
|
||||
}
|
||||

const fetchServerLogs = async () => {
try {
const history = await window.api.mcp.getServerLogs(server)
setLogs(history)
} catch (error) {
logger.warn('Failed to load server logs', error as Error)
}
}

useEffect(() => {
const unsubscribe = window.api.mcp.onServerLog((log) => {
if (log.serverId && log.serverId !== server.id) return
setLogs((prev) => {
const merged = [...prev, log]
if (merged.length > 200) {
return merged.slice(merged.length - 200)
}
return merged
})
})

return () => {
unsubscribe?.()
}
}, [server.id])

useEffect(() => {
setLogs([])
}, [server.id])

useEffect(() => {
if (server.isActive) {
fetchTools()
fetchPrompts()
fetchResources()
fetchServerVersion()
fetchServerLogs()
}
// eslint-disable-next-line react-hooks/exhaustive-deps
}, [server.id, server.isActive])
@ -738,6 +773,9 @@ const McpSettings: React.FC = () => {
|
||||
<ServerName className="text-nowrap">{server?.name}</ServerName>
|
||||
{serverVersion && <VersionBadge count={serverVersion} color="blue" />}
|
||||
</Flex>
|
||||
<Button size="sm" variant="ghost" onClick={() => setLogModalOpen(true)}>
|
||||
{t('settings.mcp.logs', 'View Logs')}
|
||||
</Button>
|
||||
<Button size="sm" variant="ghost" onClick={() => onDeleteMcpServer(server)}>
|
||||
<DeleteIcon size={14} className="lucide-custom text-destructive" />
|
||||
</Button>
|
||||
@ -769,6 +807,37 @@ const McpSettings: React.FC = () => {
|
||||
/>
|
||||
</SettingGroup>
|
||||
</SettingContainer>
|
||||
|
||||
<Modal
|
||||
title={t('settings.mcp.logs', 'Server Logs')}
|
||||
open={logModalOpen}
|
||||
onCancel={() => setLogModalOpen(false)}
|
||||
footer={null}
|
||||
width={720}
|
||||
centered
|
||||
transitionName="animation-move-down"
|
||||
bodyStyle={{ maxHeight: '60vh', minHeight: '40vh', overflowY: 'auto' }}
|
||||
afterOpenChange={(open) => {
|
||||
if (open) {
|
||||
fetchServerLogs()
|
||||
}
|
||||
}}>
|
||||
<LogList>
|
||||
{logs.length === 0 && <Text type="secondary">{t('settings.mcp.noLogs', 'No logs yet')}</Text>}
|
||||
{logs.map((log, idx) => (
|
||||
<LogItem key={`${log.timestamp}-${idx}`}>
|
||||
<Flex className="items-baseline gap-8">
|
||||
<Timestamp>{new Date(log.timestamp).toLocaleTimeString()}</Timestamp>
|
||||
<Tag color={mapLogLevelColor(log.level)}>{log.level}</Tag>
|
||||
<Text>{log.message}</Text>
|
||||
</Flex>
|
||||
{log.data && (
|
||||
<PreBlock>{typeof log.data === 'string' ? log.data : JSON.stringify(log.data, null, 2)}</PreBlock>
|
||||
)}
|
||||
</LogItem>
|
||||
))}
|
||||
</LogList>
|
||||
</Modal>
|
||||
</Container>
|
||||
)
|
||||
}
|
||||
@ -791,6 +860,52 @@ const AdvancedSettingsButton = styled.div`
|
||||
align-items: center;
|
||||
`
|
||||
|
||||
const LogList = styled.div`
|
||||
display: flex;
|
||||
flex-direction: column;
|
||||
gap: 12px;
|
||||
`
|
||||
|
||||
const LogItem = styled.div`
|
||||
background: var(--color-bg-2, #1f1f1f);
|
||||
color: var(--color-text-1, #e6e6e6);
|
||||
border-radius: 8px;
|
||||
padding: 10px 12px;
|
||||
border: 1px solid var(--color-border, rgba(255, 255, 255, 0.08));
|
||||
`
|
||||
|
||||
const Timestamp = styled.span`
|
||||
color: var(--color-text-3, #9aa2b1);
|
||||
font-size: 12px;
|
||||
`
|
||||
|
||||
const PreBlock = styled.pre`
|
||||
margin: 6px 0 0;
|
||||
padding: 8px;
|
||||
background: var(--color-bg-3, #111418);
|
||||
color: var(--color-text-1, #e6e6e6);
|
||||
border-radius: 6px;
|
||||
font-size: 12px;
|
||||
white-space: pre-wrap;
|
||||
word-break: break-word;
|
||||
border: 1px solid var(--color-border, rgba(255, 255, 255, 0.08));
|
||||
`
|
||||
|
||||
function mapLogLevelColor(level: MCPServerLogEntry['level']) {
switch (level) {
case 'error':
case 'stderr':
return 'red'
case 'warn':
return 'orange'
case 'info':
case 'stdout':
return 'blue'
default:
return 'default'
}
}

const VersionBadge = styled(Badge)`
|
||||
.ant-badge-count {
|
||||
background-color: var(--color-primary);
|
||||
|
||||
@ -0,0 +1,74 @@
|
||||
import { useProvider } from '@renderer/hooks/useProvider'
|
||||
import { Select } from 'antd'
|
||||
import type { FC } from 'react'
|
||||
import { useCallback, useMemo } from 'react'
|
||||
import { useTranslation } from 'react-i18next'
|
||||
|
||||
interface CherryINSettingsProps {
|
||||
providerId: string
|
||||
apiHost: string
|
||||
setApiHost: (host: string) => void
|
||||
}
|
||||
|
||||
const API_HOST_OPTIONS = [
|
||||
{
|
||||
value: 'https://open.cherryin.cc',
|
||||
labelKey: '加速域名',
|
||||
description: 'open.cherryin.cc'
|
||||
},
|
||||
{
|
||||
value: 'https://open.cherryin.net',
|
||||
labelKey: '国际域名',
|
||||
description: 'open.cherryin.net'
|
||||
},
|
||||
{
|
||||
value: 'https://open.cherryin.ai',
|
||||
labelKey: '备用域名',
|
||||
description: 'open.cherryin.ai'
|
||||
}
|
||||
]
|
||||
|
||||
const CherryINSettings: FC<CherryINSettingsProps> = ({ providerId, apiHost, setApiHost }) => {
|
||||
const { updateProvider } = useProvider(providerId)
|
||||
const { t } = useTranslation()
|
||||
|
||||
const getCurrentHost = useMemo(() => {
|
||||
const matchedOption = API_HOST_OPTIONS.find((option) => apiHost?.includes(option.value.replace('https://', '')))
|
||||
return matchedOption?.value ?? API_HOST_OPTIONS[0].value
|
||||
}, [apiHost])
|
||||
|
||||
const handleHostChange = useCallback(
|
||||
(value: string) => {
|
||||
setApiHost(value)
|
||||
updateProvider({ apiHost: value, anthropicApiHost: value })
|
||||
},
|
||||
[setApiHost, updateProvider]
|
||||
)
|
||||
|
||||
const options = useMemo(
|
||||
() =>
|
||||
API_HOST_OPTIONS.map((option) => ({
|
||||
value: option.value,
|
||||
label: (
|
||||
<div className="flex flex-col gap-0.5">
|
||||
<span>{option.labelKey}</span>
|
||||
<span className="text-[var(--color-text-3)] text-xs">{t(option.description)}</span>
|
||||
</div>
|
||||
)
|
||||
})),
|
||||
[t]
|
||||
)
|
||||
|
||||
return (
|
||||
<Select
|
||||
value={getCurrentHost}
|
||||
onChange={handleHostChange}
|
||||
options={options}
|
||||
style={{ width: '100%', marginTop: 5 }}
|
||||
optionLabelProp="label"
|
||||
labelRender={(option) => option.value}
|
||||
/>
|
||||
)
|
||||
}
|
||||
|
||||
export default CherryINSettings
|
||||
@ -10,7 +10,6 @@ import { PROVIDER_URLS } from '@renderer/config/providers'
|
||||
import { useTheme } from '@renderer/context/ThemeProvider'
|
||||
import { useAllProviders, useProvider, useProviders } from '@renderer/hooks/useProvider'
|
||||
import { useTimer } from '@renderer/hooks/useTimer'
|
||||
import i18n from '@renderer/i18n'
|
||||
import AnthropicSettings from '@renderer/pages/settings/ProviderSettings/AnthropicSettings'
|
||||
import { ModelList } from '@renderer/pages/settings/ProviderSettings/ModelList'
|
||||
import { checkApi } from '@renderer/services/ApiService'
|
||||
@ -53,6 +52,7 @@ import {
|
||||
} from '..'
|
||||
import ApiOptionsSettingsPopup from './ApiOptionsSettings/ApiOptionsSettingsPopup'
|
||||
import AwsBedrockSettings from './AwsBedrockSettings'
|
||||
import CherryINSettings from './CherryINSettings'
|
||||
import CustomHeaderPopup from './CustomHeaderPopup'
|
||||
import DMXAPISettings from './DMXAPISettings'
|
||||
import GithubCopilotSettings from './GithubCopilotSettings'
|
||||
@ -99,13 +99,15 @@ const ProviderSetting: FC<Props> = ({ providerId }) => {
|
||||
const [anthropicApiHost, setAnthropicHost] = useState<string | undefined>(provider.anthropicApiHost)
|
||||
const [apiVersion, setApiVersion] = useState(provider.apiVersion)
|
||||
const [activeHostField, setActiveHostField] = useState<HostField>('apiHost')
|
||||
const { t } = useTranslation()
|
||||
const { t, i18n } = useTranslation()
|
||||
const { theme } = useTheme()
|
||||
const { setTimeoutTimer } = useTimer()
|
||||
const dispatch = useAppDispatch()
|
||||
|
||||
const isAzureOpenAI = isAzureOpenAIProvider(provider)
|
||||
const isDmxapi = provider.id === 'dmxapi'
|
||||
const isCherryIN = provider.id === 'cherryin'
|
||||
const isChineseUser = i18n.language.startsWith('zh')
|
||||
const noAPIInputProviders = ['aws-bedrock'] as const satisfies SystemProviderId[]
|
||||
const hideApiInput = noAPIInputProviders.some((id) => id === provider.id)
|
||||
const noAPIKeyInputProviders = ['copilot', 'vertexai'] as const satisfies SystemProviderId[]
|
||||
@ -339,13 +341,16 @@ const ProviderSetting: FC<Props> = ({ providerId }) => {
|
||||
}, [provider.anthropicApiHost])
|
||||
|
||||
const canConfigureAnthropicHost = useMemo(() => {
|
||||
if (isCherryIN) {
|
||||
return false
|
||||
}
|
||||
if (isNewApiProvider(provider)) {
|
||||
return true
|
||||
}
|
||||
return (
|
||||
provider.type !== 'anthropic' && isSystemProviderId(provider.id) && isAnthropicCompatibleProviderId(provider.id)
|
||||
)
|
||||
}, [provider])
|
||||
}, [isCherryIN, provider])
|
||||
|
||||
const anthropicHostPreview = useMemo(() => {
|
||||
const rawHost = anthropicApiHost ?? provider.anthropicApiHost
|
||||
@ -513,19 +518,23 @@ const ProviderSetting: FC<Props> = ({ providerId }) => {
|
||||
</SettingSubtitle>
|
||||
{activeHostField === 'apiHost' && (
|
||||
<>
|
||||
<Space.Compact style={{ width: '100%', marginTop: 5 }}>
|
||||
<Input
|
||||
value={apiHost}
|
||||
placeholder={t('settings.provider.api_host')}
|
||||
onChange={(e) => setApiHost(e.target.value)}
|
||||
onBlur={onUpdateApiHost}
|
||||
/>
|
||||
{isApiHostResettable && (
|
||||
<Button variant="destructive" onClick={onReset}>
|
||||
{t('settings.provider.api.url.reset')}
|
||||
</Button>
|
||||
)}
|
||||
</Space.Compact>
|
||||
{isCherryIN && isChineseUser ? (
|
||||
<CherryINSettings providerId={provider.id} apiHost={apiHost} setApiHost={setApiHost} />
|
||||
) : (
|
||||
<Space.Compact style={{ width: '100%', marginTop: 5 }}>
|
||||
<Input
|
||||
value={apiHost}
|
||||
placeholder={t('settings.provider.api_host')}
|
||||
onChange={(e) => setApiHost(e.target.value)}
|
||||
onBlur={onUpdateApiHost}
|
||||
/>
|
||||
{isApiHostResettable && (
|
||||
<Button variant="destructive" onClick={onReset}>
|
||||
{t('settings.provider.api.url.reset')}
|
||||
</Button>
|
||||
)}
|
||||
</Space.Compact>
|
||||
)}
|
||||
{isVertexProvider(provider) && (
|
||||
<SettingHelpTextRow>
|
||||
<SettingHelpText>{t('settings.provider.vertex_ai.api_host_help')}</SettingHelpText>
|
||||
|
||||
@@ -9,8 +9,8 @@ import { DEFAULT_SEARCH_ENGINES } from '../components/SelectionActionSearchModal

const logger = loggerService.withContext('useSettingsActionsList')

const MAX_CUSTOM_ITEMS = 8
const MAX_ENABLED_ITEMS = 6
const MAX_CUSTOM_ITEMS = 10
const MAX_ENABLED_ITEMS = 8

export const useActionItems = (
initialItems: SelectionActionItem[] | undefined,

@ -11,7 +11,7 @@ import type { AssistantPreset } from '@renderer/types'
|
||||
import { uuid } from '@renderer/utils'
|
||||
import { Empty, Input } from 'antd'
|
||||
import { omit } from 'lodash'
|
||||
import { Import, Plus, Rss, Search } from 'lucide-react'
|
||||
import { Import, Plus, Rss, Search, Settings2 } from 'lucide-react'
|
||||
import type { FC } from 'react'
|
||||
import { useCallback, useEffect, useMemo, useState } from 'react'
|
||||
import { useTranslation } from 'react-i18next'
|
||||
@ -25,6 +25,7 @@ import AssistantPresetCard from './components/AssistantPresetCard'
|
||||
import { AssistantPresetGroupIcon } from './components/AssistantPresetGroupIcon'
|
||||
import AssistantsSubscribeUrlSettings from './components/AssistantsSubscribeUrlSettings'
|
||||
import ImportAssistantPresetPopup from './components/ImportAssistantPresetPopup'
|
||||
import ManageAssistantPresetsPopup from './components/ManageAssistantPresetsPopup'
|
||||
|
||||
const AssistantPresetsPage: FC = () => {
|
||||
const [search, setSearch] = useState('')
|
||||
@ -185,6 +186,10 @@ const AssistantPresetsPage: FC = () => {
|
||||
})
|
||||
}
|
||||
|
||||
const handleManageAgents = () => {
|
||||
ManageAssistantPresetsPopup.show()
|
||||
}
|
||||
|
||||
return (
|
||||
<Container>
|
||||
<Navbar>
|
||||
@ -290,6 +295,10 @@ const AssistantPresetsPage: FC = () => {
|
||||
<Rss size={18} color="var(--color-icon)" />
|
||||
{t('assistants.presets.settings.title')}
|
||||
</Button>
|
||||
<Button variant="ghost" onClick={handleManageAgents}>
|
||||
<Settings2 size={18} color="var(--color-icon)" />
|
||||
{t('assistants.presets.manage.title')}
|
||||
</Button>
|
||||
<Button variant="ghost" onClick={handleAddAgent}>
|
||||
<Plus size={18} color="var(--color-icon)" />
|
||||
{t('assistants.presets.add.title')}
|
||||
|
||||
@ -8,7 +8,8 @@ import type { AssistantPreset } from '@renderer/types'
|
||||
import { getLeadingEmoji } from '@renderer/utils'
|
||||
import { Dropdown } from 'antd'
|
||||
import { t } from 'i18next'
|
||||
import { ArrowDownAZ, Ellipsis, PlusIcon, SquareArrowOutUpRight } from 'lucide-react'
|
||||
import { isArray } from 'lodash'
|
||||
import { Ellipsis, PlusIcon, Settings2, SquareArrowOutUpRight } from 'lucide-react'
|
||||
import { type FC, memo, useCallback, useEffect, useRef, useState } from 'react'
|
||||
import styled from 'styled-components'
|
||||
|
||||
@ -77,9 +78,9 @@ const AssistantPresetCard: FC<Props> = ({ preset, onClick, activegroup, getLocal
|
||||
}
|
||||
},
|
||||
{
|
||||
key: 'sort',
|
||||
label: t('assistants.presets.sorting.title'),
|
||||
icon: <ArrowDownAZ size={14} />,
|
||||
key: 'manage',
|
||||
label: t('assistants.presets.manage.title'),
|
||||
icon: <Settings2 size={14} />,
|
||||
onClick: (e: any) => {
|
||||
e.domEvent.stopPropagation()
|
||||
ManageAssistantPresetsPopup.show()
|
||||
@ -143,7 +144,7 @@ const AssistantPresetCard: FC<Props> = ({ preset, onClick, activegroup, getLocal
|
||||
{getLocalizedGroupName('我的')}
|
||||
</CustomTag>
|
||||
)}
|
||||
{!!preset.group?.length &&
|
||||
{isArray(preset.group) &&
|
||||
preset.group.map((group) => (
|
||||
<CustomTag key={group} color="#A0A0A0" size={11}>
|
||||
{getLocalizedGroupName(group)}
|
||||
|
||||
@ -1,21 +1,23 @@
|
||||
import { MenuOutlined } from '@ant-design/icons'
|
||||
import { Box, RowFlex } from '@cherrystudio/ui'
|
||||
import { DraggableList } from '@renderer/components/DraggableList'
|
||||
import { DeleteIcon } from '@renderer/components/Icons'
|
||||
import { TopView } from '@renderer/components/TopView'
|
||||
import { useAssistantPresets } from '@renderer/hooks/useAssistantPresets'
|
||||
import { Empty, Modal } from 'antd'
|
||||
import { useEffect, useState } from 'react'
|
||||
import type { AssistantPreset } from '@renderer/types'
|
||||
import { Button, Checkbox, Empty, Modal, Segmented } from 'antd'
|
||||
import { useState } from 'react'
|
||||
import { useTranslation } from 'react-i18next'
|
||||
import styled from 'styled-components'
|
||||
|
||||
type Mode = 'sort' | 'delete'
|
||||
|
||||
const PopupContainer: React.FC = () => {
|
||||
const [open, setOpen] = useState(true)
|
||||
const { t } = useTranslation()
|
||||
const { presets, setAssistantPresets } = useAssistantPresets()
|
||||
|
||||
const onOk = () => {
|
||||
setOpen(false)
|
||||
}
|
||||
const [mode, setMode] = useState<Mode>(() => (presets.length > 50 ? 'delete' : 'sort'))
|
||||
const [selectedIds, setSelectedIds] = useState<Set<string>>(new Set())
|
||||
|
||||
const onCancel = () => {
|
||||
setOpen(false)
|
||||
@@ -25,17 +27,74 @@ const PopupContainer: React.FC = () => {
    ManageAssistantPresetsPopup.hide()
  }

  useEffect(() => {
    if (presets.length === 0) {
      setOpen(false)
  const handleModeChange = (value: Mode) => {
    setMode(value)
    setSelectedIds(new Set())
  }

  const handleSelectAll = () => {
    if (selectedIds.size === presets.length) {
      setSelectedIds(new Set())
    } else {
      setSelectedIds(new Set(presets.map((p) => p.id)))
    }
  }, [presets])
  }

  const handleSelectNext100 = () => {
    // Find the last selected preset's index
    let startIndex = 0
    if (selectedIds.size > 0) {
      for (let i = presets.length - 1; i >= 0; i--) {
        if (selectedIds.has(presets[i].id)) {
          startIndex = i + 1
          break
        }
      }
    }

    // Select next 100 unselected presets starting from startIndex
    const newSelected = new Set(selectedIds)
    let count = 0
    for (let i = startIndex; i < presets.length && count < 100; i++) {
      if (!newSelected.has(presets[i].id)) {
        newSelected.add(presets[i].id)
        count++
      }
    }
    setSelectedIds(newSelected)
  }

  const handleSelect = (preset: AssistantPreset) => {
    const newSelected = new Set(selectedIds)
    if (newSelected.has(preset.id)) {
      newSelected.delete(preset.id)
    } else {
      newSelected.add(preset.id)
    }
    setSelectedIds(newSelected)
  }

  const handleBatchDelete = () => {
    if (selectedIds.size === 0) return

    window.modal.confirm({
      centered: true,
      content: t('assistants.presets.manage.batch_delete.confirm', { count: selectedIds.size }),
      onOk: () => {
        const remainingPresets = presets.filter((p) => !selectedIds.has(p.id))
        setAssistantPresets(remainingPresets)
        setSelectedIds(new Set())
      }
    })
  }

  const isAllSelected = presets.length > 0 && selectedIds.size === presets.length
  const isIndeterminate = selectedIds.size > 0 && selectedIds.size < presets.length

  return (
    <Modal
      title={t('assistants.presets.manage.title')}
      open={open}
      onOk={onOk}
      onCancel={onCancel}
      afterClose={onClose}
      footer={null}
@@ -43,18 +102,78 @@ const PopupContainer: React.FC = () => {
      centered>
      <Container>
        {presets.length > 0 && (
          <DraggableList list={presets} onUpdate={setAssistantPresets}>
            {(item) => (
              <AgentItem>
                <Box className="mr-8">
                  {item.emoji} {item.name}
                </Box>
                <RowFlex className="gap-[15px]">
                  <MenuOutlined style={{ cursor: 'move' }} />
          <>
            <ActionBar>
              {mode === 'delete' ? (
                <RowFlex className="items-center">
                  <Checkbox checked={isAllSelected} indeterminate={isIndeterminate} onChange={handleSelectAll}>
                    {t('common.select_all')}
                  </Checkbox>
                  {presets.length > 100 && selectedIds.size < presets.length && (
                    <Button type="link" size="small" onClick={handleSelectNext100} style={{ padding: 0 }}>
                      +100
                    </Button>
                  )}
                </RowFlex>
              </AgentItem>
              ) : (
                <div />
              )}
              <RowFlex className="items-center gap-8px">
                {mode === 'delete' && (
                  <Button
                    danger
                    type="text"
                    icon={<DeleteIcon size={14} />}
                    disabled={selectedIds.size === 0}
                    onClick={handleBatchDelete}>
                    {t('assistants.presets.manage.batch_delete.button')} ({selectedIds.size})
                  </Button>
                )}
                <Segmented
                  size="small"
                  value={mode}
                  onChange={(value) => handleModeChange(value as Mode)}
                  options={[
                    { label: t('assistants.presets.manage.mode.sort'), value: 'sort' },
                    { label: t('assistants.presets.manage.mode.delete'), value: 'delete' }
                  ]}
                />
              </RowFlex>
            </ActionBar>

            {mode === 'sort' ? (
              <AgentList>
                <DraggableList list={presets} onUpdate={setAssistantPresets}>
                  {(item) => (
                    <AgentItem>
                      <Box className="mr-8">
                        {item.emoji} {item.name}
                      </Box>
                      <RowFlex className="gap-[15px]">
                        <MenuOutlined style={{ cursor: 'move' }} />
                      </RowFlex>
                    </AgentItem>
                  )}
                </DraggableList>
              </AgentList>
            ) : (
              <AgentList>
                {presets.map((item) => (
                  <SelectableAgentItem
                    key={item.id}
                    onClick={() => handleSelect(item)}
                    $selected={selectedIds.has(item.id)}>
                    <RowFlex className="items-center gap-8px">
                      <Checkbox checked={selectedIds.has(item.id)} onChange={() => handleSelect(item)} />
                      <Box>
                        {item.emoji} {item.name}
                      </Box>
                    </RowFlex>
                  </SelectableAgentItem>
                ))}
              </AgentList>
            )}
          </DraggableList>
          </>
        )}
        {presets.length === 0 && <Empty description="" />}
      </Container>
@@ -65,6 +184,21 @@ const PopupContainer: React.FC = () => {
const Container = styled.div`
  padding: 12px 0;
  height: 50vh;
  display: flex;
  flex-direction: column;
`

const ActionBar = styled.div`
  display: flex;
  align-items: center;
  justify-content: space-between;
  padding: 0 8px 12px;
  border-bottom: 1px solid var(--color-border);
  margin-bottom: 12px;
`

const AgentList = styled.div`
  flex: 1;
  overflow-y: auto;
  &::-webkit-scrollbar {
    display: none;
@@ -90,6 +224,23 @@ const AgentItem = styled.div`
  }
`

const SelectableAgentItem = styled.div<{ $selected: boolean }>`
  display: flex;
  flex-direction: row;
  align-items: center;
  justify-content: space-between;
  padding: 8px;
  border-radius: 8px;
  user-select: none;
  background-color: ${(props) => (props.$selected ? 'var(--color-primary-mute)' : 'var(--color-background-soft)')};
  margin-bottom: 8px;
  cursor: pointer;
  transition: background-color 0.2s ease;
  &:hover {
    background-color: ${(props) => (props.$selected ? 'var(--color-primary-mute)' : 'var(--color-background-mute)')};
  }
`

export default class ManageAssistantPresetsPopup {
  static topviewId = 0
  static hide() {

@@ -75,7 +75,7 @@ class KnowledgeQueue {

      let processableItem = findProcessableItem()
      while (processableItem) {
        this.processItem(baseId, processableItem).then()
        await this.processItem(baseId, processableItem)
        processableItem = findProcessableItem()
      }
    } finally {
@@ -99,7 +99,27 @@ class KnowledgeQueue {
    const userId = await preferenceService.get('app.user.id')
    try {
      if (item.retryCount && item.retryCount >= this.MAX_RETRIES) {
        logger.info(`Item ${item.id} has reached max retries, skipping`)
        const errorMessage = item.processingError
          ? `Max retries exceeded: ${item.processingError}`
          : 'Max retries exceeded'
        logger.warn(`Item ${item.id} has reached max retries, marking as failed`)
        notificationService.send({
          id: uuid(),
          type: 'error',
          title: t('common.knowledge_base'),
          message: t('notification.knowledge.error', { error: errorMessage }),
          silent: false,
          timestamp: Date.now(),
          source: 'knowledge'
        })
        store.dispatch(
          updateItemProcessingStatus({
            baseId,
            itemId: item.id,
            status: 'failed',
            error: errorMessage
          })
        )
        return
      }

@@ -23,7 +23,7 @@ import { isPromptToolUse, isSupportedToolUse } from '@renderer/utils/mcp-tools'
import { findFileBlocks, getMainTextContent } from '@renderer/utils/messageUtils/find'
import { containsSupportedVariables, replacePromptVariables } from '@renderer/utils/prompt'
import { NOT_SUPPORT_API_KEY_PROVIDER_TYPES, NOT_SUPPORT_API_KEY_PROVIDERS } from '@renderer/utils/provider'
import { cloneDeep, isEmpty, takeRight } from 'lodash'
import { isEmpty, takeRight } from 'lodash'

import type { ModernAiProviderConfig } from '../aiCore/index_new'
import AiProviderNew from '../aiCore/index_new'
@@ -100,9 +100,11 @@ export async function fetchChatCompletion({
  })

  // Get base provider and apply API key rotation
  // NOTE: Shallow copy is intentional. Provider objects are not mutated by downstream code.
  // Nested properties (if any) are never modified after creation.
  const baseProvider = getProviderByModel(assistant.model || getDefaultModel())
  const providerWithRotatedKey = {
    ...cloneDeep(baseProvider),
    ...baseProvider,
    apiKey: getRotatedApiKey(baseProvider)
  }
@@ -184,8 +186,10 @@ export async function fetchMessagesSummary({ messages, assistant }: { messages:
  }

  // Apply API key rotation
  // NOTE: Shallow copy is intentional. Provider objects are not mutated by downstream code.
  // Nested properties (if any) are never modified after creation.
  const providerWithRotatedKey = {
    ...cloneDeep(provider),
    ...provider,
    apiKey: getRotatedApiKey(provider)
  }
@@ -289,8 +293,10 @@ export async function fetchNoteSummary({ content, assistant }: { content: string
  }

  // Apply API key rotation
  // NOTE: Shallow copy is intentional. Provider objects are not mutated by downstream code.
  // Nested properties (if any) are never modified after creation.
  const providerWithRotatedKey = {
    ...cloneDeep(provider),
    ...provider,
    apiKey: getRotatedApiKey(provider)
  }
@@ -383,8 +389,10 @@ export async function fetchGenerate({
  }

  // Apply API key rotation
  // NOTE: Shallow copy is intentional. Provider objects are not mutated by downstream code.
  // Nested properties (if any) are never modified after creation.
  const providerWithRotatedKey = {
    ...cloneDeep(provider),
    ...provider,
    apiKey: getRotatedApiKey(provider)
  }
@@ -493,8 +501,10 @@ function getRotatedApiKey(provider: Provider): string {

export async function fetchModels(provider: Provider): Promise<Model[]> {
  // Apply API key rotation
  // NOTE: Shallow copy is intentional. Provider objects are not mutated by downstream code.
  // Nested properties (if any) are never modified after creation.
  const providerWithRotatedKey = {
    ...cloneDeep(provider),
    ...provider,
    apiKey: getRotatedApiKey(provider)
  }

@@ -4,8 +4,6 @@ export const EventEmitter = new Emittery()

export const EVENT_NAMES = {
  SEND_MESSAGE: 'SEND_MESSAGE',
  // APPEND_MESSAGE: 'APPEND_MESSAGE',
  // RECEIVE_MESSAGE: 'RECEIVE_MESSAGE',
  MESSAGE_COMPLETE: 'MESSAGE_COMPLETE',
  AI_AUTO_RENAME: 'AI_AUTO_RENAME',
  CLEAR_MESSAGES: 'CLEAR_MESSAGES',
@@ -15,7 +13,6 @@ export const EVENT_NAMES = {
  CHAT_COMPLETION_PAUSED: 'CHAT_COMPLETION_PAUSED',
  ESTIMATED_TOKEN_COUNT: 'ESTIMATED_TOKEN_COUNT',
  SHOW_ASSISTANTS: 'SHOW_ASSISTANTS',
  SHOW_CHAT_SETTINGS: 'SHOW_CHAT_SETTINGS',
  SHOW_TOPIC_SIDEBAR: 'SHOW_TOPIC_SIDEBAR',
  SWITCH_TOPIC_SIDEBAR: 'SWITCH_TOPIC_SIDEBAR',
  NEW_CONTEXT: 'NEW_CONTEXT',

@@ -162,7 +162,7 @@ export const searchKnowledgeBase = async (

  const searchResults: KnowledgeSearchResult[] = await window.api.knowledgeBase.search(
    {
      search: rewrite || query,
      search: query || rewrite || '',
      base: baseParams
    },
    currentSpan?.spanContext()

@@ -4,6 +4,7 @@ import i18n from '@renderer/i18n'
import { EVENT_NAMES, EventEmitter } from '@renderer/services/EventService'
import { NotificationService } from '@renderer/services/NotificationService'
import { estimateMessagesUsage } from '@renderer/services/TokenService'
import { updateOneBlock } from '@renderer/store/messageBlock'
import { selectMessagesForTopic } from '@renderer/store/newMessage'
import { newMessagesActions } from '@renderer/store/newMessage'
import type { Assistant } from '@renderer/types'
@@ -104,6 +105,25 @@ export const createBaseCallbacks = (deps: BaseCallbacksDependencies) => {
        blockManager.smartBlockUpdate(possibleBlockId, changes, blockManager.lastBlockType!, true)
      }

      // Fix: update all blocks still in STREAMING status to PAUSED/ERROR
      // This fixes the thinking timer continuing to run after a reply is stopped
      const currentMessage = getState().messages.entities[assistantMsgId]
      if (currentMessage) {
        const allBlockRefs = findAllBlocks(currentMessage)
        const blockState = getState().messageBlocks
        for (const blockRef of allBlockRefs) {
          const block = blockState.entities[blockRef.id]
          if (block && block.status === MessageBlockStatus.STREAMING && block.id !== possibleBlockId) {
            dispatch(
              updateOneBlock({
                id: block.id,
                changes: { status: isErrorTypeAbort ? MessageBlockStatus.PAUSED : MessageBlockStatus.ERROR }
              })
            )
          }
        }
      }

      const errorBlock = createErrorBlock(assistantMsgId, serializableError, { status: MessageBlockStatus.SUCCESS })
      await blockManager.handleBlockTransition(errorBlock, MessageBlockType.ERROR)
      const messageErrorUpdate = {

@@ -45,7 +45,7 @@ function normalizeModels<T>(models: T[], transformer: (entry: T) => Model | null
}

function adaptSdkModel(provider: Provider, model: SdkModel): Model | null {
  const id = pickPreferredString([(model as any)?.id, (model as any)?.modelId])
  const id = pickPreferredString([(model as any)?.id, (model as any)?.modelId, (model as any)?.name])
  const name = pickPreferredString([
    (model as any)?.display_name,
    (model as any)?.displayName,

@@ -71,7 +71,7 @@ const persistedReducer = persistReducer(
  {
    key: 'cherry-studio',
    storage,
    version: 182,
    version: 183,
    blacklist: ['runtime', 'messages', 'messageBlocks', 'tabs', 'toolPermissions'],
    migrate
  },

@@ -2981,6 +2981,22 @@ const migrateConfig = {
      logger.error('migrate 182 error', error as Error)
      return state
    }
  },
  '183': (state: RootState) => {
    try {
      state.llm.providers.forEach((provider) => {
        if (provider.id === SystemProviderIds.cherryin) {
          provider.apiHost = 'https://open.cherryin.cc'
          provider.anthropicApiHost = 'https://open.cherryin.cc'
        }
      })
      state.llm.providers = moveProvider(state.llm.providers, SystemProviderIds.poe, 10)
      logger.info('migrate 183 success')
      return state
    } catch (error) {
      logger.error('migrate 183 error', error as Error)
      return state
    }
  }
}

@@ -1,4 +1,4 @@
export type Tab = 'assistants' | 'topic' | 'settings'
export type Tab = 'assistants' | 'topic'

export type InputBarToolType =
  | 'new_topic'

Some files were not shown because too many files have changed in this diff.