diff --git a/README.md b/README.md index efcc7db72c..96f727a96e 100644 --- a/README.md +++ b/README.md @@ -130,7 +130,7 @@ Thank you for your support and contributions! # 🚀 Contributors - +

diff --git a/docs/branching-strategy.md b/docs/branching-strategy.md index 3196d09fe7..897763af16 100644 --- a/docs/branching-strategy.md +++ b/docs/branching-strategy.md @@ -49,3 +49,4 @@ When contributing to Cherry Studio, please follow these guidelines: - Include relevant issue numbers in your PR description - Make sure all tests pass and code meets our quality standards - Critical hotfixes may be submitted against `main` but must also be merged into `develop` +- Add a screenshot showing what changed if you add a new feature or modify a UI component. diff --git a/package.json b/package.json index 618736ece9..b069265ab3 100644 --- a/package.json +++ b/package.json @@ -50,33 +50,32 @@ "test:renderer": "vitest run", "test:renderer:ui": "vitest --ui", "test:renderer:coverage": "vitest run --coverage", + "test:lint": "eslint . --ext .js,.jsx,.cjs,.mjs,.ts,.tsx,.cts,.mts", "format": "prettier --write .", "lint": "eslint . --ext .js,.jsx,.cjs,.mjs,.ts,.tsx,.cts,.mts --fix", "postinstall": "electron-builder install-app-deps", "prepare": "husky" }, "dependencies": { - "@cherrystudio/embedjs": "^0.1.28", - "@cherrystudio/embedjs-libsql": "^0.1.28", - "@cherrystudio/embedjs-loader-csv": "^0.1.28", - "@cherrystudio/embedjs-loader-image": "^0.1.28", - "@cherrystudio/embedjs-loader-markdown": "^0.1.28", - "@cherrystudio/embedjs-loader-msoffice": "^0.1.28", - "@cherrystudio/embedjs-loader-pdf": "^0.1.28", - "@cherrystudio/embedjs-loader-sitemap": "^0.1.28", - "@cherrystudio/embedjs-loader-web": "^0.1.28", - "@cherrystudio/embedjs-loader-xml": "^0.1.28", - "@cherrystudio/embedjs-openai": "^0.1.28", + "@cherrystudio/embedjs": "^0.1.31", + "@cherrystudio/embedjs-libsql": "^0.1.31", + "@cherrystudio/embedjs-loader-csv": "^0.1.31", + "@cherrystudio/embedjs-loader-image": "^0.1.31", + "@cherrystudio/embedjs-loader-markdown": "^0.1.31", + "@cherrystudio/embedjs-loader-msoffice": "^0.1.31", + "@cherrystudio/embedjs-loader-pdf": "^0.1.31", + "@cherrystudio/embedjs-loader-sitemap": "^0.1.31", + "@cherrystudio/embedjs-loader-web": "^0.1.31", + "@cherrystudio/embedjs-loader-xml": "^0.1.31", + "@cherrystudio/embedjs-openai": "^0.1.31", "@electron-toolkit/utils": "^3.0.0", "@electron/notarize": "^2.5.0", "@langchain/community": "^0.3.36", "@strongtz/win32-arm64-msvc": "^0.4.7", "@tanstack/react-query": "^5.27.0", "@types/react-infinite-scroll-component": "^5.0.0", - "adm-zip": "^0.5.16", "archiver": "^7.0.1", "async-mutex": "^0.5.0", - "bufferutil": "^4.0.9", "color": "^5.0.0", "diff": "^7.0.0", "docx": "^9.0.2", @@ -85,7 +84,6 @@ "electron-updater": "6.6.4", "electron-window-state": "^5.0.3", "epub": "patch:epub@npm%3A1.3.0#~/.yarn/patches/epub-npm-1.3.0-8325494ffe.patch", - "extract-zip": "^2.0.1", "fast-xml-parser": "^5.2.0", "fetch-socks": "^1.3.2", "fs-extra": "^11.2.0", @@ -101,7 +99,6 @@ "tar": "^7.4.3", "turndown": "^7.2.0", "turndown-plugin-gfm": "^1.0.2", - "undici": "^7.4.0", "webdav": "^5.8.0", "ws": "^8.18.1", "zipread": "^1.3.3" diff --git a/packages/shared/IpcChannel.ts b/packages/shared/IpcChannel.ts index 1dd61c6364..b4c4c571d6 100644 --- a/packages/shared/IpcChannel.ts +++ b/packages/shared/IpcChannel.ts @@ -1,4 +1,5 @@ export enum IpcChannel { + App_GetCacheSize = 'app:get-cache-size', App_ClearCache = 'app:clear-cache', App_SetLaunchOnBoot = 'app:set-launch-on-boot', App_SetLanguage = 'app:set-language', @@ -134,6 +135,9 @@ export enum IpcChannel { System_GetDeviceType = 'system:getDeviceType', System_GetHostname = 'system:getHostname', + // DevTools + System_ToggleDevTools = 
'system:toggleDevTools', + // events BackupProgress = 'backup-progress', ThemeChange = 'theme:change', diff --git a/src/main/embeddings/EmbeddingsFactory.ts b/src/main/embeddings/EmbeddingsFactory.ts index 5924d00d7d..69de15171e 100644 --- a/src/main/embeddings/EmbeddingsFactory.ts +++ b/src/main/embeddings/EmbeddingsFactory.ts @@ -23,14 +23,14 @@ export default class EmbeddingsFactory { azureOpenAIApiVersion: apiVersion, azureOpenAIApiDeploymentName: model, azureOpenAIApiInstanceName: getInstanceName(baseURL), - dimensions, + // dimensions, batchSize }) } return new OpenAiEmbeddings({ model, apiKey, - dimensions, + // dimensions, batchSize, configuration: { baseURL } }) diff --git a/src/main/embeddings/VoyageEmbeddings.ts b/src/main/embeddings/VoyageEmbeddings.ts index fc0c8b9fe7..ce21afe580 100644 --- a/src/main/embeddings/VoyageEmbeddings.ts +++ b/src/main/embeddings/VoyageEmbeddings.ts @@ -1,6 +1,5 @@ import { BaseEmbeddings } from '@cherrystudio/embedjs-interfaces' import { VoyageEmbeddings as _VoyageEmbeddings } from '@langchain/community/embeddings/voyage' -import Logger from 'electron-log' export default class VoyageEmbeddings extends BaseEmbeddings { private model: _VoyageEmbeddings @@ -12,7 +11,6 @@ export default class VoyageEmbeddings extends BaseEmbeddings { if (!this.configuration.outputDimension) { throw new Error('You need to pass in the optional dimensions parameter for this model') } - Logger.log('VoyageEmbeddings', this.configuration) this.model = new _VoyageEmbeddings(this.configuration) } override async getDimensions(): Promise { diff --git a/src/main/index.ts b/src/main/index.ts index d7d4e23db5..fb79b1e842 100644 --- a/src/main/index.ts +++ b/src/main/index.ts @@ -1,10 +1,11 @@ import { electronApp, optimizer } from '@electron-toolkit/utils' import { replaceDevtoolsFont } from '@main/utils/windowUtil' import { IpcChannel } from '@shared/IpcChannel' -import { app, ipcMain } from 'electron' +import { app, BrowserWindow, ipcMain } from 'electron' import installExtension, { REACT_DEVELOPER_TOOLS, REDUX_DEVTOOLS } from 'electron-devtools-installer' import Logger from 'electron-log' +import { isDev, isMac, isWin } from './constant' import { registerIpc } from './ipc' import { configManager } from './services/ConfigManager' import mcpService from './services/MCPService' @@ -21,6 +22,19 @@ import { setUserDataDir } from './utils/file' Logger.initialize() +// in production mode, handle uncaught exception and unhandled rejection globally +if (!isDev) { + // handle uncaught exception + process.on('uncaughtException', (error) => { + Logger.error('Uncaught Exception:', error) + }) + + // handle unhandled rejection + process.on('unhandledRejection', (reason, promise) => { + Logger.error('Unhandled Rejection at:', promise, 'reason:', reason) + }) +} + // Check for single instance lock if (!app.requestSingleInstanceLock()) { app.quit() @@ -64,18 +78,23 @@ if (!app.requestSingleInstanceLock()) { // Setup deep link for AppImage on Linux await setupAppImageDeepLink() - if (process.env.NODE_ENV === 'development') { + if (isDev) { installExtension([REDUX_DEVTOOLS, REACT_DEVELOPER_TOOLS]) .then((name) => console.log(`Added Extension: ${name}`)) .catch((err) => console.log('An error occurred: ', err)) } ipcMain.handle(IpcChannel.System_GetDeviceType, () => { - return process.platform === 'darwin' ? 'mac' : process.platform === 'win32' ? 'windows' : 'linux' + return isMac ? 'mac' : isWin ? 
'windows' : 'linux' }) ipcMain.handle(IpcChannel.System_GetHostname, () => { return require('os').hostname() }) + + ipcMain.handle(IpcChannel.System_ToggleDevTools, (e) => { + const win = BrowserWindow.fromWebContents(e.sender) + win && win.webContents.toggleDevTools() + }) }) registerProtocolClient(app) diff --git a/src/main/ipc.ts b/src/main/ipc.ts index ecb74a57b4..665e8114b7 100644 --- a/src/main/ipc.ts +++ b/src/main/ipc.ts @@ -29,10 +29,11 @@ import storeSyncService from './services/StoreSyncService' import { TrayService } from './services/TrayService' import { setOpenLinkExternal } from './services/WebviewService' import { windowService } from './services/WindowService' -import { getResourcePath } from './utils' +import { calculateDirectorySize, getResourcePath } from './utils' import { decrypt, encrypt } from './utils/aes' -import { getConfigDir, getFilesDir } from './utils/file' +import { getCacheDir, getConfigDir, getFilesDir } from './utils/file' import { compress, decompress } from './utils/zip' + const fileManager = new FileStorage() const backupManager = new BackupManager() const exportService = new ExportService(fileManager) @@ -179,6 +180,21 @@ export function registerIpc(mainWindow: BrowserWindow, app: Electron.App) { } }) + // get cache size + ipcMain.handle(IpcChannel.App_GetCacheSize, async () => { + const cachePath = getCacheDir() + log.info(`Calculating cache size for path: ${cachePath}`) + + try { + const sizeInBytes = await calculateDirectorySize(cachePath) + const sizeInMB = (sizeInBytes / (1024 * 1024)).toFixed(2) + return `${sizeInMB}` + } catch (error: any) { + log.error(`Failed to calculate cache size for ${cachePath}: ${error.message}`) + return '0' + } + }) + // check for update ipcMain.handle(IpcChannel.App_CheckForUpdate, async () => { await appUpdater.checkForUpdates() diff --git a/src/main/reranker/BaseReranker.ts b/src/main/reranker/BaseReranker.ts index 5a8bd6ee2a..a88d0883ae 100644 --- a/src/main/reranker/BaseReranker.ts +++ b/src/main/reranker/BaseReranker.ts @@ -38,7 +38,7 @@ export default abstract class BaseReranker { protected getRerankRequestBody(query: string, searchResults: ExtractChunkData[]) { const provider = this.base.rerankModelProvider const documents = searchResults.map((doc) => doc.pageContent) - const topN = this.base.topN || 5 + const topN = this.base.topN || 10 if (provider === 'voyageai') { return { diff --git a/src/main/services/BackupManager.ts b/src/main/services/BackupManager.ts index ef96529903..ea8521aa16 100644 --- a/src/main/services/BackupManager.ts +++ b/src/main/services/BackupManager.ts @@ -4,8 +4,8 @@ import archiver from 'archiver' import { exec } from 'child_process' import { app } from 'electron' import Logger from 'electron-log' -import extract from 'extract-zip' import * as fs from 'fs-extra' +import StreamZip from 'node-stream-zip' import * as path from 'path' import { createClient, CreateDirectoryOptions, FileStat } from 'webdav' @@ -231,15 +231,10 @@ class BackupManager { Logger.log('[backup] step 1: unzip backup file', this.tempDir) - // 使用 extract-zip 解压 - await extract(backupPath, { - dir: this.tempDir, - onEntry: () => { - // 这里可以处理进度,但 extract-zip 不提供总条目数信息 - onProgress({ stage: 'extracting', progress: 15, total: 100 }) - } - }) - onProgress({ stage: 'extracting', progress: 25, total: 100 }) + const zip = new StreamZip.async({ file: backupPath }) + onProgress({ stage: 'extracting', progress: 15, total: 100 }) + await zip.extract(null, this.tempDir) + onProgress({ stage: 'extracted', progress: 25, total: 100 
}) Logger.log('[backup] step 2: read data.json') // 读取 data.json diff --git a/src/main/services/ProxyManager.ts b/src/main/services/ProxyManager.ts index 84d3f84038..3a4aa09438 100644 --- a/src/main/services/ProxyManager.ts +++ b/src/main/services/ProxyManager.ts @@ -1,8 +1,7 @@ import { ProxyConfig as _ProxyConfig, session } from 'electron' -import { socksDispatcher } from 'fetch-socks' import { getSystemProxy } from 'os-proxy-config' import { ProxyAgent as GeneralProxyAgent } from 'proxy-agent' -import { ProxyAgent, setGlobalDispatcher } from 'undici' +// import { ProxyAgent, setGlobalDispatcher } from 'undici' type ProxyMode = 'system' | 'custom' | 'none' @@ -121,22 +120,22 @@ export class ProxyManager { return this.config.url || '' } - setGlobalProxy() { - const proxyUrl = this.config.url - if (proxyUrl) { - const [protocol, address] = proxyUrl.split('://') - const [host, port] = address.split(':') - if (!protocol.includes('socks')) { - setGlobalDispatcher(new ProxyAgent(proxyUrl)) - } else { - global[Symbol.for('undici.globalDispatcher.1')] = socksDispatcher({ - port: parseInt(port), - type: protocol === 'socks5' ? 5 : 4, - host: host - }) - } - } - } + // setGlobalProxy() { + // const proxyUrl = this.config.url + // if (proxyUrl) { + // const [protocol, address] = proxyUrl.split('://') + // const [host, port] = address.split(':') + // if (!protocol.includes('socks')) { + // setGlobalDispatcher(new ProxyAgent(proxyUrl)) + // } else { + // global[Symbol.for('undici.globalDispatcher.1')] = socksDispatcher({ + // port: parseInt(port), + // type: protocol === 'socks5' ? 5 : 4, + // host: host + // }) + // } + // } + // } } export const proxyManager = new ProxyManager() diff --git a/src/main/services/WebviewService.ts b/src/main/services/WebviewService.ts index 50da5cd1e5..7a14e65c19 100644 --- a/src/main/services/WebviewService.ts +++ b/src/main/services/WebviewService.ts @@ -6,12 +6,8 @@ import { session, shell, webContents } from 'electron' */ export function initSessionUserAgent() { const wvSession = session.fromPartition('persist:webview') - const newChromeVersion = '135.0.7049.96' const originUA = wvSession.getUserAgent() - const newUA = originUA - .replace(/CherryStudio\/\S+\s/, '') - .replace(/Electron\/\S+\s/, '') - .replace(/Chrome\/\d+\.\d+\.\d+\.\d+/, `Chrome/${newChromeVersion}`) + const newUA = originUA.replace(/CherryStudio\/\S+\s/, '').replace(/Electron\/\S+\s/, '') wvSession.setUserAgent(newUA) } diff --git a/src/main/services/WindowService.ts b/src/main/services/WindowService.ts index bac15cca07..aff511d748 100644 --- a/src/main/services/WindowService.ts +++ b/src/main/services/WindowService.ts @@ -198,10 +198,21 @@ export class WindowService { // 当按下Escape键且窗口处于全屏状态时退出全屏 if (input.key === 'Escape' && !input.alt && !input.control && !input.meta && !input.shift) { if (mainWindow.isFullScreen()) { - event.preventDefault() - mainWindow.setFullScreen(false) + // 获取 shortcuts 配置 + const shortcuts = configManager.getShortcuts() + const exitFullscreenShortcut = shortcuts.find((s) => s.key === 'exit_fullscreen') + if (exitFullscreenShortcut == undefined) { + mainWindow.setFullScreen(false) + return + } + if (exitFullscreenShortcut?.enabled) { + event.preventDefault() + mainWindow.setFullScreen(false) + return + } } } + return }) } @@ -306,7 +317,7 @@ export class WindowService { /** * 上述逻辑以下: - * win/linux: 是“开启托盘+设置关闭时最小化到托盘”的情况 + * win/linux: 是"开启托盘+设置关闭时最小化到托盘"的情况 * mac: 任何情况都会到这里,因此需要单独处理mac */ diff --git a/src/main/utils/file.ts b/src/main/utils/file.ts index 
25a4ed7323..f01a6d47bf 100644 --- a/src/main/utils/file.ts +++ b/src/main/utils/file.ts @@ -81,6 +81,10 @@ export function getConfigDir() { return path.join(os.homedir(), '.cherrystudio', 'config') } +export function getCacheDir() { + return path.join(app.getPath('userData'), 'Cache') +} + export function getAppConfigDir(name: string) { return path.join(getConfigDir(), name) } diff --git a/src/main/utils/index.ts b/src/main/utils/index.ts index 4a6fde670d..a5f63fcc42 100644 --- a/src/main/utils/index.ts +++ b/src/main/utils/index.ts @@ -1,4 +1,5 @@ import fs from 'node:fs' +import fsAsync from 'node:fs/promises' import path from 'node:path' import { app } from 'electron' @@ -52,3 +53,20 @@ export function makeSureDirExists(dir: string) { fs.mkdirSync(dir, { recursive: true }) } } + +export async function calculateDirectorySize(directoryPath: string): Promise { + let totalSize = 0 + const items = await fsAsync.readdir(directoryPath) + + for (const item of items) { + const itemPath = path.join(directoryPath, item) + const stats = await fsAsync.stat(itemPath) + + if (stats.isFile()) { + totalSize += stats.size + } else if (stats.isDirectory()) { + totalSize += await calculateDirectorySize(itemPath) + } + } + return totalSize +} diff --git a/src/preload/index.ts b/src/preload/index.ts index eeea6ec3de..2a2f378fa2 100644 --- a/src/preload/index.ts +++ b/src/preload/index.ts @@ -23,11 +23,15 @@ const api = { ipcRenderer.invoke(IpcChannel.App_HandleZoomFactor, delta, reset), setAutoUpdate: (isActive: boolean) => ipcRenderer.invoke(IpcChannel.App_SetAutoUpdate, isActive), openWebsite: (url: string) => ipcRenderer.invoke(IpcChannel.Open_Website, url), + getCacheSize: () => ipcRenderer.invoke(IpcChannel.App_GetCacheSize), clearCache: () => ipcRenderer.invoke(IpcChannel.App_ClearCache), system: { getDeviceType: () => ipcRenderer.invoke(IpcChannel.System_GetDeviceType), getHostname: () => ipcRenderer.invoke(IpcChannel.System_GetHostname) }, + devTools: { + toggle: () => ipcRenderer.invoke(IpcChannel.System_ToggleDevTools) + }, zip: { compress: (text: string) => ipcRenderer.invoke(IpcChannel.Zip_Compress, text), decompress: (text: Buffer) => ipcRenderer.invoke(IpcChannel.Zip_Decompress, text) diff --git a/src/renderer/src/assets/styles/index.scss b/src/renderer/src/assets/styles/index.scss index 0662045cb9..3b5d98e941 100644 --- a/src/renderer/src/assets/styles/index.scss +++ b/src/renderer/src/assets/styles/index.scss @@ -230,6 +230,7 @@ body, display: -webkit-box !important; -webkit-line-clamp: 1; -webkit-box-orient: vertical; + line-clamp: 1; overflow: hidden; text-overflow: ellipsis; white-space: normal; diff --git a/src/renderer/src/components/Avatar/EmojiAvatar.tsx b/src/renderer/src/components/Avatar/EmojiAvatar.tsx new file mode 100644 index 0000000000..553869698a --- /dev/null +++ b/src/renderer/src/components/Avatar/EmojiAvatar.tsx @@ -0,0 +1,52 @@ +import React, { memo } from 'react' +import styled from 'styled-components' + +interface EmojiAvatarProps { + children: string + size?: number + fontSize?: number + onClick?: React.MouseEventHandler + className?: string + style?: React.CSSProperties +} + +const EmojiAvatar = ({ + ref, + children, + size = 31, + fontSize, + onClick, + className, + style +}: EmojiAvatarProps & { ref?: React.RefObject }) => ( + + {children} + +) + +EmojiAvatar.displayName = 'EmojiAvatar' + +const StyledEmojiAvatar = styled.div<{ $size: number; $fontSize: number }>` + display: flex; + align-items: center; + justify-content: center; + background-color: 
var(--color-background-soft); + border: 0.5px solid var(--color-border); + border-radius: 20%; + cursor: pointer; + width: ${(props) => props.$size}px; + height: ${(props) => props.$size}px; + font-size: ${(props) => props.$fontSize}px; + transition: opacity 0.3s ease; + &:hover { + opacity: 0.8; + } +` + +export default memo(EmojiAvatar) diff --git a/src/renderer/src/components/CustomCollapse.tsx b/src/renderer/src/components/CustomCollapse.tsx index c02f45c60c..2828379399 100644 --- a/src/renderer/src/components/CustomCollapse.tsx +++ b/src/renderer/src/components/CustomCollapse.tsx @@ -36,7 +36,7 @@ const CustomCollapse: FC = ({ const defaultCollapseItemStyles = { header: { - padding: '8px 16px', + padding: '3px 16px', alignItems: 'center', justifyContent: 'space-between', background: 'var(--color-background-soft)', diff --git a/src/renderer/src/components/CustomTag.tsx b/src/renderer/src/components/CustomTag.tsx index 76334ae6cb..c875ba01a4 100644 --- a/src/renderer/src/components/CustomTag.tsx +++ b/src/renderer/src/components/CustomTag.tsx @@ -1,6 +1,6 @@ import { CloseOutlined } from '@ant-design/icons' import { Tooltip } from 'antd' -import { FC, memo } from 'react' +import { FC, memo, useMemo } from 'react' import styled from 'styled-components' interface CustomTagProps { @@ -14,13 +14,22 @@ interface CustomTagProps { } const CustomTag: FC = ({ children, icon, color, size = 12, tooltip, closable = false, onClose }) => { - return ( - + const tagContent = useMemo( + () => ( {icon && icon} {children} {closable && } + ), + [children, closable, color, icon, onClose, size] + ) + + return tooltip ? ( + + {tagContent} + ) : ( + tagContent ) } diff --git a/src/renderer/src/components/ModelTagsWithLabel.tsx b/src/renderer/src/components/ModelTagsWithLabel.tsx index 9e5feb45b0..86a04dd454 100644 --- a/src/renderer/src/components/ModelTagsWithLabel.tsx +++ b/src/renderer/src/components/ModelTagsWithLabel.tsx @@ -23,6 +23,7 @@ interface ModelTagsProps { showToolsCalling?: boolean size?: number showLabel?: boolean + showTooltip?: boolean style?: React.CSSProperties } @@ -33,6 +34,7 @@ const ModelTagsWithLabel: FC = ({ showToolsCalling = true, size = 12, showLabel = true, + showTooltip = true, style }) => { const { t } = useTranslation() @@ -73,7 +75,7 @@ const ModelTagsWithLabel: FC = ({ size={size} color="#00b96b" icon={} - tooltip={t('models.type.vision')}> + tooltip={showTooltip ? t('models.type.vision') : undefined}> {shouldShowLabel ? t('models.type.vision') : ''} )} @@ -82,7 +84,7 @@ const ModelTagsWithLabel: FC = ({ size={size} color="#1677ff" icon={} - tooltip={t('models.type.websearch')}> + tooltip={showTooltip ? t('models.type.websearch') : undefined}> {shouldShowLabel ? t('models.type.websearch') : ''} )} @@ -91,7 +93,7 @@ const ModelTagsWithLabel: FC = ({ size={size} color="#6372bd" icon={} - tooltip={t('models.type.reasoning')}> + tooltip={showTooltip ? t('models.type.reasoning') : undefined}> {shouldShowLabel ? t('models.type.reasoning') : ''} )} @@ -100,19 +102,13 @@ const ModelTagsWithLabel: FC = ({ size={size} color="#f18737" icon={} - tooltip={t('models.type.function_calling')}> + tooltip={showTooltip ? t('models.type.function_calling') : undefined}> {shouldShowLabel ? 
t('models.type.function_calling') : ''} )} - {isEmbeddingModel(model) && ( - - )} - {showFree && isFreeModel(model) && ( - - )} - {isRerankModel(model) && ( - - )} + {isEmbeddingModel(model) && } + {showFree && isFreeModel(model) && } + {isRerankModel(model) && } ) } diff --git a/src/renderer/src/components/Popups/SelectModelPopup/hook.ts b/src/renderer/src/components/Popups/SelectModelPopup/hook.ts index 93441acb21..4a8206df69 100644 --- a/src/renderer/src/components/Popups/SelectModelPopup/hook.ts +++ b/src/renderer/src/components/Popups/SelectModelPopup/hook.ts @@ -21,9 +21,8 @@ export function useScrollState() { focusPage: (modelItems: FlatListItem[], currentIndex: number, step: number) => dispatch({ type: 'FOCUS_PAGE', payload: { modelItems, currentIndex, step } }), searchChanged: (searchText: string) => dispatch({ type: 'SEARCH_CHANGED', payload: { searchText } }), - updateOnListChange: (modelItems: FlatListItem[]) => - dispatch({ type: 'UPDATE_ON_LIST_CHANGE', payload: { modelItems } }), - initScroll: () => dispatch({ type: 'INIT_SCROLL' }) + focusOnListChange: (modelItems: FlatListItem[]) => + dispatch({ type: 'FOCUS_ON_LIST_CHANGE', payload: { modelItems } }) }), [] ) diff --git a/src/renderer/src/components/Popups/SelectModelPopup/popup.tsx b/src/renderer/src/components/Popups/SelectModelPopup/popup.tsx index 4a07809a68..a02c63bcb2 100644 --- a/src/renderer/src/components/Popups/SelectModelPopup/popup.tsx +++ b/src/renderer/src/components/Popups/SelectModelPopup/popup.tsx @@ -11,7 +11,16 @@ import { classNames } from '@renderer/utils/style' import { Avatar, Divider, Empty, Input, InputRef, Modal } from 'antd' import { first, sortBy } from 'lodash' import { Search } from 'lucide-react' -import { useCallback, useDeferredValue, useEffect, useMemo, useRef, useState } from 'react' +import { + startTransition, + useCallback, + useDeferredValue, + useEffect, + useLayoutEffect, + useMemo, + useRef, + useState +} from 'react' import React from 'react' import { useTranslation } from 'react-i18next' import { FixedSizeList } from 'react-window' @@ -34,7 +43,7 @@ interface Props extends PopupParams { const PopupContainer: React.FC = ({ model, resolve }) => { const { t } = useTranslation() const { providers } = useProviders() - const { pinnedModels, togglePinnedModel, loading: loadingPinnedModels } = usePinnedModels() + const { pinnedModels, togglePinnedModel, loading } = usePinnedModels() const [open, setOpen] = useState(true) const inputRef = useRef(null) const listRef = useRef(null) @@ -49,29 +58,40 @@ const PopupContainer: React.FC = ({ model, resolve }) => { focusedItemKey, scrollTrigger, lastScrollOffset, - stickyGroup: _stickyGroup, + stickyGroup, isMouseOver, - setFocusedItemKey, + setFocusedItemKey: _setFocusedItemKey, setScrollTrigger, - setLastScrollOffset, - setStickyGroup, + setLastScrollOffset: _setLastScrollOffset, + setStickyGroup: _setStickyGroup, setIsMouseOver, focusNextItem, focusPage, searchChanged, - updateOnListChange, - initScroll + focusOnListChange } = useScrollState() - const stickyGroup = useDeferredValue(_stickyGroup) const firstGroupRef = useRef(null) - const togglePin = useCallback( - async (modelId: string) => { - await togglePinnedModel(modelId) - setScrollTrigger('none') // pin操作不触发滚动 + const setFocusedItemKey = useCallback( + (key: string) => { + startTransition(() => _setFocusedItemKey(key)) }, - [togglePinnedModel, setScrollTrigger] + [_setFocusedItemKey] + ) + + const setLastScrollOffset = useCallback( + (offset: number) => { + startTransition(() => 
_setLastScrollOffset(offset)) + }, + [_setLastScrollOffset] + ) + + const setStickyGroup = useCallback( + (group: FlatListItem | null) => { + startTransition(() => _setStickyGroup(group)) + }, + [_setStickyGroup] ) // 根据输入的文本筛选模型 @@ -89,14 +109,11 @@ const PopupContainer: React.FC = ({ model, resolve }) => { const lowerFullName = fullName.toLowerCase() return keywords.every((keyword) => lowerFullName.includes(keyword)) }) - } else { - // 如果不是搜索状态,过滤掉已固定的模型 - models = models.filter((m) => !pinnedModels.includes(getModelUniqId(m))) } return sortBy(models, ['group', 'name']) }, - [searchText, t, pinnedModels] + [searchText, t] ) // 创建模型列表项 @@ -116,7 +133,7 @@ const PopupContainer: React.FC = ({ model, resolve }) => { ), tags: ( - + ), icon: ( @@ -137,7 +154,7 @@ const PopupContainer: React.FC = ({ model, resolve }) => { const items: FlatListItem[] = [] // 添加置顶模型分组(仅在无搜索文本时) - if (pinnedModels.length > 0 && searchText.length === 0) { + if (searchText.length === 0 && pinnedModels.length > 0) { const pinnedItems = providers.flatMap((p) => p.models.filter((m) => pinnedModels.includes(getModelUniqId(m))).map((m) => createModelItem(m, p, true)) ) @@ -158,7 +175,7 @@ const PopupContainer: React.FC = ({ model, resolve }) => { // 添加常规模型分组 providers.forEach((p) => { const filteredModels = getFilteredModels(p).filter( - (m) => !pinnedModels.includes(getModelUniqId(m)) || searchText.length > 0 + (m) => searchText.length > 0 || !pinnedModels.includes(getModelUniqId(m)) ) if (filteredModels.length === 0) return @@ -198,48 +215,53 @@ const PopupContainer: React.FC = ({ model, resolve }) => { const updateStickyGroup = useCallback( (scrollOffset?: number) => { if (listItems.length === 0) { - setStickyGroup(null) + stickyGroup && setStickyGroup(null) return } + let newStickyGroup: FlatListItem | null = null + // 基于滚动位置计算当前可见的第一个项的索引 const estimatedIndex = Math.floor((scrollOffset ?? 
lastScrollOffset) / ITEM_HEIGHT) // 从该索引向前查找最近的分组标题 for (let i = estimatedIndex - 1; i >= 0; i--) { if (i < listItems.length && listItems[i]?.type === 'group') { - setStickyGroup(listItems[i]) - return + newStickyGroup = listItems[i] + break } } // 找不到则使用第一个分组标题 - setStickyGroup(firstGroupRef.current) - }, - [listItems, lastScrollOffset, setStickyGroup] - ) + if (!newStickyGroup) newStickyGroup = firstGroupRef.current - // 在listItems变化时更新sticky group - useEffect(() => { - updateStickyGroup() - }, [listItems, updateStickyGroup]) + if (stickyGroup?.key !== newStickyGroup?.key) { + setStickyGroup(newStickyGroup) + } + }, + [listItems, lastScrollOffset, setStickyGroup, stickyGroup] + ) // 处理列表滚动事件,更新lastScrollOffset并更新sticky分组 const handleScroll = useCallback( ({ scrollOffset }) => { setLastScrollOffset(scrollOffset) - updateStickyGroup(scrollOffset) }, - [updateStickyGroup, setLastScrollOffset] + [setLastScrollOffset] ) - // 在列表项更新时,更新焦点项 + // 列表项更新时,更新焦点 useEffect(() => { - updateOnListChange(modelItems) - }, [modelItems, updateOnListChange]) + if (!loading) focusOnListChange(modelItems) + }, [modelItems, focusOnListChange, loading]) + + // 列表项更新时,更新sticky分组 + useEffect(() => { + if (!loading) updateStickyGroup() + }, [modelItems, updateStickyGroup, loading]) // 滚动到聚焦项 - useEffect(() => { + useLayoutEffect(() => { if (scrollTrigger === 'none' || !focusedItemKey) return const index = listItems.findIndex((item) => item.key === focusedItemKey) @@ -301,23 +323,12 @@ const PopupContainer: React.FC = ({ model, resolve }) => { break case 'Escape': e.preventDefault() - setScrollTrigger('none') setOpen(false) resolve(undefined) break } }, - [ - focusedItemKey, - modelItems, - handleItemClick, - open, - resolve, - setIsMouseOver, - focusNextItem, - focusPage, - setScrollTrigger - ] + [focusedItemKey, modelItems, handleItemClick, open, resolve, setIsMouseOver, focusNextItem, focusPage] ) useEffect(() => { @@ -326,11 +337,10 @@ const PopupContainer: React.FC = ({ model, resolve }) => { }, [handleKeyDown]) const onCancel = useCallback(() => { - setScrollTrigger('initial') setOpen(false) - }, [setScrollTrigger]) + }, []) - const onClose = useCallback(async () => { + const onAfterClose = useCallback(async () => { setScrollTrigger('initial') resolve(undefined) SelectModelPopup.hide() @@ -338,10 +348,16 @@ const PopupContainer: React.FC = ({ model, resolve }) => { // 初始化焦点和滚动位置 useEffect(() => { - if (!open || loadingPinnedModels) return + if (!open) return setTimeout(() => inputRef.current?.focus(), 0) - initScroll() - }, [open, initScroll, loadingPinnedModels]) + }, [open]) + + const togglePin = useCallback( + async (modelId: string) => { + await togglePinnedModel(modelId) + }, + [togglePinnedModel] + ) const RowData = useMemo( (): VirtualizedRowData => ({ @@ -364,7 +380,7 @@ const PopupContainer: React.FC = ({ model, resolve }) => { centered open={open} onCancel={onCancel} - afterClose={onClose} + afterClose={onAfterClose} width={600} transitionName="animation-move-down" styles={{ @@ -407,7 +423,7 @@ const PopupContainer: React.FC = ({ model, resolve }) => { {listItems.length > 0 ? ( - !isMouseOver && setIsMouseOver(true)}> + !isMouseOver && startTransition(() => setIsMouseOver(true))}> {/* Sticky Group Banner,它会替换第一个分组名称 */} {stickyGroup?.name} } + const isFocused = item.key === focusedItemKey + return (
{item.type === 'group' ? ( @@ -462,11 +480,11 @@ const VirtualizedRow = React.memo( ) : ( handleItemClick(item)} - onMouseEnter={() => setFocusedItemKey(item.key)}> + onMouseOver={() => !isFocused && setFocusedItemKey(item.key)}> {item.icon} {item.name} diff --git a/src/renderer/src/components/Popups/SelectModelPopup/reducer.ts b/src/renderer/src/components/Popups/SelectModelPopup/reducer.ts index 45e3390ea8..974fc5b509 100644 --- a/src/renderer/src/components/Popups/SelectModelPopup/reducer.ts +++ b/src/renderer/src/components/Popups/SelectModelPopup/reducer.ts @@ -72,7 +72,7 @@ export const scrollReducer = (state: ScrollState, action: ScrollAction): ScrollS scrollTrigger: action.payload.searchText ? 'search' : 'initial' } - case 'UPDATE_ON_LIST_CHANGE': { + case 'FOCUS_ON_LIST_CHANGE': { const { modelItems } = action.payload // 在列表变化时尝试聚焦一个模型: @@ -96,13 +96,6 @@ export const scrollReducer = (state: ScrollState, action: ScrollAction): ScrollS } } - case 'INIT_SCROLL': - return { - ...state, - scrollTrigger: 'initial', - lastScrollOffset: 0 - } - default: return state } diff --git a/src/renderer/src/components/Popups/SelectModelPopup/types.ts b/src/renderer/src/components/Popups/SelectModelPopup/types.ts index 41ec04c583..745e9688bb 100644 --- a/src/renderer/src/components/Popups/SelectModelPopup/types.ts +++ b/src/renderer/src/components/Popups/SelectModelPopup/types.ts @@ -38,5 +38,4 @@ export type ScrollAction = | { type: 'FOCUS_NEXT_ITEM'; payload: { modelItems: FlatListItem[]; step: number } } | { type: 'FOCUS_PAGE'; payload: { modelItems: FlatListItem[]; currentIndex: number; step: number } } | { type: 'SEARCH_CHANGED'; payload: { searchText: string } } - | { type: 'UPDATE_ON_LIST_CHANGE'; payload: { modelItems: FlatListItem[] } } - | { type: 'INIT_SCROLL'; payload?: void } + | { type: 'FOCUS_ON_LIST_CHANGE'; payload: { modelItems: FlatListItem[] } } diff --git a/src/renderer/src/components/Popups/UserPopup.tsx b/src/renderer/src/components/Popups/UserPopup.tsx index 9d7569effa..ac4b9eca93 100644 --- a/src/renderer/src/components/Popups/UserPopup.tsx +++ b/src/renderer/src/components/Popups/UserPopup.tsx @@ -1,4 +1,5 @@ import DefaultAvatar from '@renderer/assets/images/avatar.png' +import EmojiAvatar from '@renderer/components/Avatar/EmojiAvatar' import useAvatar from '@renderer/hooks/useAvatar' import { useSettings } from '@renderer/hooks/useSettings' import ImageStorage from '@renderer/services/ImageStorage' @@ -154,7 +155,13 @@ const PopupContainer: React.FC = ({ resolve }) => { } }} placement="bottom"> - {isEmoji(avatar) ? {avatar} : } + {isEmoji(avatar) ? 
( + + {avatar} + + ) : ( + + )} @@ -182,23 +189,6 @@ const UserAvatar = styled(Avatar)` } ` -const EmojiAvatar = styled.div` - cursor: pointer; - width: 80px; - height: 80px; - border-radius: 20%; - background-color: var(--color-background-soft); - display: flex; - align-items: center; - justify-content: center; - font-size: 40px; - transition: opacity 0.3s ease; - border: 0.5px solid var(--color-border); - &:hover { - opacity: 0.8; - } -` - export default class UserPopup { static topviewId = 0 static hide() { diff --git a/src/renderer/src/components/QuickPanel/types.ts b/src/renderer/src/components/QuickPanel/types.ts index e122aa1d29..7cef05be23 100644 --- a/src/renderer/src/components/QuickPanel/types.ts +++ b/src/renderer/src/components/QuickPanel/types.ts @@ -64,3 +64,5 @@ export interface QuickPanelContextType { readonly beforeAction?: (Options: QuickPanelCallBackOptions) => void readonly afterAction?: (Options: QuickPanelCallBackOptions) => void } + +export type QuickPanelScrollTrigger = 'initial' | 'keyboard' | 'none' diff --git a/src/renderer/src/components/QuickPanel/view.tsx b/src/renderer/src/components/QuickPanel/view.tsx index 2bd1b14349..1602b6a4ac 100644 --- a/src/renderer/src/components/QuickPanel/view.tsx +++ b/src/renderer/src/components/QuickPanel/view.tsx @@ -6,13 +6,19 @@ import { theme } from 'antd' import Color from 'color' import { t } from 'i18next' import { Check } from 'lucide-react' -import React, { use, useCallback, useDeferredValue, useEffect, useMemo, useRef, useState } from 'react' +import React, { use, useCallback, useDeferredValue, useEffect, useLayoutEffect, useMemo, useRef, useState } from 'react' import { FixedSizeList } from 'react-window' import styled from 'styled-components' import * as tinyPinyin from 'tiny-pinyin' import { QuickPanelContext } from './provider' -import { QuickPanelCallBackOptions, QuickPanelCloseAction, QuickPanelListItem, QuickPanelOpenOptions } from './types' +import { + QuickPanelCallBackOptions, + QuickPanelCloseAction, + QuickPanelListItem, + QuickPanelOpenOptions, + QuickPanelScrollTrigger +} from './types' const ITEM_HEIGHT = 31 @@ -45,6 +51,7 @@ export const QuickPanelView: React.FC = ({ setInputText }) => { // 避免上下翻页时,鼠标干扰 const [isMouseOver, setIsMouseOver] = useState(false) + const scrollTriggerRef = useRef('initial') const [_index, setIndex] = useState(ctx.defaultIndex) const index = useDeferredValue(_index) const [historyPanel, setHistoryPanel] = useState([]) @@ -140,6 +147,7 @@ export const QuickPanelView: React.FC = ({ setInputText }) => { (action?: QuickPanelCloseAction) => { ctx.close(action) setHistoryPanel([]) + scrollTriggerRef.current = 'initial' if (action === 'delete-symbol') { const textArea = document.querySelector('.inputbar textarea') as HTMLTextAreaElement @@ -249,10 +257,13 @@ export const QuickPanelView: React.FC = ({ setInputText }) => { // eslint-disable-next-line react-hooks/exhaustive-deps }, [ctx.isVisible]) - useEffect(() => { - if (index >= 0) { - listRef.current?.scrollToItem(index, 'auto') - } + useLayoutEffect(() => { + if (!listRef.current || index < 0 || scrollTriggerRef.current === 'none') return + + const alignment = scrollTriggerRef.current === 'keyboard' ? 
'auto' : 'smart' + listRef.current?.scrollToItem(index, alignment) + + scrollTriggerRef.current = 'none' }, [index]) // 处理键盘事件 @@ -277,6 +288,7 @@ export const QuickPanelView: React.FC = ({ setInputText }) => { switch (e.key) { case 'ArrowUp': + scrollTriggerRef.current = 'keyboard' if (isAssistiveKeyPressed) { setIndex((prev) => { const newIndex = prev - ctx.pageSize @@ -289,6 +301,7 @@ export const QuickPanelView: React.FC = ({ setInputText }) => { break case 'ArrowDown': + scrollTriggerRef.current = 'keyboard' if (isAssistiveKeyPressed) { setIndex((prev) => { const newIndex = prev + ctx.pageSize @@ -301,6 +314,7 @@ export const QuickPanelView: React.FC = ({ setInputText }) => { break case 'PageUp': + scrollTriggerRef.current = 'keyboard' setIndex((prev) => { const newIndex = prev - ctx.pageSize return newIndex < 0 ? 0 : newIndex @@ -308,6 +322,7 @@ export const QuickPanelView: React.FC = ({ setInputText }) => { break case 'PageDown': + scrollTriggerRef.current = 'keyboard' setIndex((prev) => { const newIndex = prev + ctx.pageSize return newIndex >= list.length ? list.length - 1 : newIndex @@ -317,6 +332,7 @@ export const QuickPanelView: React.FC = ({ setInputText }) => { case 'ArrowLeft': if (!isAssistiveKeyPressed) return if (!historyPanel.length) return + scrollTriggerRef.current = 'initial' clearSearchText(false) if (historyPanel.length > 0) { const lastPanel = historyPanel.pop() @@ -329,6 +345,7 @@ export const QuickPanelView: React.FC = ({ setInputText }) => { case 'ArrowRight': if (!isAssistiveKeyPressed) return if (!list?.[index]?.isMenu) return + scrollTriggerRef.current = 'initial' clearSearchText(false) handleItemAction(list[index], 'enter') break @@ -413,7 +430,14 @@ export const QuickPanelView: React.FC = ({ setInputText }) => { $selectedColor={selectedColor} $selectedColorHover={selectedColorHover} className={ctx.isVisible ? 'visible' : ''}> - setIsMouseOver(true)}> + + setIsMouseOver((prev) => { + scrollTriggerRef.current = 'initial' + return prev ? prev : true + }) + }> { return ( {isEmoji(avatar) ? ( - {avatar} + + {avatar} + ) : ( )} @@ -319,6 +322,12 @@ const Container = styled.div` height: ${isMac ? 'calc(100vh - var(--navbar-height))' : '100vh'}; -webkit-app-region: drag !important; margin-top: ${isMac ? 'var(--navbar-height)' : 0}; + + .sidebar-avatar { + margin-bottom: ${isMac ? '12px' : '12px'}; + margin-top: ${isMac ? '0px' : '2px'}; + -webkit-app-region: none; + } ` const AvatarImg = styled(Avatar)` @@ -331,23 +340,6 @@ const AvatarImg = styled(Avatar)` cursor: pointer; ` -const EmojiAvatar = styled.div` - width: 31px; - height: 31px; - background-color: var(--color-background-soft); - margin-bottom: ${isMac ? '12px' : '12px'}; - margin-top: ${isMac ? 
'0px' : '2px'}; - border-radius: 20%; - display: flex; - align-items: center; - justify-content: center; - font-size: 16px; - cursor: pointer; - -webkit-app-region: none; - border: 0.5px solid var(--color-border); - font-size: 20px; -` - const MainMenusContainer = styled.div` display: flex; flex: 1; diff --git a/src/renderer/src/config/models.ts b/src/renderer/src/config/models.ts index 5fe9b44525..be66317d88 100644 --- a/src/renderer/src/config/models.ts +++ b/src/renderer/src/config/models.ts @@ -2386,6 +2386,18 @@ export function isSupportedModel(model: OpenAI.Models.Model): boolean { return !NOT_SUPPORTED_REGEX.test(model.id) } +export function isNotSupportTemperatureAndTopP(model: Model): boolean { + if (!model) { + return true + } + + if (isOpenAIReasoningModel(model) || isOpenAIWebSearch(model)) { + return true + } + + return false +} + export function isWebSearchModel(model: Model): boolean { if (!model) { return false diff --git a/src/renderer/src/i18n/locales/en-us.json b/src/renderer/src/i18n/locales/en-us.json index 631f92bc11..12dcef76ce 100644 --- a/src/renderer/src/i18n/locales/en-us.json +++ b/src/renderer/src/i18n/locales/en-us.json @@ -44,6 +44,9 @@ "my_agents": "My Agents", "search.no_results": "No results found", "sorting.title": "Sorting", + "settings": { + "title": "Agent Setting" + }, "tag.agent": "Agent", "tag.default": "Default", "tag.new": "New", @@ -85,6 +88,9 @@ "settings.knowledge_base.recognition": "Use Knowledge Base", "settings.knowledge_base.recognition.off": "Force Search", "settings.knowledge_base.recognition.on": "Intent Recognition", + "settings.tool_use_mode": "Tool Use Mode", + "settings.tool_use_mode.function": "Function", + "settings.tool_use_mode.prompt": "Prompt", "settings.regular_phrases": { "title": "Regular Phrase", "add": "Add Phrase", @@ -205,9 +211,9 @@ "settings.context_count": "Context", "settings.context_count.tip": "The number of previous messages to keep in the context.", "settings.max": "Max", - "settings.max_tokens": "Enable max tokens limit", - "settings.max_tokens.confirm": "Enable max tokens limit", - "settings.max_tokens.confirm_content": "Enable max tokens limit, affects the length of the result. Need to consider the context limit of the model, otherwise an error will be reported", + "settings.max_tokens": "Set max tokens", + "settings.max_tokens.confirm": "Set max tokens", + "settings.max_tokens.confirm_content": "Set the maximum number of tokens the model can generate. Need to consider the context limit of the model, otherwise an error will be reported", "settings.max_tokens.tip": "The maximum number of tokens the model can generate. 
Need to consider the context limit of the model, otherwise an error will be reported", "settings.reset": "Reset", "settings.set_as_default": "Apply to default assistant", @@ -905,6 +911,8 @@ "about.checkUpdate.available": "Update", "about.contact.button": "Email", "about.contact.title": "Contact", + "about.debug.title": "Debug", + "about.debug.open": "Open", "about.description": "A powerful AI assistant for producer", "about.downloading": "Downloading...", "about.feedback.button": "Feedback", @@ -1524,6 +1532,7 @@ "clear_shortcut": "Clear Shortcut", "clear_topic": "Clear Messages", "copy_last_message": "Copy Last Message", + "exit_fullscreen": "Exit Fullscreen", "key": "Key", "mini_window": "Quick Assistant", "new_topic": "New Topic", diff --git a/src/renderer/src/i18n/locales/ja-jp.json b/src/renderer/src/i18n/locales/ja-jp.json index a403b3292c..4c743a4e34 100644 --- a/src/renderer/src/i18n/locales/ja-jp.json +++ b/src/renderer/src/i18n/locales/ja-jp.json @@ -48,7 +48,10 @@ "tag.default": "デフォルト", "tag.new": "新規", "tag.system": "システム", - "title": "エージェント" + "title": "エージェント", + "settings": { + "title": "エージェント設定" + } }, "assistants": { "title": "アシスタント", @@ -95,7 +98,10 @@ "settings.knowledge_base.recognition.tip": "アシスタントは大規模言語モデルの意図認識能力を使用して、ナレッジベースを参照する必要があるかどうかを判断します。この機能はモデルの能力に依存します", "settings.knowledge_base.recognition": "ナレッジベースの呼び出し", "settings.knowledge_base.recognition.off": "強制検索", - "settings.knowledge_base.recognition.on": "意図認識" + "settings.knowledge_base.recognition.on": "意図認識", + "settings.tool_use_mode": "工具調用方式", + "settings.tool_use_mode.function": "関数", + "settings.tool_use_mode.prompt": "提示詞" }, "auth": { "error": "APIキーの自動取得に失敗しました。手動で取得してください", @@ -205,9 +211,9 @@ "settings.context_count": "コンテキスト", "settings.context_count.tip": "コンテキストに保持する以前のメッセージの数", "settings.max": "最大", - "settings.max_tokens": "最大トークン制限を有効にする", - "settings.max_tokens.confirm": "最大トークン制限を有効にする", - "settings.max_tokens.confirm_content": "最大トークン制限を有効にすると、モデルが生成できる最大トークン数が制限されます。これにより、返される結果の長さに影響が出る可能性があります。モデルのコンテキスト制限に基づいて設定する必要があります。そうしないとエラーが発生します", + "settings.max_tokens": "最大トークン数", + "settings.max_tokens.confirm": "最大トークン数", + "settings.max_tokens.confirm_content": "最大トークン数を設定すると、モデルが生成できる最大トークン数が制限されます。これにより、返される結果の長さに影響が出る可能性があります。モデルのコンテキスト制限に基づいて設定する必要があります。そうしないとエラーが発生します", "settings.max_tokens.tip": "モデルが生成できる最大トークン数。モデルのコンテキスト制限に基づいて設定する必要があります。そうしないとエラーが発生します", "settings.reset": "リセット", "settings.set_as_default": "デフォルトのアシスタントに適用", @@ -1522,6 +1528,7 @@ "clear_shortcut": "ショートカットをクリア", "clear_topic": "メッセージを消去", "copy_last_message": "最後のメッセージをコピー", + "exit_fullscreen": "フルスクリーンを終了", "key": "キー", "mini_window": "クイックアシスタント", "new_topic": "新しいトピック", diff --git a/src/renderer/src/i18n/locales/ru-ru.json b/src/renderer/src/i18n/locales/ru-ru.json index d0920fd2cf..30332b88a3 100644 --- a/src/renderer/src/i18n/locales/ru-ru.json +++ b/src/renderer/src/i18n/locales/ru-ru.json @@ -48,6 +48,9 @@ }, "export": { "agent": "Экспорт агента" + }, + "settings": { + "title": "Настройки агента" } }, "assistants": { @@ -85,6 +88,9 @@ "settings.knowledge_base.recognition": "Использование базы знаний", "settings.knowledge_base.recognition.off": "Принудительный поиск", "settings.knowledge_base.recognition.on": "Распознавание намерений", + "settings.tool_use_mode": "Режим использования инструментов", + "settings.tool_use_mode.function": "Функция", + "settings.tool_use_mode.prompt": "Подсказка", "settings.regular_phrases": { "title": "Регулярные подсказки", "add": "Добавить подсказку", @@ -205,9 
+211,9 @@ "settings.context_count": "Контекст", "settings.context_count.tip": "Количество предыдущих сообщений, которые нужно сохранить в контексте.", "settings.max": "Максимум", - "settings.max_tokens": "Включить лимит максимальных токенов", - "settings.max_tokens.confirm": "Включить лимит максимальных токенов", - "settings.max_tokens.confirm_content": "Включить лимит максимальных токенов, влияет на длину результата. Нужно учитывать контекст модели, иначе будет ошибка", + "settings.max_tokens": "Максимальное количество токенов", + "settings.max_tokens.confirm": "Максимальное количество токенов", + "settings.max_tokens.confirm_content": "Установить максимальное количество токенов, влияет на длину результата. Нужно учитывать контекст модели, иначе будет ошибка", "settings.max_tokens.tip": "Максимальное количество токенов, которые может сгенерировать модель. Нужно учитывать контекст модели, иначе будет ошибка", "settings.reset": "Сбросить", "settings.set_as_default": "Применить к ассистенту по умолчанию", @@ -1522,6 +1528,7 @@ "clear_shortcut": "Очистить сочетание клавиш", "clear_topic": "Очистить все сообщения", "copy_last_message": "Копировать последнее сообщение", + "exit_fullscreen": "Выйти из полноэкранного режима", "key": "Клавиша", "mini_window": "Быстрый помощник", "new_topic": "Новый топик", diff --git a/src/renderer/src/i18n/locales/zh-cn.json b/src/renderer/src/i18n/locales/zh-cn.json index 6ffb9620c1..df40aacdbe 100644 --- a/src/renderer/src/i18n/locales/zh-cn.json +++ b/src/renderer/src/i18n/locales/zh-cn.json @@ -48,7 +48,10 @@ "tag.default": "默认", "tag.new": "新建", "tag.system": "系统", - "title": "智能体" + "title": "智能体", + "settings": { + "title": "智能体配置" + } }, "assistants": { "title": "助手", @@ -75,6 +78,9 @@ "settings.knowledge_base.recognition": "调用知识库", "settings.knowledge_base.recognition.off": "强制检索", "settings.knowledge_base.recognition.on": "意图识别", + "settings.tool_use_mode": "工具调用方式", + "settings.tool_use_mode.function": "函数", + "settings.tool_use_mode.prompt": "提示词", "settings.model": "模型设置", "settings.preset_messages": "预设消息", "settings.prompt": "提示词设置", @@ -219,9 +225,9 @@ "settings.context_count": "上下文数", "settings.context_count.tip": "要保留在上下文中的消息数量,数值越大,上下文越长,消耗的 token 越多。普通聊天建议 5-10", "settings.max": "不限", - "settings.max_tokens": "开启消息长度限制", - "settings.max_tokens.confirm": "开启消息长度限制", - "settings.max_tokens.confirm_content": "开启消息长度限制后,单次交互所用的最大 Token 数, 会影响返回结果的长度。要根据模型上下文限制来设置,否则会报错", + "settings.max_tokens": "最大 Token 数", + "settings.max_tokens.confirm": "最大 Token 数", + "settings.max_tokens.confirm_content": "设置单次交互所用的最大 Token 数, 会影响返回结果的长度。要根据模型上下文限制来设置,否则会报错", "settings.max_tokens.tip": "单次交互所用的最大 Token 数, 会影响返回结果的长度。要根据模型上下文限制来设置,否则会报错", "settings.reset": "重置", "settings.set_as_default": "应用到默认助手", @@ -905,6 +911,8 @@ "about.checkUpdate.available": "立即更新", "about.contact.button": "邮件", "about.contact.title": "邮件联系", + "about.debug.title": "调试面板", + "about.debug.open": "打开", "about.description": "一款为创造者而生的 AI 助手", "about.downloading": "正在下载更新...", "about.feedback.button": "反馈", @@ -1524,6 +1532,7 @@ "clear_shortcut": "清除快捷键", "clear_topic": "清空消息", "copy_last_message": "复制上一条消息", + "exit_fullscreen": "退出全屏", "key": "按键", "mini_window": "快捷助手", "new_topic": "新建话题", diff --git a/src/renderer/src/i18n/locales/zh-tw.json b/src/renderer/src/i18n/locales/zh-tw.json index 17667120e0..de3c0a9593 100644 --- a/src/renderer/src/i18n/locales/zh-tw.json +++ b/src/renderer/src/i18n/locales/zh-tw.json @@ -48,7 +48,10 @@ "tag.default": "預設", "tag.new": "新增", "tag.system": 
"系統", - "title": "智慧代理人" + "title": "智慧代理人", + "settings": { + "title": "智慧代理人設定" + } }, "assistants": { "title": "助手", @@ -95,7 +98,10 @@ "settings.knowledge_base.recognition.tip": "智慧代理人將調用大語言模型的意圖識別能力,判斷是否需要調用知識庫進行回答,該功能將依賴模型的能力", "settings.knowledge_base.recognition": "調用知識庫", "settings.knowledge_base.recognition.off": "強制檢索", - "settings.knowledge_base.recognition.on": "意圖識別" + "settings.knowledge_base.recognition.on": "意圖識別", + "settings.tool_use_mode": "工具調用方式", + "settings.tool_use_mode.function": "函數", + "settings.tool_use_mode.prompt": "提示詞" }, "auth": { "error": "自動取得金鑰失敗,請手動取得", @@ -205,9 +211,9 @@ "settings.context_count": "上下文", "settings.context_count.tip": "在上下文中保留的前幾則訊息。", "settings.max": "最大", - "settings.max_tokens": "啟用最大 Token 限制", - "settings.max_tokens.confirm": "啟用訊息長度限制", - "settings.max_tokens.confirm_content": "啟用訊息長度限制後,單次互動所用的最大 Token 數,會影響返回結果的長度。要根據模型上下文限制來設定,否則會發生錯誤。", + "settings.max_tokens": "最大 Token 數", + "settings.max_tokens.confirm": "設置最大 Token 數", + "settings.max_tokens.confirm_content": "設置單次交互所用的最大 Token 數,會影響返回結果的長度。要根據模型上下文限制來設定,否則會發生錯誤。", "settings.max_tokens.tip": "模型可以生成的最大 Token 數。要根據模型上下文限制來設定,否則會發生錯誤。", "settings.reset": "重設", "settings.set_as_default": "設為預設助手", @@ -905,6 +911,8 @@ "about.checkUpdate.available": "立即更新", "about.contact.button": "電子郵件", "about.contact.title": "聯絡方式", + "about.debug.title": "調試面板", + "about.debug.open": "開啟", "about.description": "一款為創作者而生的強大 AI 助手", "about.downloading": "正在下載...", "about.feedback.button": "回饋", diff --git a/src/renderer/src/pages/agents/components/AddAgentPopup.tsx b/src/renderer/src/pages/agents/components/AddAgentPopup.tsx index eeca7a39c8..fc341e970e 100644 --- a/src/renderer/src/pages/agents/components/AddAgentPopup.tsx +++ b/src/renderer/src/pages/agents/components/AddAgentPopup.tsx @@ -1,6 +1,6 @@ import 'emoji-picker-element' -import { CheckOutlined, LoadingOutlined, ThunderboltOutlined, RollbackOutlined } from '@ant-design/icons' +import { CheckOutlined, LoadingOutlined, RollbackOutlined, ThunderboltOutlined } from '@ant-design/icons' import EmojiPicker from '@renderer/components/EmojiPicker' import { TopView } from '@renderer/components/TopView' import { AGENT_PROMPT } from '@renderer/config/prompts' @@ -132,8 +132,8 @@ const PopupContainer: React.FC = ({ resolve }) => { } const handleUndoButtonClick = async () => { - form.setFieldsValue({ prompt: originalPrompt }) - setShowUndoButton(false) + form.setFieldsValue({ prompt: originalPrompt }) + setShowUndoButton(false) } // Compute label width based on the longest label @@ -191,11 +191,13 @@ const PopupContainer: React.FC = ({ resolve }) => { style={{ position: 'absolute', top: 8, right: 8 }} disabled={loading} /> - {showUndoButton &&
{showKnowledgeIcon && ( diff --git a/src/renderer/src/pages/agents/index.ts b/src/renderer/src/pages/agents/index.ts index cf07d1df95..3bc31bd1be 100644 --- a/src/renderer/src/pages/agents/index.ts +++ b/src/renderer/src/pages/agents/index.ts @@ -1,7 +1,9 @@ import { useRuntime } from '@renderer/hooks/useRuntime' import { useSettings } from '@renderer/hooks/useSettings' +import store from '@renderer/store' import { Agent } from '@renderer/types' import { useEffect, useState } from 'react' + let _agents: Agent[] = [] export const getAgentsFromSystemAgents = (systemAgents: any) => { @@ -19,27 +21,44 @@ export function useSystemAgents() { const { defaultAgent } = useSettings() const [agents, setAgents] = useState([]) const { resourcesPath } = useRuntime() + const { agentssubscribeUrl } = store.getState().settings useEffect(() => { const loadAgents = async () => { try { - // 始终加载本地 agents + // 检查是否使用远程数据源 + if (agentssubscribeUrl && agentssubscribeUrl.startsWith('http')) { + try { + await new Promise((resolve) => setTimeout(resolve, 500)) + const response = await fetch(agentssubscribeUrl) + if (!response.ok) { + throw new Error(`HTTP error! Status: ${response.status}`) + } + const agentsData = (await response.json()) as Agent[] + setAgents(agentsData) + return + } catch (error) { + console.error('Failed to load remote agents:', error) + // 远程加载失败,继续尝试加载本地数据 + } + } + + // 如果没有远程配置或获取失败,加载本地代理 if (resourcesPath && _agents.length === 0) { const localAgentsData = await window.api.fs.read(resourcesPath + '/data/agents.json') _agents = JSON.parse(localAgentsData) as Agent[] } - // 如果没有远程配置或获取失败,使用本地 agents setAgents(_agents) } catch (error) { console.error('Failed to load agents:', error) - // 发生错误时使用本地 agents + // 发生错误时使用已加载的本地 agents setAgents(_agents) } } loadAgents() - }, [defaultAgent, resourcesPath]) + }, [defaultAgent, resourcesPath, agentssubscribeUrl]) return agents } diff --git a/src/renderer/src/pages/home/Inputbar/Inputbar.tsx b/src/renderer/src/pages/home/Inputbar/Inputbar.tsx index ab13fe2489..39621cc85d 100644 --- a/src/renderer/src/pages/home/Inputbar/Inputbar.tsx +++ b/src/renderer/src/pages/home/Inputbar/Inputbar.tsx @@ -215,6 +215,10 @@ const Inputbar: FC = ({ assistant: _assistant, setActiveTopic, topic }) = ) } + if (topic.prompt) { + baseUserMessage.assistant.prompt = assistant.prompt ? `${assistant.prompt}\n${topic.prompt}` : topic.prompt + } + baseUserMessage.usage = await estimateUserPromptUsage(baseUserMessage) const { message, blocks } = getUserMessage(baseUserMessage) diff --git a/src/renderer/src/pages/home/Inputbar/TokenCount.tsx b/src/renderer/src/pages/home/Inputbar/TokenCount.tsx index cb08b9b5fd..c2a0ee5068 100644 --- a/src/renderer/src/pages/home/Inputbar/TokenCount.tsx +++ b/src/renderer/src/pages/home/Inputbar/TokenCount.tsx @@ -22,7 +22,7 @@ const TokenCount: FC = ({ estimateTokenCount, inputTokenCount, contextCou } const formatMaxCount = (max: number) => { - if (max == 20) { + if (max == 100) { return ( = ({ block, citationBlockId, role, mentions // Use the passed citationBlockId directly in the selector const { renderInputMessageAsMarkdown } = useSettings() - const formattedCitations = useSelector((state: RootState) => - selectFormattedCitationsByBlockId(state, citationBlockId) - ) + const formattedCitations = useSelector((state: RootState) => { + const citations = selectFormattedCitationsByBlockId(state, citationBlockId) + return citations.map((citation) => ({ + ...citation, + content: citation.content ? 
cleanMarkdownContent(citation.content) : citation.content + })) + }) const processedContent = useMemo(() => { let content = block.content diff --git a/src/renderer/src/pages/home/Messages/ChatFlowHistory.tsx b/src/renderer/src/pages/home/Messages/ChatFlowHistory.tsx index 73a3208ca2..99d3c51193 100644 --- a/src/renderer/src/pages/home/Messages/ChatFlowHistory.tsx +++ b/src/renderer/src/pages/home/Messages/ChatFlowHistory.tsx @@ -1,14 +1,17 @@ import '@xyflow/react/dist/style.css' import { RobotOutlined, UserOutlined } from '@ant-design/icons' +import EmojiAvatar from '@renderer/components/Avatar/EmojiAvatar' import ModelAvatar from '@renderer/components/Avatar/ModelAvatar' import { getModelLogo } from '@renderer/config/models' import { useTheme } from '@renderer/context/ThemeProvider' +import useAvatar from '@renderer/hooks/useAvatar' import { useSettings } from '@renderer/hooks/useSettings' import { EVENT_NAMES, EventEmitter } from '@renderer/services/EventService' import { RootState } from '@renderer/store' import { selectMessagesForTopic } from '@renderer/store/newMessage' import { Model } from '@renderer/types' +import { isEmoji } from '@renderer/utils' import { getMainTextContent } from '@renderer/utils/messageUtils/find' import { Controls, Handle, MiniMap, ReactFlow, ReactFlowProvider } from '@xyflow/react' import { Edge, Node, NodeTypes, Position, useEdgesState, useNodesState } from '@xyflow/react' @@ -63,7 +66,11 @@ const CustomNode: FC<{ data: any }> = ({ data }) => { // 用户头像 if (data.userAvatar) { - avatar = + if (isEmoji(data.userAvatar)) { + avatar = {data.userAvatar} + } else { + avatar = + } } else { avatar = } style={{ backgroundColor: 'var(--color-info)' }} /> } @@ -221,7 +228,7 @@ const ChatFlowHistory: FC = ({ conversationId }) => { ) // 获取用户头像 - const userAvatar = useSelector((state: RootState) => state.runtime.avatar) + const userAvatar = useAvatar() // 消息过滤 const { userMessages, assistantMessages } = useMemo(() => { diff --git a/src/renderer/src/pages/home/Messages/CitationsList.tsx b/src/renderer/src/pages/home/Messages/CitationsList.tsx index d674db4e18..230767a3a8 100644 --- a/src/renderer/src/pages/home/Messages/CitationsList.tsx +++ b/src/renderer/src/pages/home/Messages/CitationsList.tsx @@ -1,9 +1,10 @@ import Favicon from '@renderer/components/Icons/FallbackFavicon' import { HStack } from '@renderer/components/Layout' import { fetchWebContent } from '@renderer/utils/fetch' +import { cleanMarkdownContent } from '@renderer/utils/formats' import { QueryClient, QueryClientProvider, useQuery } from '@tanstack/react-query' -import { Button, Drawer, Skeleton } from 'antd' -import { FileSearch } from 'lucide-react' +import { Button, Drawer, message, Skeleton } from 'antd' +import { Check, Copy, FileSearch } from 'lucide-react' import React, { useState } from 'react' import { useTranslation } from 'react-i18next' import styled from 'styled-components' @@ -44,21 +45,6 @@ const truncateText = (text: string, maxLength = 100) => { return text.length > maxLength ? text.slice(0, maxLength) + '...' 
: text } -/** - * 清理Markdown内容 - * @param text - */ -const cleanMarkdownContent = (text: string): string => { - if (!text) return '' - let cleaned = text.replace(/!\[.*?]\(.*?\)/g, '') - cleaned = cleaned.replace(/\[(.*?)]\(.*?\)/g, '$1') - cleaned = cleaned.replace(/https?:\/\/\S+/g, '') - cleaned = cleaned.replace(/[-—–_=+]{3,}/g, ' ') - cleaned = cleaned.replace(/[¥$€£¥%@#&*^()[\]{}<>~`'"\\|/_.]+/g, '') - cleaned = cleaned.replace(/\s+/g, ' ').trim() - return cleaned -} - const CitationsList: React.FC = ({ citations }) => { const { t } = useTranslation() const [open, setOpen] = useState(false) @@ -115,6 +101,27 @@ const handleLinkClick = (url: string, event: React.MouseEvent) => { else window.api.file.openPath(url) } +const CopyButton: React.FC<{ content: string }> = ({ content }) => { + const [copied, setCopied] = useState(false) + const { t } = useTranslation() + + const handleCopy = () => { + if (!content) return + navigator.clipboard + .writeText(content) + .then(() => { + setCopied(true) + message.success(t('common.copied')) + setTimeout(() => setCopied(false), 2000) + }) + .catch(() => { + message.error(t('message.copy.failed')) + }) + } + + return {copied ? : } +} + const WebSearchCitation: React.FC<{ citation: Citation }> = ({ citation }) => { const { data: fetchedContent, isLoading } = useQuery({ queryKey: ['webContent', citation.url], @@ -136,6 +143,7 @@ const WebSearchCitation: React.FC<{ citation: Citation }> = ({ citation }) => { handleLinkClick(citation.url, e)}> {citation.title || {citation.hostname}} + {fetchedContent && } {isLoading ? ( @@ -153,6 +161,7 @@ const KnowledgeCitation: React.FC<{ citation: Citation }> = ({ citation }) => ( handleLinkClick(citation.url, e)}> {citation.title} + {citation.content && } {citation.content && truncateText(citation.content, 100)} @@ -203,6 +212,23 @@ const CitationLink = styled.a` } ` +const CopyIconWrapper = styled.div` + cursor: pointer; + display: flex; + align-items: center; + justify-content: center; + color: var(--color-text-2); + opacity: 0.6; + margin-left: auto; + padding: 4px; + border-radius: 4px; + + &:hover { + opacity: 1; + background-color: var(--color-background-soft); + } +` + const WebSearchCard = styled.div` display: flex; flex-direction: column; @@ -219,6 +245,7 @@ const WebSearchCardHeader = styled.div` align-items: center; gap: 8px; margin-bottom: 6px; + width: 100%; ` const WebSearchCardContent = styled.div` diff --git a/src/renderer/src/pages/home/Messages/MessageAnchorLine.tsx b/src/renderer/src/pages/home/Messages/MessageAnchorLine.tsx index adffe18737..258c9d264e 100644 --- a/src/renderer/src/pages/home/Messages/MessageAnchorLine.tsx +++ b/src/renderer/src/pages/home/Messages/MessageAnchorLine.tsx @@ -1,4 +1,5 @@ import { DownOutlined } from '@ant-design/icons' +import EmojiAvatar from '@renderer/components/Avatar/EmojiAvatar' import { APP_NAME, AppLogo, isLocalAi } from '@renderer/config/env' import { getModelLogo } from '@renderer/config/models' import { useTheme } from '@renderer/context/ThemeProvider' @@ -16,6 +17,7 @@ import { Avatar } from 'antd' import { type FC, useCallback, useEffect, useRef, useState } from 'react' import { useTranslation } from 'react-i18next' import styled from 'styled-components' + interface MessageLineProps { messages: Message[] } @@ -230,7 +232,15 @@ const MessageAnchorLine: FC = ({ messages }) => { ) : ( <> {isEmoji(avatar) ? 
( - {avatar} + + {avatar} + ) : ( )} @@ -314,16 +324,4 @@ const MessageItemContent = styled.div` max-width: 200px; ` -const EmojiAvatar = styled.div<{ size: number }>` - width: ${(props) => props.size}px; - height: ${(props) => props.size}px; - background-color: var(--color-background-soft); - border-radius: 20%; - display: flex; - align-items: center; - justify-content: center; - font-size: ${(props) => props.size * 0.6}px; - border: 0.5px solid var(--color-border); -` - export default MessageAnchorLine diff --git a/src/renderer/src/pages/home/Messages/MessageHeader.tsx b/src/renderer/src/pages/home/Messages/MessageHeader.tsx index cf25fd36f1..eaed4b08a5 100644 --- a/src/renderer/src/pages/home/Messages/MessageHeader.tsx +++ b/src/renderer/src/pages/home/Messages/MessageHeader.tsx @@ -1,3 +1,4 @@ +import EmojiAvatar from '@renderer/components/Avatar/EmojiAvatar' import UserPopup from '@renderer/components/Popups/UserPopup' import { APP_NAME, AppLogo, isLocalAi } from '@renderer/config/env' import { getModelLogo } from '@renderer/config/models' @@ -87,7 +88,9 @@ const MessageHeader: FC = memo(({ assistant, model, message }) => { ) : ( <> {isEmoji(avatar) ? ( - UserPopup.show()}>{avatar} + UserPopup.show()} size={35} fontSize={20}> + {avatar} + ) : ( = memo(({ assistant, model, message }) => { MessageHeader.displayName = 'MessageHeader' -const EmojiAvatar = styled.div` - width: 35px; - height: 35px; - background-color: var(--color-background-soft); - border-radius: 20%; - display: flex; - align-items: center; - justify-content: center; - font-size: 18px; - cursor: pointer; - border: 0.5px solid var(--color-border); - font-size: 20px; -` - const Container = styled.div` display: flex; flex-direction: row; diff --git a/src/renderer/src/pages/home/Messages/MessageTokens.tsx b/src/renderer/src/pages/home/Messages/MessageTokens.tsx index 390b3530df..98d3f8f88a 100644 --- a/src/renderer/src/pages/home/Messages/MessageTokens.tsx +++ b/src/renderer/src/pages/home/Messages/MessageTokens.tsx @@ -44,7 +44,7 @@ const MessgeTokens: React.FC = ({ message }) => { {metrixs} - Tokens: + Tokens: {message?.usage?.total_tokens} ↑{message?.usage?.prompt_tokens} ↓{message?.usage?.completion_tokens} @@ -72,7 +72,7 @@ const MessageMetadata = styled.div` display: block; span { - padding:0 2px; + padding: 0 2px; } } diff --git a/src/renderer/src/pages/home/Messages/MessageTools.tsx b/src/renderer/src/pages/home/Messages/MessageTools.tsx index 433f1af051..b381fc171d 100644 --- a/src/renderer/src/pages/home/Messages/MessageTools.tsx +++ b/src/renderer/src/pages/home/Messages/MessageTools.tsx @@ -212,7 +212,8 @@ const MessageTools: FC = ({ blocks }) => { } const CollapseContainer = styled(Collapse)` - margin-bottom: 15px; + margin-top: 10px; + margin-bottom: 12px; border-radius: 8px; overflow: hidden; diff --git a/src/renderer/src/pages/home/Messages/Messages.tsx b/src/renderer/src/pages/home/Messages/Messages.tsx index 6da7985fd4..f69dc678aa 100644 --- a/src/renderer/src/pages/home/Messages/Messages.tsx +++ b/src/renderer/src/pages/home/Messages/Messages.tsx @@ -225,7 +225,7 @@ const Messages: React.FC = ({ assistant, topic, setActiveTopic }) return ( @@ -319,7 +319,7 @@ interface ContainerProps { const Container = styled(Scrollbar)` display: flex; flex-direction: column-reverse; - padding: 10px 0 10px; + padding: 10px 0 20px; overflow-x: hidden; background-color: var(--color-background); z-index: 1; diff --git a/src/renderer/src/pages/home/Tabs/SettingsTab.tsx b/src/renderer/src/pages/home/Tabs/SettingsTab.tsx index 
1b35190be0..06bd5e335b 100644 --- a/src/renderer/src/pages/home/Tabs/SettingsTab.tsx +++ b/src/renderer/src/pages/home/Tabs/SettingsTab.tsx @@ -71,7 +71,6 @@ const SettingsTab: FC = (props) => { const [maxTokens, setMaxTokens] = useState(assistant?.settings?.maxTokens ?? 0) const [fontSizeValue, setFontSizeValue] = useState(fontSize) const [streamOutput, setStreamOutput] = useState(assistant?.settings?.streamOutput ?? true) - const [enableToolUse, setEnableToolUse] = useState(assistant?.settings?.enableToolUse ?? false) const { t } = useTranslation() const dispatch = useAppDispatch() @@ -153,10 +152,8 @@ const SettingsTab: FC = (props) => { setStreamOutput(assistant?.settings?.streamOutput ?? true) }, [assistant]) - const formatSliderTooltip = (value?: number) => { - if (value === undefined) return '' - return value === 20 ? '∞' : value.toString() - } + const assistantContextCount = assistant?.settings?.contextCount || 20 + const maxContextCount = assistantContextCount > 20 ? assistantContextCount : 20 return ( @@ -204,12 +201,11 @@ const SettingsTab: FC = (props) => { @@ -225,18 +221,6 @@ const SettingsTab: FC = (props) => { /> - - {t('models.enable_tool_use')} - { - setEnableToolUse(checked) - updateAssistantSettings({ enableToolUse: checked }) - }} - /> - - diff --git a/src/renderer/src/pages/knowledge/KnowledgeContent.tsx b/src/renderer/src/pages/knowledge/KnowledgeContent.tsx index fb8ec0f95d..3afccbd43d 100644 --- a/src/renderer/src/pages/knowledge/KnowledgeContent.tsx +++ b/src/renderer/src/pages/knowledge/KnowledgeContent.tsx @@ -245,9 +245,11 @@ const KnowledgeContent: FC = ({ selectedBase }) => {
- - {t('models.dimensions', { dimensions: base.dimensions || 0 })} - + {base.rerankModel && ( + + {base.rerankModel.name} + + )} diff --git a/src/renderer/src/pages/knowledge/components/KnowledgeSettingsPopup.tsx b/src/renderer/src/pages/knowledge/components/KnowledgeSettingsPopup.tsx index bf318fb265..f409990094 100644 --- a/src/renderer/src/pages/knowledge/components/KnowledgeSettingsPopup.tsx +++ b/src/renderer/src/pages/knowledge/components/KnowledgeSettingsPopup.tsx @@ -291,7 +291,7 @@ const PopupContainer: React.FC = ({ base: _base, resolve }) => { rules={[ { validator(_, value) { - if (value && (value < 0 || value > 10)) { + if (value && (value < 0 || value > 30)) { return Promise.reject(new Error(t('knowledge.topN_too_large_or_small'))) } return Promise.resolve() diff --git a/src/renderer/src/pages/settings/AboutSettings.tsx b/src/renderer/src/pages/settings/AboutSettings.tsx index 62a4971caa..bd7e174f6f 100644 --- a/src/renderer/src/pages/settings/AboutSettings.tsx +++ b/src/renderer/src/pages/settings/AboutSettings.tsx @@ -12,7 +12,7 @@ import { ThemeMode } from '@renderer/types' import { compareVersions, runAsyncFunction } from '@renderer/utils' import { Avatar, Button, Progress, Row, Switch, Tag } from 'antd' import { debounce } from 'lodash' -import { FileCheck, Github, Globe, Mail, Rss } from 'lucide-react' +import { Bug, FileCheck, Github, Globe, Mail, Rss } from 'lucide-react' import { FC, useEffect, useState } from 'react' import { useTranslation } from 'react-i18next' import Markdown from 'react-markdown' @@ -69,6 +69,10 @@ const AboutSettings: FC = () => { onOpenWebsite(url) } + const debug = async () => { + await window.api.devTools.toggle() + } + const showLicense = async () => { const { appPath } = await window.api.getAppInfo() openMinapp({ @@ -219,6 +223,14 @@ const AboutSettings: FC = () => { + + + + + {t('settings.about.debug.title')} + + + ) diff --git a/src/renderer/src/pages/settings/AssistantSettings/AssistantModelSettings.tsx b/src/renderer/src/pages/settings/AssistantSettings/AssistantModelSettings.tsx index 1c494b2ee2..1a22848ce5 100644 --- a/src/renderer/src/pages/settings/AssistantSettings/AssistantModelSettings.tsx +++ b/src/renderer/src/pages/settings/AssistantSettings/AssistantModelSettings.tsx @@ -24,7 +24,7 @@ const AssistantModelSettings: FC = ({ assistant, updateAssistant, updateA const [enableMaxTokens, setEnableMaxTokens] = useState(assistant?.settings?.enableMaxTokens ?? false) const [maxTokens, setMaxTokens] = useState(assistant?.settings?.maxTokens ?? 0) const [streamOutput, setStreamOutput] = useState(assistant?.settings?.streamOutput ?? true) - const [enableToolUse, setEnableToolUse] = useState(assistant?.settings?.enableToolUse ?? false) + const [toolUseMode, setToolUseMode] = useState(assistant?.settings?.toolUseMode ?? 'prompt') const [defaultModel, setDefaultModel] = useState(assistant?.defaultModel) const [topP, setTopP] = useState(assistant?.settings?.topP ?? 
1) const [customParameters, setCustomParameters] = useState( @@ -150,6 +150,7 @@ const AssistantModelSettings: FC = ({ assistant, updateAssistant, updateA setStreamOutput(true) setTopP(1) setCustomParameters([]) + setToolUseMode('prompt') updateAssistantSettings({ temperature: DEFAULT_TEMPERATURE, contextCount: DEFAULT_CONTEXTCOUNT, @@ -157,7 +158,8 @@ const AssistantModelSettings: FC = ({ assistant, updateAssistant, updateA maxTokens: 0, streamOutput: true, topP: 1, - customParameters: [] + customParameters: [], + toolUseMode: 'prompt' }) } @@ -181,7 +183,7 @@ const AssistantModelSettings: FC = ({ assistant, updateAssistant, updateA const formatSliderTooltip = (value?: number) => { if (value === undefined) return '' - return value === 20 ? '∞' : value.toString() + return value.toString() } return ( @@ -292,11 +294,11 @@ const AssistantModelSettings: FC = ({ assistant, updateAssistant, updateA @@ -379,14 +381,17 @@ const AssistantModelSettings: FC = ({ assistant, updateAssistant, updateA - - { - setEnableToolUse(checked) - updateAssistantSettings({ enableToolUse: checked }) - }} - /> + + diff --git a/src/renderer/src/pages/settings/DataSettings/AgentsSubscribeUrlSettings.tsx b/src/renderer/src/pages/settings/DataSettings/AgentsSubscribeUrlSettings.tsx new file mode 100755 index 0000000000..eb37f41737 --- /dev/null +++ b/src/renderer/src/pages/settings/DataSettings/AgentsSubscribeUrlSettings.tsx @@ -0,0 +1,47 @@ +import { HStack } from '@renderer/components/Layout' +import { useTheme } from '@renderer/context/ThemeProvider' +import { useSettings } from '@renderer/hooks/useSettings' +import { useAppDispatch } from '@renderer/store' +import { setAgentssubscribeUrl } from '@renderer/store/settings' +import Input from 'antd/es/input/Input' +import { FC } from 'react' +import { useTranslation } from 'react-i18next' + +import { SettingDivider, SettingGroup, SettingRow, SettingRowTitle, SettingTitle } from '..' + +const AgentsSubscribeUrlSettings: FC = () => { + const { t } = useTranslation() + const { theme } = useTheme() + const dispatch = useAppDispatch() + + const { agentssubscribeUrl } = useSettings() + + const handleAgentChange = (e: React.ChangeEvent) => { + dispatch(setAgentssubscribeUrl(e.target.value)) + } + + return ( + + + {t('agents.tag.agent')} + {t('settings.websearch.subscribe_add')} + + + + {t('settings.websearch.subscribe_url')} + + + + + + + ) +} + +export default AgentsSubscribeUrlSettings diff --git a/src/renderer/src/pages/settings/DataSettings/DataSettings.tsx b/src/renderer/src/pages/settings/DataSettings/DataSettings.tsx index 4178ac9df7..ec31fd6ae7 100644 --- a/src/renderer/src/pages/settings/DataSettings/DataSettings.tsx +++ b/src/renderer/src/pages/settings/DataSettings/DataSettings.tsx @@ -17,12 +17,13 @@ import { reset } from '@renderer/services/BackupService' import { AppInfo } from '@renderer/types' import { formatFileSize } from '@renderer/utils' import { Button, Typography } from 'antd' -import { FileText, FolderCog, FolderInput } from 'lucide-react' +import { FileText, FolderCog, FolderInput, Sparkle } from 'lucide-react' import { FC, useEffect, useState } from 'react' import { useTranslation } from 'react-i18next' import styled from 'styled-components' import { SettingContainer, SettingDivider, SettingGroup, SettingRow, SettingRowTitle, SettingTitle } from '..' 
+import AgentsSubscribeUrlSettings from './AgentsSubscribeUrlSettings' import ExportMenuOptions from './ExportMenuSettings' import JoplinSettings from './JoplinSettings' import MarkdownExportSettings from './MarkdownExportSettings' @@ -36,6 +37,7 @@ import YuqueSettings from './YuqueSettings' const DataSettings: FC = () => { const { t } = useTranslation() const [appInfo, setAppInfo] = useState() + const [cacheSize, setCacheSize] = useState('') const { size, removeAllFiles } = useKnowledgeFiles() const { theme } = useTheme() const [menu, setMenu] = useState('data') @@ -80,6 +82,7 @@ const DataSettings: FC = () => { title: 'settings.data.markdown_export.title', icon: }, + { key: 'divider_3', isDivider: true, text: t('settings.data.divider.third_party') }, { key: 'notion', title: 'settings.data.notion.title', icon: }, { @@ -101,11 +104,17 @@ const DataSettings: FC = () => { key: 'siyuan', title: 'settings.data.siyuan.title', icon: + }, + { + key: 'agentssubscribe_url', + title: 'agents.settings.title', + icon: } ] useEffect(() => { window.api.getAppInfo().then(setAppInfo) + window.api.getCacheSize().then(setCacheSize) }, []) const handleOpenPath = (path?: string) => { @@ -130,6 +139,7 @@ const DataSettings: FC = () => { onOk: async () => { try { await window.api.clearCache() + await window.api.getCacheSize().then(setCacheSize) window.message.success(t('settings.data.clear_cache.success')) } catch (error) { window.message.error(t('settings.data.clear_cache.error')) @@ -228,7 +238,10 @@ const DataSettings: FC = () => { - {t('settings.data.clear_cache.title')} + + {t('settings.data.clear_cache.title')} + {cacheSize && ({cacheSize}MB)} + + onClick={() => handleZoomFactor(0, true)} + style={{ marginLeft: 8 }} + icon={} + /> {isMac && ( diff --git a/src/renderer/src/pages/settings/MCPSettings/McpSettings.tsx b/src/renderer/src/pages/settings/MCPSettings/McpSettings.tsx index 2443d3e142..18b3c18cdd 100644 --- a/src/renderer/src/pages/settings/MCPSettings/McpSettings.tsx +++ b/src/renderer/src/pages/settings/MCPSettings/McpSettings.tsx @@ -167,6 +167,7 @@ const McpSettings: React.FC = () => { const localTools = await window.api.mcp.listTools(server) setTools(localTools) } catch (error) { + setLoadingServer(server.id) window.message.error({ content: t('settings.mcp.tools.loadError') + ' ' + formatError(error), key: 'mcp-tools-error' diff --git a/src/renderer/src/pages/settings/ProviderSettings/ModelListSearchBar.tsx b/src/renderer/src/pages/settings/ProviderSettings/ModelListSearchBar.tsx index bf59cf2e50..8a9e7cd68d 100644 --- a/src/renderer/src/pages/settings/ProviderSettings/ModelListSearchBar.tsx +++ b/src/renderer/src/pages/settings/ProviderSettings/ModelListSearchBar.tsx @@ -78,7 +78,7 @@ const ModelListSearchBar: React.FC = ({ onSearch }) => visible: { opacity: 1, transition: { duration: 0.1, delay: 0.3, ease: 'easeInOut' } }, hidden: { opacity: 0, transition: { duration: 0.1, ease: 'easeInOut' } } }} - style={{ cursor: 'pointer' }} + style={{ cursor: 'pointer', display: 'flex' }} onClick={() => setSearchVisible(true)}> diff --git a/src/renderer/src/pages/settings/ProviderSettings/ProviderSetting.tsx b/src/renderer/src/pages/settings/ProviderSettings/ProviderSetting.tsx index 2f58a924dd..31ea151cbb 100644 --- a/src/renderer/src/pages/settings/ProviderSettings/ProviderSetting.tsx +++ b/src/renderer/src/pages/settings/ProviderSettings/ProviderSetting.tsx @@ -404,7 +404,7 @@ const ProviderSetting: FC = ({ provider: _provider }) => { {provider.id === 'copilot' && } - + {t('common.models')} 
{!isEmpty(models) && } diff --git a/src/renderer/src/providers/AiProvider/AnthropicProvider.ts b/src/renderer/src/providers/AiProvider/AnthropicProvider.ts index 2bfd4b6fb5..159b5f4292 100644 --- a/src/renderer/src/providers/AiProvider/AnthropicProvider.ts +++ b/src/renderer/src/providers/AiProvider/AnthropicProvider.ts @@ -13,7 +13,7 @@ import { WebSearchToolResultError } from '@anthropic-ai/sdk/resources' import { DEFAULT_MAX_TOKENS } from '@renderer/config/constant' -import { isReasoningModel, isWebSearchModel } from '@renderer/config/models' +import { isClaudeReasoningModel, isReasoningModel, isWebSearchModel } from '@renderer/config/models' import { getStoreSetting } from '@renderer/hooks/useSettings' import i18n from '@renderer/i18n' import { getAssistantSettings, getDefaultModel, getTopNamingModel } from '@renderer/services/AssistantService' @@ -43,6 +43,7 @@ import type { Message } from '@renderer/types/newMessage' import { removeSpecialCharactersForTopicName } from '@renderer/utils' import { anthropicToolUseToMcpTool, + isEnabledToolUse, mcpToolCallResponseToAnthropicMessage, mcpToolsToAnthropicTools, parseAndCallTools @@ -152,24 +153,18 @@ export default class AnthropicProvider extends BaseProvider { } as WebSearchTool20250305 } - /** - * Get the temperature - * @param assistant - The assistant - * @param model - The model - * @returns The temperature - */ - private getTemperature(assistant: Assistant, model: Model) { - return isReasoningModel(model) ? undefined : assistant?.settings?.temperature + override getTemperature(assistant: Assistant, model: Model): number | undefined { + if (assistant.settings?.reasoning_effort && isClaudeReasoningModel(model)) { + return undefined + } + return assistant.settings?.temperature } - /** - * Get the top P - * @param assistant - The assistant - * @param model - The model - * @returns The top P - */ - private getTopP(assistant: Assistant, model: Model) { - return isReasoningModel(model) ? 
undefined : assistant?.settings?.topP + override getTopP(assistant: Assistant, model: Model): number | undefined { + if (assistant.settings?.reasoning_effort && isClaudeReasoningModel(model)) { + return undefined + } + return assistant.settings?.topP } /** @@ -213,7 +208,7 @@ export default class AnthropicProvider extends BaseProvider { public async completions({ messages, assistant, mcpTools, onChunk, onFilterMessages }: CompletionsParams) { const defaultModel = getDefaultModel() const model = assistant.model || defaultModel - const { contextCount, maxTokens, streamOutput, enableToolUse } = getAssistantSettings(assistant) + const { contextCount, maxTokens, streamOutput } = getAssistantSettings(assistant) const userMessagesParams: MessageParam[] = [] @@ -235,7 +230,7 @@ export default class AnthropicProvider extends BaseProvider { const { tools } = this.setupToolsConfig({ model, mcpTools, - enableToolUse + enableToolUse: isEnabledToolUse(assistant) }) if (this.useSystemPromptForTools && mcpTools && mcpTools.length) { @@ -678,7 +673,7 @@ export default class AnthropicProvider extends BaseProvider { const body = { model: model.id, messages: [{ role: 'user' as const, content: 'hi' }], - max_tokens: 100, + max_tokens: 2, // api文档写的 x>1 stream } diff --git a/src/renderer/src/providers/AiProvider/BaseProvider.ts b/src/renderer/src/providers/AiProvider/BaseProvider.ts index 5773550022..48c1e34839 100644 --- a/src/renderer/src/providers/AiProvider/BaseProvider.ts +++ b/src/renderer/src/providers/AiProvider/BaseProvider.ts @@ -1,5 +1,5 @@ import Logger from '@renderer/config/logger' -import { isFunctionCallingModel } from '@renderer/config/models' +import { isFunctionCallingModel, isNotSupportTemperatureAndTopP } from '@renderer/config/models' import { REFERENCE_PROMPT } from '@renderer/config/prompts' import { getLMStudioKeepAliveTime } from '@renderer/hooks/useLMStudio' import type { @@ -103,6 +103,14 @@ export default abstract class BaseProvider { return this.provider.id === 'lmstudio' ? getLMStudioKeepAliveTime() : undefined } + public getTemperature(assistant: Assistant, model: Model): number | undefined { + return isNotSupportTemperatureAndTopP(model) ? undefined : assistant.settings?.temperature + } + + public getTopP(assistant: Assistant, model: Model): number | undefined { + return isNotSupportTemperatureAndTopP(model) ? 
undefined : assistant.settings?.topP + } + public async fakeCompletions({ onChunk }: CompletionsParams) { for (let i = 0; i < 100; i++) { await delay(0.01) diff --git a/src/renderer/src/providers/AiProvider/GeminiProvider.ts b/src/renderer/src/providers/AiProvider/GeminiProvider.ts index 234b600477..46ea431f6f 100644 --- a/src/renderer/src/providers/AiProvider/GeminiProvider.ts +++ b/src/renderer/src/providers/AiProvider/GeminiProvider.ts @@ -1,6 +1,7 @@ import { Content, File, + FinishReason, FunctionCall, GenerateContentConfig, GenerateContentResponse, @@ -53,6 +54,7 @@ import type { Message, Response } from '@renderer/types/newMessage' import { removeSpecialCharactersForTopicName } from '@renderer/utils' import { geminiFunctionCallToMcpTool, + isEnabledToolUse, mcpToolCallResponseToGeminiMessage, mcpToolsToGeminiTools, parseAndCallTools @@ -339,7 +341,7 @@ export default class GeminiProvider extends BaseProvider { await this.generateImageByChat({ messages, assistant, onChunk }) return } - const { contextCount, maxTokens, streamOutput, enableToolUse } = getAssistantSettings(assistant) + const { contextCount, maxTokens, streamOutput } = getAssistantSettings(assistant) const userMessages = filterUserRoleStartMessages( filterEmptyMessages(filterContextMessages(takeRight(messages, contextCount + 2))) @@ -359,7 +361,7 @@ export default class GeminiProvider extends BaseProvider { const { tools } = this.setupToolsConfig({ mcpTools, model, - enableToolUse + enableToolUse: isEnabledToolUse(assistant) }) if (this.useSystemPromptForTools) { @@ -379,8 +381,8 @@ export default class GeminiProvider extends BaseProvider { safetySettings: this.getSafetySettings(), // generate image don't need system instruction systemInstruction: isGemmaModel(model) ? undefined : systemInstruction, - temperature: assistant?.settings?.temperature, - topP: assistant?.settings?.topP, + temperature: this.getTemperature(assistant, model), + topP: this.getTopP(assistant, model), maxOutputTokens: maxTokens, tools: tools, ...this.getBudgetToken(assistant, model), @@ -912,14 +914,32 @@ export default class GeminiProvider extends BaseProvider { return { valid: false, error: new Error('No model found') } } + let config: GenerateContentConfig = { + maxOutputTokens: 1 + } + if (isGeminiReasoningModel(model)) { + config = { + ...config, + thinkingConfig: { + includeThoughts: false + } as ThinkingConfig + } + } + + if (isGenerateImageModel(model)) { + config = { + ...config, + responseModalities: [Modality.TEXT, Modality.IMAGE], + responseMimeType: 'text/plain' + } + } + try { if (!stream) { const result = await this.sdk.models.generateContent({ model: model.id, contents: [{ role: 'user', parts: [{ text: 'hi' }] }], - config: { - maxOutputTokens: 100 - } + config: config }) if (isEmpty(result.text)) { throw new Error('Empty response') @@ -928,14 +948,12 @@ export default class GeminiProvider extends BaseProvider { const response = await this.sdk.models.generateContentStream({ model: model.id, contents: [{ role: 'user', parts: [{ text: 'hi' }] }], - config: { - maxOutputTokens: 100 - } + config: config }) // 等待整个流式响应结束 let hasContent = false for await (const chunk of response) { - if (chunk.text && chunk.text.length > 0) { + if (chunk.candidates && chunk.candidates[0].finishReason === FinishReason.MAX_TOKENS) { hasContent = true break } diff --git a/src/renderer/src/providers/AiProvider/OpenAIProvider.ts b/src/renderer/src/providers/AiProvider/OpenAIProvider.ts index 3c59f0e71b..f9f78cebe4 100644 --- 
a/src/renderer/src/providers/AiProvider/OpenAIProvider.ts +++ b/src/renderer/src/providers/AiProvider/OpenAIProvider.ts @@ -1,9 +1,9 @@ import { findTokenLimit, getOpenAIWebSearchParams, + isClaudeReasoningModel, isHunyuanSearchModel, isOpenAIReasoningModel, - isOpenAIWebSearch, isReasoningModel, isSupportedModel, isSupportedReasoningEffortGrokModel, @@ -53,6 +53,7 @@ import { convertLinksToZhipu } from '@renderer/utils/linkConverter' import { + isEnabledToolUse, mcpToolCallResponseToOpenAICompatibleMessage, mcpToolsToOpenAIChatTools, openAIToolsToMcpTool, @@ -192,14 +193,18 @@ export default class OpenAIProvider extends BaseOpenAIProvider { } as ChatCompletionMessageParam } - /** - * Get the temperature for the assistant - * @param assistant - The assistant - * @param model - The model - * @returns The temperature - */ - override getTemperature(assistant: Assistant, model: Model) { - return isReasoningModel(model) || isOpenAIWebSearch(model) ? undefined : assistant?.settings?.temperature + override getTemperature(assistant: Assistant, model: Model): number | undefined { + if (isOpenAIReasoningModel(model) || (assistant.settings?.reasoning_effort && isClaudeReasoningModel(model))) { + return undefined + } + return assistant.settings?.temperature + } + + override getTopP(assistant: Assistant, model: Model): number | undefined { + if (isOpenAIReasoningModel(model) || (assistant.settings?.reasoning_effort && isClaudeReasoningModel(model))) { + return undefined + } + return assistant.settings?.topP } /** @@ -229,20 +234,6 @@ export default class OpenAIProvider extends BaseOpenAIProvider { return {} } - /** - * Get the top P for the assistant - * @param assistant - The assistant - * @param model - The model - * @returns The top P - */ - override getTopP(assistant: Assistant, model: Model) { - if (isReasoningModel(model) || isOpenAIWebSearch(model)) { - return undefined - } - - return assistant?.settings?.topP - } - /** * Get the reasoning effort for the assistant * @param assistant - The assistant @@ -361,7 +352,7 @@ export default class OpenAIProvider extends BaseOpenAIProvider { const defaultModel = getDefaultModel() const model = assistant.model || defaultModel - const { contextCount, maxTokens, streamOutput, enableToolUse } = getAssistantSettings(assistant) + const { contextCount, maxTokens, streamOutput } = getAssistantSettings(assistant) const isEnabledBultinWebSearch = assistant.enableWebSearch messages = addImageFileToContents(messages) const enableReasoning = @@ -375,7 +366,11 @@ export default class OpenAIProvider extends BaseOpenAIProvider { content: `Formatting re-enabled${systemMessage ? 
'\n' + systemMessage.content : ''}` } } - const { tools } = this.setupToolsConfig({ mcpTools, model, enableToolUse }) + const { tools } = this.setupToolsConfig({ + mcpTools, + model, + enableToolUse: isEnabledToolUse(assistant) + }) if (this.useSystemPromptForTools) { systemMessage.content = buildSystemPrompt(systemMessage.content || '', mcpTools) @@ -1113,6 +1108,9 @@ export default class OpenAIProvider extends BaseOpenAIProvider { const body = { model: model.id, messages: [{ role: 'user', content: 'hi' }], + max_completion_tokens: 1, // openAI + max_tokens: 1, // openAI deprecated 但大部分OpenAI兼容的提供商继续用这个头 + enable_thinking: false, // qwen3 stream } @@ -1199,11 +1197,15 @@ export default class OpenAIProvider extends BaseOpenAIProvider { public async getEmbeddingDimensions(model: Model): Promise { await this.checkIsCopilot() - const data = await this.sdk.embeddings.create({ - model: model.id, - input: model?.provider === 'baidu-cloud' ? ['hi'] : 'hi' - }) - return data.data[0].embedding.length + try { + const data = await this.sdk.embeddings.create({ + model: model.id, + input: model?.provider === 'baidu-cloud' ? ['hi'] : 'hi' + }) + return data.data[0].embedding.length + } catch (e) { + return 0 + } } public async checkIsCopilot() { diff --git a/src/renderer/src/providers/AiProvider/OpenAIResponseProvider.ts b/src/renderer/src/providers/AiProvider/OpenAIResponseProvider.ts index 500d212a6b..baacc96c44 100644 --- a/src/renderer/src/providers/AiProvider/OpenAIResponseProvider.ts +++ b/src/renderer/src/providers/AiProvider/OpenAIResponseProvider.ts @@ -37,6 +37,7 @@ import { removeSpecialCharactersForTopicName } from '@renderer/utils' import { addImageFileToContents } from '@renderer/utils/formats' import { convertLinks } from '@renderer/utils/linkConverter' import { + isEnabledToolUse, mcpToolCallResponseToOpenAIMessage, mcpToolsToOpenAIResponseTools, openAIToolsToMcpTool, @@ -191,26 +192,6 @@ export abstract class BaseOpenAIProvider extends BaseProvider { return 5 * 1000 * 60 } - /** - * Get the temperature for the assistant - * @param assistant - The assistant - * @param model - The model - * @returns The temperature - */ - protected getTemperature(assistant: Assistant, model: Model) { - return isOpenAIReasoningModel(model) || isOpenAILLMModel(model) ? undefined : assistant?.settings?.temperature - } - - /** - * Get the top P for the assistant - * @param assistant - The assistant - * @param model - The model - * @returns The top P - */ - protected getTopP(assistant: Assistant, model: Model) { - return isOpenAIReasoningModel(model) || isOpenAILLMModel(model) ? 
undefined : assistant?.settings?.topP - } - private getResponseReasoningEffort(assistant: Assistant, model: Model) { if (!isSupportedReasoningEffortOpenAIModel(model)) { return {} @@ -309,7 +290,7 @@ export abstract class BaseOpenAIProvider extends BaseProvider { } const defaultModel = getDefaultModel() const model = assistant.model || defaultModel - const { contextCount, maxTokens, streamOutput, enableToolUse } = getAssistantSettings(assistant) + const { contextCount, maxTokens, streamOutput } = getAssistantSettings(assistant) const isEnabledBuiltinWebSearch = assistant.enableWebSearch let tools: OpenAI.Responses.Tool[] = [] @@ -338,7 +319,7 @@ export abstract class BaseOpenAIProvider extends BaseProvider { const { tools: extraTools } = this.setupToolsConfig({ mcpTools, model, - enableToolUse + enableToolUse: isEnabledToolUse(assistant) }) tools = tools.concat(extraTools) @@ -926,6 +907,7 @@ export abstract class BaseOpenAIProvider extends BaseProvider { const response = await this.sdk.responses.create({ model: model.id, input: [{ role: 'user', content: 'hi' }], + max_output_tokens: 1, stream: true }) let hasContent = false @@ -942,6 +924,7 @@ export abstract class BaseOpenAIProvider extends BaseProvider { const response = await this.sdk.responses.create({ model: model.id, input: [{ role: 'user', content: 'hi' }], + max_output_tokens: 1, stream: false }) if (!response.output_text) { diff --git a/src/renderer/src/services/AssistantService.ts b/src/renderer/src/services/AssistantService.ts index 418ce6b4a3..6ef0a4474f 100644 --- a/src/renderer/src/services/AssistantService.ts +++ b/src/renderer/src/services/AssistantService.ts @@ -102,13 +102,13 @@ export const getAssistantSettings = (assistant: Assistant): AssistantSettings => } return { - contextCount: contextCount === 20 ? 100000 : contextCount, + contextCount: contextCount === 100 ? 100000 : contextCount, temperature: assistant?.settings?.temperature ?? DEFAULT_TEMPERATURE, topP: assistant?.settings?.topP ?? 1, enableMaxTokens: assistant?.settings?.enableMaxTokens ?? false, maxTokens: getAssistantMaxTokens(), streamOutput: assistant?.settings?.streamOutput ?? true, - enableToolUse: assistant?.settings?.enableToolUse ?? false, + toolUseMode: assistant?.settings?.toolUseMode ?? 'prompt', hideMessages: assistant?.settings?.hideMessages ?? false, defaultModel: assistant?.defaultModel ?? undefined, customParameters: assistant?.settings?.customParameters ?? [] diff --git a/src/renderer/src/services/MessagesService.ts b/src/renderer/src/services/MessagesService.ts index d1dea18b9f..91a676e7a1 100644 --- a/src/renderer/src/services/MessagesService.ts +++ b/src/renderer/src/services/MessagesService.ts @@ -41,9 +41,9 @@ export { export function getContextCount(assistant: Assistant, messages: Message[]) { const rawContextCount = assistant?.settings?.contextCount ?? DEFAULT_CONTEXTCOUNT - const maxContextCount = rawContextCount === 20 ? 100000 : rawContextCount + const maxContextCount = rawContextCount === 100 ? 100000 : rawContextCount - const _messages = rawContextCount === 20 ? 
takeRight(messages, 1000) : takeRight(messages, maxContextCount) + const _messages = takeRight(messages, maxContextCount) const clearIndex = _messages.findLastIndex((message) => message.type === 'clear') diff --git a/src/renderer/src/services/ModelMessageService.ts b/src/renderer/src/services/ModelMessageService.ts index 4e9c1d5729..b48543ea19 100644 --- a/src/renderer/src/services/ModelMessageService.ts +++ b/src/renderer/src/services/ModelMessageService.ts @@ -57,7 +57,7 @@ export function processPostsuffixQwen3Model( } else { // 思考模式未启用,添加 postsuffix if (!content.endsWith(postsuffix)) { - return content + postsuffix + return content + ' ' + postsuffix } } } else if (Array.isArray(content)) { diff --git a/src/renderer/src/store/messageBlock.ts b/src/renderer/src/store/messageBlock.ts index 2a4ac9845c..c9cc55cec3 100644 --- a/src/renderer/src/store/messageBlock.ts +++ b/src/renderer/src/store/messageBlock.ts @@ -236,10 +236,11 @@ const formatCitationsFromBlock = (block: CitationMessageBlock | undefined): Cita }) ) } - // 4. Deduplicate by URL and Renumber Sequentially + // 4. Deduplicate non-knowledge citations by URL and Renumber Sequentially const urlSet = new Set() return formattedCitations .filter((citation) => { + if (citation.type === 'knowledge') return true if (!citation.url || urlSet.has(citation.url)) return false urlSet.add(citation.url) return true diff --git a/src/renderer/src/store/migrate.ts b/src/renderer/src/store/migrate.ts index 4e60ee922b..029b05125c 100644 --- a/src/renderer/src/store/migrate.ts +++ b/src/renderer/src/store/migrate.ts @@ -1316,6 +1316,33 @@ const migrateConfig = { } catch (error) { return state } + }, + '101': (state: RootState) => { + try { + state.assistants.assistants.forEach((assistant) => { + if (assistant.settings) { + // @ts-ignore eslint-disable-next-line + if (assistant.settings.enableToolUse) { + // @ts-ignore eslint-disable-next-line + assistant.settings.toolUseMode = assistant.settings.enableToolUse ? 
'function' : 'prompt' + // @ts-ignore eslint-disable-next-line + delete assistant.settings.enableToolUse + } + } + }) + if (state.shortcuts) { + state.shortcuts.shortcuts.push({ + key: 'exit_fullscreen', + shortcut: ['Escape'], + editable: false, + enabled: true, + system: true + }) + } + return state + } catch (error) { + return state + } } } diff --git a/src/renderer/src/store/settings.ts b/src/renderer/src/store/settings.ts index 639646717b..4dcc7203a8 100644 --- a/src/renderer/src/store/settings.ts +++ b/src/renderer/src/store/settings.ts @@ -111,6 +111,8 @@ export interface SettingsState { siyuanToken: string | null siyuanBoxId: string | null siyuanRootPath: string | null + // 订阅的助手地址 + agentssubscribeUrl: string | null // MinApps maxKeepAliveMinapps: number showOpenedMinappsInSidebar: boolean @@ -218,6 +220,7 @@ export const initialState: SettingsState = { siyuanToken: null, siyuanBoxId: null, siyuanRootPath: null, + agentssubscribeUrl: '', // MinApps maxKeepAliveMinapps: 3, showOpenedMinappsInSidebar: true, @@ -493,6 +496,9 @@ const settingsSlice = createSlice({ setSiyuanRootPath: (state, action: PayloadAction) => { state.siyuanRootPath = action.payload }, + setAgentssubscribeUrl: (state, action: PayloadAction) => { + state.agentssubscribeUrl = action.payload + }, setMaxKeepAliveMinapps: (state, action: PayloadAction) => { state.maxKeepAliveMinapps = action.payload }, @@ -599,6 +605,7 @@ export const { setSiyuanApiUrl, setSiyuanToken, setSiyuanBoxId, + setAgentssubscribeUrl, setSiyuanRootPath, setMaxKeepAliveMinapps, setShowOpenedMinappsInSidebar, diff --git a/src/renderer/src/store/shortcuts.ts b/src/renderer/src/store/shortcuts.ts index 03f7eaee3c..cafe278856 100644 --- a/src/renderer/src/store/shortcuts.ts +++ b/src/renderer/src/store/shortcuts.ts @@ -79,6 +79,13 @@ const initialState: ShortcutsState = { editable: true, enabled: true, system: false + }, + { + key: 'exit_fullscreen', + shortcut: ['Escape'], + editable: false, + enabled: true, + system: true } ] } diff --git a/src/renderer/src/store/thunk/messageThunk.ts b/src/renderer/src/store/thunk/messageThunk.ts index a8a58dc3d1..86707a938c 100644 --- a/src/renderer/src/store/thunk/messageThunk.ts +++ b/src/renderer/src/store/thunk/messageThunk.ts @@ -726,6 +726,7 @@ export const loadTopicMessagesThunk = async (dispatch: AppDispatch, getState: () => RootState) => { const state = getState() const topicMessagesExist = !!state.messages.messageIdsByTopic[topicId] + dispatch(newMessagesActions.setCurrentTopicId(topicId)) if (topicMessagesExist && !forceReload) { return diff --git a/src/renderer/src/types/index.ts b/src/renderer/src/types/index.ts index e66e629043..ac10c11b3d 100644 --- a/src/renderer/src/types/index.ts +++ b/src/renderer/src/types/index.ts @@ -56,12 +56,12 @@ export type AssistantSettings = { maxTokens: number | undefined enableMaxTokens: boolean streamOutput: boolean - enableToolUse: boolean hideMessages: boolean defaultModel?: Model customParameters?: AssistantSettingCustomParameters[] reasoning_effort?: ReasoningEffortOptions qwenThinkMode?: boolean + toolUseMode?: 'function' | 'prompt' } export type Agent = Omit & { diff --git a/src/renderer/src/utils/extract.ts b/src/renderer/src/utils/extract.ts index 4dd02ead69..2c71345255 100644 --- a/src/renderer/src/utils/extract.ts +++ b/src/renderer/src/utils/extract.ts @@ -1,4 +1,5 @@ import { XMLParser } from 'fast-xml-parser' + export interface ExtractResults { websearch?: WebsearchExtractResults knowledge?: KnowledgeExtractResults @@ -27,7 +28,6 @@ export const 
extractInfoFromXML = (text: string): ExtractResults => { return name === 'question' || name === 'links' } }) - const extractResults: ExtractResults = parser.parse(text) // Logger.log('Extracted results:', extractResults) - return extractResults + return parser.parse(text) } diff --git a/src/renderer/src/utils/formats.ts b/src/renderer/src/utils/formats.ts index 43f539d79f..a83ca4c632 100644 --- a/src/renderer/src/utils/formats.ts +++ b/src/renderer/src/utils/formats.ts @@ -2,6 +2,22 @@ import type { Message } from '@renderer/types/newMessage' import { findImageBlocks, getMainTextContent } from './messageUtils/find' +/** + * Clean Markdown content + * @param text The text to clean + * @returns The cleaned text + */ +export function cleanMarkdownContent(text: string): string { + if (!text) return '' + let cleaned = text.replace(/!\[.*?]\(.*?\)/g, '') // remove images + cleaned = cleaned.replace(/\[(.*?)]\(.*?\)/g, '$1') // replace links with their plain text + cleaned = cleaned.replace(/https?:\/\/\S+/g, '') // remove URLs + cleaned = cleaned.replace(/[-—–_=+]{3,}/g, ' ') // replace separator runs with a space + cleaned = cleaned.replace(/[¥$€£¥%@#&*^()[\]{}<>~`'"\\|/_.]+/g, '') // remove special characters + cleaned = cleaned.replace(/\s+/g, ' ').trim() // normalize whitespace + return cleaned +} + export function escapeDollarNumber(text: string) { let escapedText = '' @@ -20,7 +36,7 @@ export function escapeDollarNumber(text: string) { } export function escapeBrackets(text: string) { - const pattern = /(```[\s\S]*?```|`.*?`)|\\\[([\s\S]*?[^\\])\\\]|\\\((.*?)\\\)/g + const pattern = /(```[\s\S]*?```|`.*?`)|\\\[([\s\S]*?[^\\])\\]|\\\((.*?)\\\)/g return text.replace(pattern, (match, codeBlock, squareBracket, roundBracket) => { if (codeBlock) { return codeBlock @@ -102,7 +118,7 @@ export function withGenerateImage(message: Message): { content: string; images?: const originalContent = getMainTextContent(message) const imagePattern = new RegExp(`!\\[[^\\]]*\\]\\((.*?)\\s*("(?:.*[^"])")?\\s*\\)`) const images: string[] = [] - let processedContent = originalContent + let processedContent: string processedContent = originalContent.replace(imagePattern, (_, url) => { if (url) { diff --git a/src/renderer/src/utils/mcp-tools.ts b/src/renderer/src/utils/mcp-tools.ts index e59b9ff1e5..4c446ffa78 100644 --- a/src/renderer/src/utils/mcp-tools.ts +++ b/src/renderer/src/utils/mcp-tools.ts @@ -7,10 +7,18 @@ import { } from '@anthropic-ai/sdk/resources' import { Content, FunctionCall, Part, Tool, Type as GeminiSchemaType } from '@google/genai' import Logger from '@renderer/config/logger' -import { isVisionModel } from '@renderer/config/models' +import { isFunctionCallingModel, isVisionModel } from '@renderer/config/models' import store from '@renderer/store' import { addMCPServer } from '@renderer/store/mcp' -import { MCPCallToolResponse, MCPServer, MCPTool, MCPToolResponse, Model, ToolUseResponse } from '@renderer/types' +import { + Assistant, + MCPCallToolResponse, + MCPServer, + MCPTool, + MCPToolResponse, + Model, + ToolUseResponse +} from '@renderer/types' import type { MCPToolCompleteChunk, MCPToolInProgressChunk } from '@renderer/types/chunk' import { ChunkType } from '@renderer/types/chunk' import { isArray, isObject, pull, transform } from 'lodash' @@ -824,3 +832,13 @@ export function mcpToolCallResponseToGeminiMessage( return message } + +export function isEnabledToolUse(assistant: Assistant) { + if (assistant.model) { + if (isFunctionCallingModel(assistant.model)) { + return assistant.settings?.toolUseMode === 'function' + } + } + + return false +} diff --git a/yarn.lock b/yarn.lock index 20ecf4c6a1..1dcaa49011 100644 ---
a/yarn.lock +++ b/yarn.lock @@ -452,173 +452,194 @@ __metadata: languageName: node linkType: hard -"@cherrystudio/embedjs-interfaces@npm:0.1.29": - version: 0.1.29 - resolution: "@cherrystudio/embedjs-interfaces@npm:0.1.29" +"@cherrystudio/embedjs-interfaces@npm:0.1.30": + version: 0.1.30 + resolution: "@cherrystudio/embedjs-interfaces@npm:0.1.30" dependencies: "@langchain/core": "npm:^0.3.26" debug: "npm:^4.4.0" md5: "npm:^2.3.0" uuid: "npm:^11.0.3" - checksum: 10c0/df7ac19aecae137e5af427bb1605232993ef1510e3d41875cb8dd604e7a692778758e5f640a6ab839aaa4193c21012a50b7dfb0aa42c434e668f99faa0fd04d7 + checksum: 10c0/1d0eca816d89df25adfa15eb0b6ce67e8b3446966886c4e5e84f4c657daf3b5cad728c953479e8f317136a3c86ca512ebf13ceb070462da733eaab02937bc460 languageName: node linkType: hard -"@cherrystudio/embedjs-libsql@npm:^0.1.28": - version: 0.1.29 - resolution: "@cherrystudio/embedjs-libsql@npm:0.1.29" +"@cherrystudio/embedjs-interfaces@npm:0.1.31": + version: 0.1.31 + resolution: "@cherrystudio/embedjs-interfaces@npm:0.1.31" dependencies: - "@cherrystudio/embedjs-interfaces": "npm:0.1.29" - "@cherrystudio/embedjs-utils": "npm:0.1.29" + "@langchain/core": "npm:^0.3.26" + debug: "npm:^4.4.0" + md5: "npm:^2.3.0" + uuid: "npm:^11.0.3" + checksum: 10c0/da4de44f48a332c20ed891899c1e0fc06e0238df8d34450f58a52394efe3cd598f21f6feaedb3410cef6d3a86c6a2e2ca1fad574fedb5e256c38b82c72668d55 + languageName: node + linkType: hard + +"@cherrystudio/embedjs-libsql@npm:^0.1.31": + version: 0.1.31 + resolution: "@cherrystudio/embedjs-libsql@npm:0.1.31" + dependencies: + "@cherrystudio/embedjs-interfaces": "npm:0.1.30" + "@cherrystudio/embedjs-utils": "npm:0.1.30" "@libsql/client": "npm:^0.14.0" debug: "npm:^4.4.0" - checksum: 10c0/655da13d5e192bb0d46e36cbe1ee444b290f6d98d39ad4ac72004fa0aa1a2f5c6dc7a41dd2a9631cc80014501e3423fbd3a822f9fcb331617ab0a43db9692410 + checksum: 10c0/248453e07b7ff1661f18213f69d74a0ab2e5d722d3ae5409240fd38cf3c263da5c8a224635f6ec4cf823cdaa91846ba0f4890d64872133950810afcfd8512498 languageName: node linkType: hard -"@cherrystudio/embedjs-loader-csv@npm:^0.1.28": - version: 0.1.29 - resolution: "@cherrystudio/embedjs-loader-csv@npm:0.1.29" +"@cherrystudio/embedjs-loader-csv@npm:^0.1.31": + version: 0.1.31 + resolution: "@cherrystudio/embedjs-loader-csv@npm:0.1.31" dependencies: - "@cherrystudio/embedjs-interfaces": "npm:0.1.29" - "@cherrystudio/embedjs-utils": "npm:0.1.29" + "@cherrystudio/embedjs-interfaces": "npm:0.1.31" + "@cherrystudio/embedjs-utils": "npm:0.1.31" csv-parse: "npm:^5.6.0" debug: "npm:^4.4.0" md5: "npm:^2.3.0" - checksum: 10c0/7c54791fe836839bf3b6a882a9e4c5656b4f30e54aa4e8967b380bc858fd76e03b2ca39b050bc5c06ffbc9e0c722d91d5dd0acf9edf576ff279805d718dfd437 + checksum: 10c0/810a1eaf6bad7364856933b3752c698df033c4eb4c857eea22bd4da2143ae074e67857f106a7c9499817601e3420247d1d0e5ba319cf28d27f26a7274d75460e languageName: node linkType: hard -"@cherrystudio/embedjs-loader-image@npm:^0.1.28": - version: 0.1.29 - resolution: "@cherrystudio/embedjs-loader-image@npm:0.1.29" +"@cherrystudio/embedjs-loader-image@npm:^0.1.31": + version: 0.1.31 + resolution: "@cherrystudio/embedjs-loader-image@npm:0.1.31" dependencies: - "@cherrystudio/embedjs-interfaces": "npm:0.1.29" - "@cherrystudio/embedjs-utils": "npm:0.1.29" + "@cherrystudio/embedjs-interfaces": "npm:0.1.31" + "@cherrystudio/embedjs-utils": "npm:0.1.31" "@langchain/core": "npm:^0.3.26" debug: "npm:^4.4.0" exifremove: "npm:^1.0.1" md5: "npm:^2.3.0" mime: "npm:^4.0.6" stream-mime-type: "npm:^2.0.0" - checksum: 
10c0/cbc43bf0be38ccd231a35ee06f160fee4628267a912f222c2c326e6d383a0477e1faad1910c9cd485ef8857b63d4404a9797c7e6a9661773345a4cca1ce956cf + checksum: 10c0/7e367a9722c30423dc26af795ef553120cc61c807c6d13e222eee336732bbe5a5e211cc66fb15941bc9fc05749a00200df6d741e3ae9ae9ec7a7d29db8dea075 languageName: node linkType: hard -"@cherrystudio/embedjs-loader-markdown@npm:^0.1.28": - version: 0.1.29 - resolution: "@cherrystudio/embedjs-loader-markdown@npm:0.1.29" +"@cherrystudio/embedjs-loader-markdown@npm:^0.1.31": + version: 0.1.31 + resolution: "@cherrystudio/embedjs-loader-markdown@npm:0.1.31" dependencies: - "@cherrystudio/embedjs-interfaces": "npm:0.1.29" - "@cherrystudio/embedjs-loader-web": "npm:0.1.29" + "@cherrystudio/embedjs-interfaces": "npm:0.1.31" + "@cherrystudio/embedjs-loader-web": "npm:0.1.31" debug: "npm:^4.4.0" md5: "npm:^2.3.0" micromark: "npm:^4.0.1" micromark-extension-gfm: "npm:^3.0.0" micromark-extension-mdx-jsx: "npm:^3.0.1" - checksum: 10c0/068393c00321a03a7b7881bb9b4b3b115440a91496b4fef43ad9b7f352c763aa59a6a33f69c9db39f2ed4c0a727f7e1dcd28448f7a446a5ec6a62edbd035f5a8 + checksum: 10c0/3e917eafe12331750a2702ae50d3aaf81a33ba43de111edcd0fe59a464c02149c44a124cfae8125374973313d70cd8d5ed2f091a85421238301859ec48aec4b0 languageName: node linkType: hard -"@cherrystudio/embedjs-loader-msoffice@npm:^0.1.28": - version: 0.1.29 - resolution: "@cherrystudio/embedjs-loader-msoffice@npm:0.1.29" +"@cherrystudio/embedjs-loader-msoffice@npm:^0.1.31": + version: 0.1.31 + resolution: "@cherrystudio/embedjs-loader-msoffice@npm:0.1.31" dependencies: - "@cherrystudio/embedjs-interfaces": "npm:0.1.29" - "@cherrystudio/embedjs-utils": "npm:0.1.29" + "@cherrystudio/embedjs-interfaces": "npm:0.1.31" + "@cherrystudio/embedjs-utils": "npm:0.1.31" "@langchain/textsplitters": "npm:^0.1.0" md5: "npm:^2.3.0" office-text-extractor: "npm:^3.0.3" - checksum: 10c0/be97eb2278d29f06b569b0aa2fd2b3640caf43207268f5c5cbe16fb77776fe026e4e0d5a9c6360f61c4af439561022f6f4becbe97c2b903d1d446021ab3bf38f + checksum: 10c0/57e688611dfa3481710231721608d0934ab31b726a22d3932a88fc911a16e1c7781537ec01e2906a7d22173950e7e46f4a5c56e315415222087ee09fefbcb98d languageName: node linkType: hard -"@cherrystudio/embedjs-loader-pdf@npm:^0.1.28": - version: 0.1.29 - resolution: "@cherrystudio/embedjs-loader-pdf@npm:0.1.29" +"@cherrystudio/embedjs-loader-pdf@npm:^0.1.31": + version: 0.1.31 + resolution: "@cherrystudio/embedjs-loader-pdf@npm:0.1.31" dependencies: - "@cherrystudio/embedjs-interfaces": "npm:0.1.29" - "@cherrystudio/embedjs-utils": "npm:0.1.29" + "@cherrystudio/embedjs-interfaces": "npm:0.1.31" + "@cherrystudio/embedjs-utils": "npm:0.1.31" "@langchain/textsplitters": "npm:^0.1.0" md5: "npm:^2.3.0" office-text-extractor: "npm:^3.0.3" - checksum: 10c0/cd45963f9405cd1b817f9539ad876dd32e214d21b651459822fc9f829105dc4934daf1aded9cc7084efd9dc914901b4b72fc52a7c5aa9fb550454b0e465844cf + checksum: 10c0/67c36c8a18ce7ed7312544bda37d2378a4d604eefa454700b142fd5ed1cb3a80c9b02bb392610a9b7b797ebb5aab0bd2437b74c2b125765dabf3064d4f55c2fd languageName: node linkType: hard -"@cherrystudio/embedjs-loader-sitemap@npm:^0.1.28": - version: 0.1.29 - resolution: "@cherrystudio/embedjs-loader-sitemap@npm:0.1.29" +"@cherrystudio/embedjs-loader-sitemap@npm:^0.1.31": + version: 0.1.31 + resolution: "@cherrystudio/embedjs-loader-sitemap@npm:0.1.31" dependencies: - "@cherrystudio/embedjs-interfaces": "npm:0.1.29" - "@cherrystudio/embedjs-loader-web": "npm:0.1.29" + "@cherrystudio/embedjs-interfaces": "npm:0.1.31" + "@cherrystudio/embedjs-loader-web": "npm:0.1.31" debug: 
"npm:^4.4.0" md5: "npm:^2.3.0" sitemapper: "npm:^3.2.20" - checksum: 10c0/2cb5fba68f3e89026b08274f3d286b46c44192f3e8b499d72984e63f36d174bdc7da6c8122c922b8fd5660fa0bc1fbbdbaecc37dae134467d2a501fd1642f0d2 + checksum: 10c0/9e8f644b7f248c3db86cae0945d841c35a4ecc8101a7737b8476250d6acf8b1b176d7144b044a6687dcbe1439528abdb4b0cc15057fd78b95dc65f4426eac50d languageName: node linkType: hard -"@cherrystudio/embedjs-loader-web@npm:0.1.29, @cherrystudio/embedjs-loader-web@npm:^0.1.28": - version: 0.1.29 - resolution: "@cherrystudio/embedjs-loader-web@npm:0.1.29" +"@cherrystudio/embedjs-loader-web@npm:0.1.31, @cherrystudio/embedjs-loader-web@npm:^0.1.31": + version: 0.1.31 + resolution: "@cherrystudio/embedjs-loader-web@npm:0.1.31" dependencies: - "@cherrystudio/embedjs-interfaces": "npm:0.1.29" - "@cherrystudio/embedjs-utils": "npm:0.1.29" + "@cherrystudio/embedjs-interfaces": "npm:0.1.31" + "@cherrystudio/embedjs-utils": "npm:0.1.31" "@langchain/textsplitters": "npm:^0.1.0" debug: "npm:^4.4.0" html-to-text: "npm:^9.0.5" md5: "npm:^2.3.0" - checksum: 10c0/a15529e45e309993644db7ee8546f970c0d94fd54baf5044d650a0af66cddb9729af5f3a3fb115c13cd7b7b2ab38bb3809cc088d4c6346e9ca33e478845820f3 + checksum: 10c0/a5b06d597794520fd92aa4a711e5e8ea4858573a1d079981753ffe2d3dcb1a6212fe467695fe92a0400409db3148495bee91d1ee82863f8053698d7ffc2a792e languageName: node linkType: hard -"@cherrystudio/embedjs-loader-xml@npm:^0.1.28": - version: 0.1.29 - resolution: "@cherrystudio/embedjs-loader-xml@npm:0.1.29" +"@cherrystudio/embedjs-loader-xml@npm:^0.1.31": + version: 0.1.31 + resolution: "@cherrystudio/embedjs-loader-xml@npm:0.1.31" dependencies: - "@cherrystudio/embedjs-interfaces": "npm:0.1.29" + "@cherrystudio/embedjs-interfaces": "npm:0.1.31" debug: "npm:^4.4.0" fast-xml-parser: "npm:^4.5.1" md5: "npm:^2.3.0" - checksum: 10c0/cf24dc1b48d55197f3773a9f7490c521461c6cade86869a333bac6c05dae10529ecdbaa03dbfce0994e07215fe343c9801b81356a6141965aa10d50fe2e6c858 + checksum: 10c0/bc41eb67741a2e2cc6a48147eb0c8600e4876a72259d5ce347fed1ad48d7efc186030301ff6965728d7b3c2cbfdbc867558c764b40edaffaa2480f004dc1d2ae languageName: node linkType: hard -"@cherrystudio/embedjs-openai@npm:^0.1.28": - version: 0.1.29 - resolution: "@cherrystudio/embedjs-openai@npm:0.1.29" +"@cherrystudio/embedjs-openai@npm:^0.1.31": + version: 0.1.31 + resolution: "@cherrystudio/embedjs-openai@npm:0.1.31" dependencies: - "@cherrystudio/embedjs-interfaces": "npm:0.1.29" + "@cherrystudio/embedjs-interfaces": "npm:0.1.31" "@langchain/core": "npm:^0.3.26" "@langchain/openai": "npm:^0.3.16" debug: "npm:^4.4.0" - checksum: 10c0/10451eb9e0c8f613ea3829b478120890ee44e2a9c7707a48797c21cbd4f4479ad56f86bd38099762900ddf17d42758dba938325eaaa9fae66f71e033c6b64dd5 + checksum: 10c0/2fb50cbdd0f226c5be34ed5fc8b34f544cff17e3b1a295cba26404d327e08a369af30dd73f18632d8855adbaa063c3e14033cda1795a17b883d248d43feae0d1 languageName: node linkType: hard -"@cherrystudio/embedjs-utils@npm:0.1.29": - version: 0.1.29 - resolution: "@cherrystudio/embedjs-utils@npm:0.1.29" +"@cherrystudio/embedjs-utils@npm:0.1.30": + version: 0.1.30 + resolution: "@cherrystudio/embedjs-utils@npm:0.1.30" dependencies: - "@cherrystudio/embedjs-interfaces": "npm:0.1.29" - checksum: 10c0/1b8d8e38207e7588ee134c316bc566dfb68d56023887be61793480cdeac0fc5a2dcc3c72f7727daae801abcaf16e8518ab9dcb7b0b0f7d3a16473a8830ba9dff + "@cherrystudio/embedjs-interfaces": "npm:0.1.30" + checksum: 10c0/1bd6151a69b6e4db6c93528622ff4f7834f80834681f28758d19f9780e8da36f29c21737d49809021ba5b6b1127dd7d2891e26864e2d696f83f577966d1cbf2c languageName: 
node linkType: hard -"@cherrystudio/embedjs@npm:^0.1.28": - version: 0.1.29 - resolution: "@cherrystudio/embedjs@npm:0.1.29" +"@cherrystudio/embedjs-utils@npm:0.1.31": + version: 0.1.31 + resolution: "@cherrystudio/embedjs-utils@npm:0.1.31" dependencies: - "@cherrystudio/embedjs-interfaces": "npm:0.1.29" - "@cherrystudio/embedjs-utils": "npm:0.1.29" + "@cherrystudio/embedjs-interfaces": "npm:0.1.31" + checksum: 10c0/249e0cbf84adf04948ef5d3071df56ceb1804a716ae577e68c167415ac90eb711ded49375c95de3d68b49700dbfdfc16ac80b00b571d15469672898dfc82be77 + languageName: node + linkType: hard + +"@cherrystudio/embedjs@npm:^0.1.31": + version: 0.1.31 + resolution: "@cherrystudio/embedjs@npm:0.1.31" + dependencies: + "@cherrystudio/embedjs-interfaces": "npm:0.1.31" + "@cherrystudio/embedjs-utils": "npm:0.1.31" "@langchain/textsplitters": "npm:^0.1.0" debug: "npm:^4.4.0" langchain: "npm:^0.3.8" md5: "npm:^2.3.0" mime: "npm:^4.0.6" stream-mime-type: "npm:^2.0.0" - checksum: 10c0/01487ab886e7c5c260fc65dee0d67407988e58fa82a1f0fdc4a332548a570cc63f5d39cc87878a7e3272be547a306fee5ec6caa805614eb4e139ce1259b7f6c9 + checksum: 10c0/632d82848e24e57bbd4cd3bcfd63e753d7c694879d701b93c7eab1ad110b0655c6bf9f3781f63a5e564fac1a450cfe7f8ec848d8a12b6a7a1d0bf7f73eb3fa4a languageName: node linkType: hard @@ -4324,17 +4345,17 @@ __metadata: "@agentic/tavily": "npm:^7.3.3" "@ant-design/v5-patch-for-react-19": "npm:^1.0.3" "@anthropic-ai/sdk": "npm:^0.41.0" - "@cherrystudio/embedjs": "npm:^0.1.28" - "@cherrystudio/embedjs-libsql": "npm:^0.1.28" - "@cherrystudio/embedjs-loader-csv": "npm:^0.1.28" - "@cherrystudio/embedjs-loader-image": "npm:^0.1.28" - "@cherrystudio/embedjs-loader-markdown": "npm:^0.1.28" - "@cherrystudio/embedjs-loader-msoffice": "npm:^0.1.28" - "@cherrystudio/embedjs-loader-pdf": "npm:^0.1.28" - "@cherrystudio/embedjs-loader-sitemap": "npm:^0.1.28" - "@cherrystudio/embedjs-loader-web": "npm:^0.1.28" - "@cherrystudio/embedjs-loader-xml": "npm:^0.1.28" - "@cherrystudio/embedjs-openai": "npm:^0.1.28" + "@cherrystudio/embedjs": "npm:^0.1.31" + "@cherrystudio/embedjs-libsql": "npm:^0.1.31" + "@cherrystudio/embedjs-loader-csv": "npm:^0.1.31" + "@cherrystudio/embedjs-loader-image": "npm:^0.1.31" + "@cherrystudio/embedjs-loader-markdown": "npm:^0.1.31" + "@cherrystudio/embedjs-loader-msoffice": "npm:^0.1.31" + "@cherrystudio/embedjs-loader-pdf": "npm:^0.1.31" + "@cherrystudio/embedjs-loader-sitemap": "npm:^0.1.31" + "@cherrystudio/embedjs-loader-web": "npm:^0.1.31" + "@cherrystudio/embedjs-loader-xml": "npm:^0.1.31" + "@cherrystudio/embedjs-openai": "npm:^0.1.31" "@electron-toolkit/eslint-config-prettier": "npm:^3.0.0" "@electron-toolkit/eslint-config-ts": "npm:^3.0.0" "@electron-toolkit/preload": "npm:^3.0.0" @@ -4377,7 +4398,6 @@ __metadata: "@vitest/coverage-v8": "npm:^3.1.1" "@vitest/ui": "npm:^3.1.1" "@xyflow/react": "npm:^12.4.4" - adm-zip: "npm:^0.5.16" antd: "npm:^5.22.5" applescript: "npm:^1.0.0" archiver: "npm:^7.0.1" @@ -4385,7 +4405,6 @@ __metadata: axios: "npm:^1.7.3" babel-plugin-styled-components: "npm:^2.1.4" browser-image-compression: "npm:^2.0.2" - bufferutil: "npm:^4.0.9" color: "npm:^5.0.0" dayjs: "npm:^1.11.11" dexie: "npm:^4.0.8" @@ -4409,7 +4428,6 @@ __metadata: eslint-plugin-react-hooks: "npm:^5.2.0" eslint-plugin-simple-import-sort: "npm:^12.1.1" eslint-plugin-unused-imports: "npm:^4.1.4" - extract-zip: "npm:^2.0.1" fast-xml-parser: "npm:^5.2.0" fetch-socks: "npm:^1.3.2" fs-extra: "npm:^11.2.0" @@ -4465,7 +4483,6 @@ __metadata: turndown: "npm:^7.2.0" turndown-plugin-gfm: "npm:^1.0.2" typescript: 
"npm:^5.6.2" - undici: "npm:^7.4.0" uuid: "npm:^10.0.0" vite: "npm:6.2.6" vitest: "npm:^3.1.1" @@ -4526,7 +4543,7 @@ __metadata: languageName: node linkType: hard -"adm-zip@npm:^0.5.16, adm-zip@npm:^0.5.9": +"adm-zip@npm:^0.5.9": version: 0.5.16 resolution: "adm-zip@npm:0.5.16" checksum: 10c0/6f10119d4570c7ba76dcf428abb8d3f69e63f92e51f700a542b43d4c0130373dd2ddfc8f85059f12d4a843703a90c3970cfd17876844b4f3f48bf042bfa6b49f @@ -5299,16 +5316,6 @@ __metadata: languageName: node linkType: hard -"bufferutil@npm:^4.0.9": - version: 4.0.9 - resolution: "bufferutil@npm:4.0.9" - dependencies: - node-gyp: "npm:latest" - node-gyp-build: "npm:^4.3.0" - checksum: 10c0/f8a93279fc9bdcf32b42eba97edc672b39ca0fe5c55a8596099886cffc76ea9dd78e0f6f51ecee3b5ee06d2d564aa587036b5d4ea39b8b5ac797262a363cdf7d - languageName: node - linkType: hard - "builder-util-runtime@npm:9.3.2": version: 9.3.2 resolution: "builder-util-runtime@npm:9.3.2" @@ -12574,17 +12581,6 @@ __metadata: languageName: node linkType: hard -"node-gyp-build@npm:^4.3.0": - version: 4.8.4 - resolution: "node-gyp-build@npm:4.8.4" - bin: - node-gyp-build: bin.js - node-gyp-build-optional: optional.js - node-gyp-build-test: build-test.js - checksum: 10c0/444e189907ece2081fe60e75368784f7782cfddb554b60123743dfb89509df89f1f29c03bbfa16b3a3e0be3f48799a4783f487da6203245fa5bed239ba7407e1 - languageName: node - linkType: hard - "node-gyp@npm:^9.1.0": version: 9.4.1 resolution: "node-gyp@npm:9.4.1" @@ -16892,7 +16888,7 @@ __metadata: languageName: node linkType: hard -"undici@npm:>=6, undici@npm:^7.4.0": +"undici@npm:>=6": version: 7.8.0 resolution: "undici@npm:7.8.0" checksum: 10c0/7141f63ea405208a88120d211d83d77bf21327b16b451d3149fb266c28884fbcf78ec370ac2d3412a0e68ba6132ab85265ba85a2f4fde24cb47dc77f5c5a158c