Merge branch 'main' into v2

This commit is contained in:
fullex 2025-12-26 14:18:06 +08:00
commit f84a2588fd
8 changed files with 119 additions and 48 deletions

View File

@ -135,38 +135,68 @@ artifactBuildCompleted: scripts/artifact-build-completed.js
releaseInfo: releaseInfo:
releaseNotes: | releaseNotes: |
<!--LANG:en--> <!--LANG:en-->
Cherry Studio 1.7.6 - New Models & MCP Enhancements Cherry Studio 1.7.7 - New Models & UI Improvements
This release adds support for new AI models and includes a new MCP server for memory management. This release adds new AI model support, OpenRouter integration, and UI redesigns.
✨ New Features ✨ New Features
- [Models] Add support for Xiaomi MiMo model - [Models] Add GLM-4.7 and MiniMax-M2.1 model support
- [Models] Add support for Gemini 3 Flash and Pro model detection - [Provider] Add OpenRouter provider support
- [Models] Add support for Volcengine Doubao-Seed-1.8 model - [OVMS] Upgrade to 2025.4 with Qwen3-4B-int4-ov preset model
- [MCP] Add Nowledge Mem builtin MCP server for memory management - [OVMS] Close OVMS process when app quits
- [Settings] Add default reasoning effort option to resolve confusion between undefined and none - [Search] Show keyword-adjacent snippets in history search
- [Painting] Add extend_params support for DMX painting
- [UI] Add MCP logo and replace Hammer icon
🎨 UI Improvements
- [Notes] Move notes settings to popup in NotesPage for quick access
- [WebSearch] Redesign settings with two-column layout and "Set as Default" button
- [Display] Improve font selector for long font names
- [Transfer] Rename LanDrop to LanTransfer
🐛 Bug Fixes 🐛 Bug Fixes
- [Azure] Restore deployment-based URLs for non-v1 apiVersion - [API] Correct aihubmix Anthropic API path
- [Translation] Disable reasoning mode for translation to improve efficiency - [OpenRouter] Support GPT-5.1/5.2 reasoning effort 'none' and improve error handling
- [Image] Update API path for image generation requests in OpenAIBaseClient - [Thinking] Fix interleaved thinking support
- [Windows] Auto-discover and persist Git Bash path on Windows for scoop users - [Memory] Fix retrieval issues and enable database backup
- [Settings] Update default assistant settings to disable temperature
- [OpenAI] Add persistent server configuration support
- [Azure] Normalize Azure endpoint
- [MCP] Check system npx/uvx before falling back to bundled binaries
- [Prompt] Improve language instruction clarity
- [Models] Include GPT5.2 series in verbosity check
- [URL] Enhance urlContext validation for supported providers and models
<!--LANG:zh-CN--> <!--LANG:zh-CN-->
Cherry Studio 1.7.6 - 新模型与 MCP 增强 Cherry Studio 1.7.7 - 新模型与界面改进
本次更新添加了多个新 AI 模型支持,并新增记忆管理 MCP 服务器。 本次更新添加了新 AI 模型支持、OpenRouter 集成以及界面重新设计。
✨ 新功能 ✨ 新功能
- [模型] 添加小米 MiMo 模型支持 - [模型] 添加 GLM-4.7 和 MiniMax-M2.1 模型支持
- [模型] 添加 Gemini 3 Flash 和 Pro 模型检测支持 - [服务商] 添加 OpenRouter 服务商支持
- [模型] 添加火山引擎 Doubao-Seed-1.8 模型支持 - [OVMS] 升级至 2025.4,新增 Qwen3-4B-int4-ov 预设模型
- [MCP] 新增 Nowledge Mem 内置 MCP 服务器,用于记忆管理 - [OVMS] 应用退出时关闭 OVMS 进程
- [设置] 添加默认推理强度选项,解决 undefined 和 none 之间的混淆 - [搜索] 历史搜索显示关键词上下文片段
- [绘图] DMX 绘图添加扩展参数支持
- [界面] 添加 MCP 图标并替换锤子图标
🎨 界面改进
- [笔记] 将笔记设置移至笔记页弹窗,快速访问无需离开当前页面
- [网页搜索] 采用两栏布局重新设计设置界面,添加"设为默认"按钮
- [显示] 改进长字体名称的字体选择器
- [传输] LanDrop 重命名为 LanTransfer
🐛 问题修复 🐛 问题修复
- [Azure] 修复非 v1 apiVersion 的部署 URL 问题 - [API] 修复 aihubmix Anthropic API 路径
- [翻译] 禁用翻译时的推理模式以提高效率 - [OpenRouter] 支持 GPT-5.1/5.2 reasoning effort 'none' 并改进错误处理
- [图像] 更新 OpenAIBaseClient 中图像生成请求的 API 路径 - [思考] 修复交错思考支持
- [Windows] 自动发现并保存 Windows scoop 用户的 Git Bash 路径 - [记忆] 修复检索问题并启用数据库备份
- [设置] 更新默认助手设置禁用温度
- [OpenAI] 添加持久化服务器配置支持
- [Azure] 规范化 Azure 端点
- [MCP] 优先检查系统 npx/uvx 再回退到内置二进制文件
- [提示词] 改进语言指令清晰度
- [模型] GPT5.2 系列添加到 verbosity 检查
- [URL] 增强 urlContext 对支持的服务商和模型的验证
<!--LANG:END--> <!--LANG:END-->

View File

@ -2,7 +2,7 @@ import { loggerService } from '@logger'
import { import {
checkName, checkName,
getFilesDir, getFilesDir,
getFileType, getFileType as getFileTypeByExt,
getName, getName,
getNotesDir, getNotesDir,
getTempDir, getTempDir,
@ -11,13 +11,13 @@ import {
} from '@main/utils/file' } from '@main/utils/file'
import { documentExts, imageExts, KB, MB } from '@shared/config/constant' import { documentExts, imageExts, KB, MB } from '@shared/config/constant'
import type { FileMetadata, NotesTreeNode } from '@types' import type { FileMetadata, NotesTreeNode } from '@types'
import { FileTypes } from '@types'
import chardet from 'chardet' import chardet from 'chardet'
import type { FSWatcher } from 'chokidar' import type { FSWatcher } from 'chokidar'
import chokidar from 'chokidar' import chokidar from 'chokidar'
import * as crypto from 'crypto' import * as crypto from 'crypto'
import type { OpenDialogOptions, OpenDialogReturnValue, SaveDialogOptions, SaveDialogReturnValue } from 'electron' import type { OpenDialogOptions, OpenDialogReturnValue, SaveDialogOptions, SaveDialogReturnValue } from 'electron'
import { app } from 'electron' import { app, dialog, net, shell } from 'electron'
import { dialog, net, shell } from 'electron'
import * as fs from 'fs' import * as fs from 'fs'
import { writeFileSync } from 'fs' import { writeFileSync } from 'fs'
import { readFile } from 'fs/promises' import { readFile } from 'fs/promises'
@ -185,7 +185,7 @@ class FileStorage {
}) })
} }
findDuplicateFile = async (filePath: string): Promise<FileMetadata | null> => { private findDuplicateFile = async (filePath: string): Promise<FileMetadata | null> => {
const stats = fs.statSync(filePath) const stats = fs.statSync(filePath)
logger.debug(`stats: ${stats}, filePath: ${filePath}`) logger.debug(`stats: ${stats}, filePath: ${filePath}`)
const fileSize = stats.size const fileSize = stats.size
@ -204,6 +204,8 @@ class FileStorage {
if (originalHash === storedHash) { if (originalHash === storedHash) {
const ext = path.extname(file) const ext = path.extname(file)
const id = path.basename(file, ext) const id = path.basename(file, ext)
const type = await this.getFileType(filePath)
return { return {
id, id,
origin_name: file, origin_name: file,
@ -212,7 +214,7 @@ class FileStorage {
created_at: storedStats.birthtime.toISOString(), created_at: storedStats.birthtime.toISOString(),
size: storedStats.size, size: storedStats.size,
ext, ext,
type: getFileType(ext), type,
count: 2 count: 2
} }
} }
@ -222,6 +224,13 @@ class FileStorage {
return null return null
} }
public getFileType = async (filePath: string): Promise<FileTypes> => {
const ext = path.extname(filePath)
const fileType = getFileTypeByExt(ext)
return fileType === FileTypes.OTHER && (await this._isTextFile(filePath)) ? FileTypes.TEXT : fileType
}
public selectFile = async ( public selectFile = async (
_: Electron.IpcMainInvokeEvent, _: Electron.IpcMainInvokeEvent,
options?: OpenDialogOptions options?: OpenDialogOptions
@ -241,7 +250,7 @@ class FileStorage {
const fileMetadataPromises = result.filePaths.map(async (filePath) => { const fileMetadataPromises = result.filePaths.map(async (filePath) => {
const stats = fs.statSync(filePath) const stats = fs.statSync(filePath)
const ext = path.extname(filePath) const ext = path.extname(filePath)
const fileType = getFileType(ext) const fileType = await this.getFileType(filePath)
return { return {
id: uuidv4(), id: uuidv4(),
@ -307,7 +316,7 @@ class FileStorage {
} }
const stats = await fs.promises.stat(destPath) const stats = await fs.promises.stat(destPath)
const fileType = getFileType(ext) const fileType = await this.getFileType(destPath)
const fileMetadata: FileMetadata = { const fileMetadata: FileMetadata = {
id: uuid, id: uuid,
@ -332,8 +341,7 @@ class FileStorage {
} }
const stats = fs.statSync(filePath) const stats = fs.statSync(filePath)
const ext = path.extname(filePath) const fileType = await this.getFileType(filePath)
const fileType = getFileType(ext)
return { return {
id: uuidv4(), id: uuidv4(),
@ -342,7 +350,7 @@ class FileStorage {
path: filePath, path: filePath,
created_at: stats.birthtime.toISOString(), created_at: stats.birthtime.toISOString(),
size: stats.size, size: stats.size,
ext: ext, ext: path.extname(filePath),
type: fileType, type: fileType,
count: 1 count: 1
} }
@ -690,7 +698,7 @@ class FileStorage {
created_at: new Date().toISOString(), created_at: new Date().toISOString(),
size: buffer.length, size: buffer.length,
ext: ext.slice(1), ext: ext.slice(1),
type: getFileType(ext), type: getFileTypeByExt(ext),
count: 1 count: 1
} }
} catch (error) { } catch (error) {
@ -740,7 +748,7 @@ class FileStorage {
created_at: new Date().toISOString(), created_at: new Date().toISOString(),
size: stats.size, size: stats.size,
ext: ext.slice(1), ext: ext.slice(1),
type: getFileType(ext), type: getFileTypeByExt(ext),
count: 1 count: 1
} }
} catch (error) { } catch (error) {
@ -1317,7 +1325,7 @@ class FileStorage {
await fs.promises.writeFile(destPath, buffer) await fs.promises.writeFile(destPath, buffer)
const stats = await fs.promises.stat(destPath) const stats = await fs.promises.stat(destPath)
const fileType = getFileType(ext) const fileType = await this.getFileType(destPath)
return { return {
id: uuid, id: uuid,
@ -1604,6 +1612,10 @@ class FileStorage {
} }
public isTextFile = async (_: Electron.IpcMainInvokeEvent, filePath: string): Promise<boolean> => { public isTextFile = async (_: Electron.IpcMainInvokeEvent, filePath: string): Promise<boolean> => {
return this._isTextFile(filePath)
}
private _isTextFile = async (filePath: string): Promise<boolean> => {
try { try {
const isBinary = await isBinaryFile(filePath) const isBinary = await isBinaryFile(filePath)
if (isBinary) { if (isBinary) {

View File

@ -107,7 +107,7 @@ export const SYSTEM_PROVIDERS_CONFIG: Record<SystemProviderId, SystemProvider> =
type: 'openai', type: 'openai',
apiKey: '', apiKey: '',
apiHost: 'https://aihubmix.com', apiHost: 'https://aihubmix.com',
anthropicApiHost: 'https://aihubmix.com/anthropic', anthropicApiHost: 'https://aihubmix.com',
models: SYSTEM_MODELS.aihubmix, models: SYSTEM_MODELS.aihubmix,
isSystem: true, isSystem: true,
enabled: false enabled: false
@ -289,7 +289,7 @@ export const SYSTEM_PROVIDERS_CONFIG: Record<SystemProviderId, SystemProvider> =
ollama: { ollama: {
id: 'ollama', id: 'ollama',
name: 'Ollama', name: 'Ollama',
type: 'openai', type: 'ollama',
apiKey: '', apiKey: '',
apiHost: 'http://localhost:11434', apiHost: 'http://localhost:11434',
models: SYSTEM_MODELS.ollama, models: SYSTEM_MODELS.ollama,

View File

@ -136,11 +136,14 @@ const DmxapiPage: FC<{ Options: string[] }> = ({ Options }) => {
let model = '' let model = ''
let priceModel = '' let priceModel = ''
let image_size = '' let image_size = ''
let extend_params = {}
for (const provider of Object.keys(modelGroups)) { for (const provider of Object.keys(modelGroups)) {
if (modelGroups[provider] && modelGroups[provider].length > 0) { if (modelGroups[provider] && modelGroups[provider].length > 0) {
model = modelGroups[provider][0].id model = modelGroups[provider][0].id
priceModel = modelGroups[provider][0].price priceModel = modelGroups[provider][0].price
image_size = modelGroups[provider][0].image_sizes[0].value image_size = modelGroups[provider][0].image_sizes[0].value
extend_params = modelGroups[provider][0].extend_params
break break
} }
} }
@ -149,7 +152,8 @@ const DmxapiPage: FC<{ Options: string[] }> = ({ Options }) => {
model, model,
priceModel, priceModel,
image_size, image_size,
modelGroups modelGroups,
extend_params
} }
} }
@ -158,7 +162,7 @@ const DmxapiPage: FC<{ Options: string[] }> = ({ Options }) => {
const generationMode = params?.generationMode || painting?.generationMode || MODEOPTIONS[0].value const generationMode = params?.generationMode || painting?.generationMode || MODEOPTIONS[0].value
const { model, priceModel, image_size, modelGroups } = getFirstModelInfo(generationMode) const { model, priceModel, image_size, modelGroups, extend_params } = getFirstModelInfo(generationMode)
return { return {
...DEFAULT_PAINTING, ...DEFAULT_PAINTING,
@ -169,6 +173,7 @@ const DmxapiPage: FC<{ Options: string[] }> = ({ Options }) => {
modelGroups, modelGroups,
priceModel, priceModel,
image_size, image_size,
extend_params,
...params ...params
} }
} }
@ -186,7 +191,12 @@ const DmxapiPage: FC<{ Options: string[] }> = ({ Options }) => {
const onSelectModel = (modelId: string) => { const onSelectModel = (modelId: string) => {
const model = allModels.find((m) => m.id === modelId) const model = allModels.find((m) => m.id === modelId)
if (model) { if (model) {
updatePaintingState({ model: modelId, priceModel: model.price, image_size: model.image_sizes[0].value }) updatePaintingState({
model: modelId,
priceModel: model.price,
image_size: model.image_sizes[0].value,
extend_params: model.extend_params
})
} }
} }
@ -289,7 +299,7 @@ const DmxapiPage: FC<{ Options: string[] }> = ({ Options }) => {
clearImages() clearImages()
const { model, priceModel, image_size, modelGroups } = getFirstModelInfo(v) const { model, priceModel, image_size, modelGroups, extend_params } = getFirstModelInfo(v)
setModelOptions(modelGroups) setModelOptions(modelGroups)
@ -305,9 +315,10 @@ const DmxapiPage: FC<{ Options: string[] }> = ({ Options }) => {
// 否则更新当前painting // 否则更新当前painting
updatePaintingState({ updatePaintingState({
generationMode: v, generationMode: v,
model: model, model,
image_size: image_size, image_size,
priceModel: priceModel priceModel,
extend_params
}) })
} }
} }
@ -351,7 +362,8 @@ const DmxapiPage: FC<{ Options: string[] }> = ({ Options }) => {
const params = { const params = {
prompt, prompt,
model: painting.model, model: painting.model,
n: painting.n n: painting.n,
...painting?.extend_params
} }
const headerExpand = { const headerExpand = {
@ -393,7 +405,8 @@ const DmxapiPage: FC<{ Options: string[] }> = ({ Options }) => {
const params = { const params = {
prompt, prompt,
n: painting.n, n: painting.n,
model: painting.model model: painting.model,
...painting?.extend_params
} }
if (painting.image_size) { if (painting.image_size) {

View File

@ -84,7 +84,7 @@ export const MODEOPTIONS = [
// 获取模型分组数据 // 获取模型分组数据
export const GetModelGroup = async (): Promise<DMXApiModelGroups> => { export const GetModelGroup = async (): Promise<DMXApiModelGroups> => {
try { try {
const response = await fetch('https://dmxapi.cn/cherry_painting_models_v2.json') const response = await fetch('https://dmxapi.cn/cherry_painting_models_v3.json')
if (response.ok) { if (response.ok) {
const data = await response.json() const data = await response.json()

View File

@ -71,7 +71,7 @@ const persistedReducer = persistReducer(
{ {
key: 'cherry-studio', key: 'cherry-studio',
storage, storage,
version: 189, version: 190,
blacklist: ['runtime', 'messages', 'messageBlocks', 'tabs', 'toolPermissions'], blacklist: ['runtime', 'messages', 'messageBlocks', 'tabs', 'toolPermissions'],
migrate migrate
}, },

View File

@ -3103,6 +3103,21 @@ const migrateConfig = {
logger.error('migrate 189 error', error as Error) logger.error('migrate 189 error', error as Error)
return state return state
} }
},
// 1.7.8
'190': (state: RootState) => {
try {
state.llm.providers.forEach((provider) => {
if (provider.id === SystemProviderIds.ollama) {
provider.type = 'ollama'
}
})
logger.info('migrate 190 success')
return state
} catch (error) {
logger.error('migrate 190 error', error as Error)
return state
}
} }
} }

View File

@ -395,6 +395,7 @@ export interface DmxapiPainting extends PaintingParams {
autoCreate?: boolean autoCreate?: boolean
generationMode?: generationModeType generationMode?: generationModeType
priceModel?: string priceModel?: string
extend_params?: Record<string, unknown>
} }
export interface TokenFluxPainting extends PaintingParams { export interface TokenFluxPainting extends PaintingParams {