Merge branch 'main' into develop

kangfenmao 2025-05-17 21:41:38 +08:00
commit 47c9465699
9 changed files with 88 additions and 36 deletions

View File

@@ -91,6 +91,7 @@ afterSign: scripts/notarize.js
 artifactBuildCompleted: scripts/artifact-build-completed.js
 releaseInfo:
   releaseNotes: |
+    ⚠️ Note: back up your data before upgrading; otherwise you will not be able to downgrade
     Refactored the message structure so that different message types are displayed in chronological order
     Agents now support import and export
     Added web search engine selection to the quick panel

View File

@@ -1,6 +1,6 @@
 {
   "name": "CherryStudio",
-  "version": "1.3.4",
+  "version": "1.3.5",
   "private": true,
   "description": "A powerful AI assistant for producer.",
   "main": "./out/main/index.js",

View File

@@ -5,7 +5,7 @@ import { lightbulbVariants } from '@renderer/utils/motionVariants'
 import { Collapse, message as antdMessage, Tooltip } from 'antd'
 import { Lightbulb } from 'lucide-react'
 import { motion } from 'motion/react'
-import { memo, useCallback, useEffect, useMemo, useRef, useState } from 'react'
+import { memo, useCallback, useEffect, useMemo, useState } from 'react'
 import { useTranslation } from 'react-i18next'
 import styled from 'styled-components'
@@ -20,8 +20,6 @@ const ThinkingBlock: React.FC<Props> = ({ block }) => {
   const { t } = useTranslation()
   const { messageFont, fontSize, thoughtAutoCollapse } = useSettings()
   const [activeKey, setActiveKey] = useState<'thought' | ''>(thoughtAutoCollapse ? '' : 'thought')
-  const [thinkingTime, setThinkingTime] = useState(block.thinking_millsec || 0)
-  const intervalId = useRef<NodeJS.Timeout>(null)
   const isThinking = useMemo(() => block.status === MessageBlockStatus.STREAMING, [block.status])
@@ -55,28 +53,6 @@ const ThinkingBlock: React.FC<Props> = ({ block }) => {
     }
   }, [block.content, t])

-  // FIXME: the time counted here drifts slightly from the time measured at the request site
-  useEffect(() => {
-    if (isThinking) {
-      intervalId.current = setInterval(() => {
-        setThinkingTime((prev) => prev + 100)
-      }, 100)
-    } else if (intervalId.current) {
-      // Clear the timer immediately
-      clearInterval(intervalId.current)
-      intervalId.current = null
-    }
-    return () => {
-      if (intervalId.current) {
-        clearInterval(intervalId.current)
-        intervalId.current = null
-      }
-    }
-  }, [isThinking])
-
-  const thinkingTimeSeconds = useMemo(() => (thinkingTime / 1000).toFixed(1), [thinkingTime])
-
   if (!block.content) {
     return null
   }
@@ -101,9 +77,7 @@ const ThinkingBlock: React.FC<Props> = ({ block }) => {
           <Lightbulb size={18} />
         </motion.span>
         <ThinkingText>
-          {t(isThinking ? 'chat.thinking' : 'chat.deeply_thought', {
-            seconds: thinkingTimeSeconds
-          })}
+          <ThinkingTimeSeconds blockThinkingTime={block.thinking_millsec} isThinking={isThinking} />
         </ThinkingText>
         {/* {isThinking && <BarLoader color="#9254de" />} */}
         {!isThinking && (
@@ -134,6 +108,41 @@ const ThinkingBlock: React.FC<Props> = ({ block }) => {
   )
 }

+const ThinkingTimeSeconds = memo(
+  ({ blockThinkingTime, isThinking }: { blockThinkingTime?: number; isThinking: boolean }) => {
+    const { t } = useTranslation()
+    const [thinkingTime, setThinkingTime] = useState(blockThinkingTime || 0)
+
+    // FIXME: the time counted here drifts slightly from the time measured at the request site
+    useEffect(() => {
+      let timer: NodeJS.Timeout | null = null
+      if (isThinking) {
+        timer = setInterval(() => {
+          setThinkingTime((prev) => prev + 100)
+        }, 100)
+      } else if (timer) {
+        // Clear the timer immediately
+        clearInterval(timer)
+        timer = null
+      }
+      return () => {
+        if (timer) {
+          clearInterval(timer)
+          timer = null
+        }
+      }
+    }, [isThinking])
+
+    const thinkingTimeSeconds = useMemo(() => (thinkingTime / 1000).toFixed(1), [thinkingTime])
+
+    return t(isThinking ? 'chat.thinking' : 'chat.deeply_thought', {
+      seconds: thinkingTimeSeconds
+    })
+  }
+)
+
 const CollapseContainer = styled(Collapse)`
   margin-bottom: 15px;
 `
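Note on the hunk above: the per-100 ms timer now lives in a memoized ThinkingTimeSeconds child, so only that small text node re-renders on each tick instead of the whole ThinkingBlock. A minimal standalone .tsx sketch of the same pattern; the names ElapsedSeconds, initialMs and running are illustrative, not part of the project:

import { memo, useEffect, useState } from 'react'

// Ticks every 100 ms while `running` is true; the parent that renders it is not re-rendered.
const ElapsedSeconds = memo(({ initialMs, running }: { initialMs?: number; running: boolean }) => {
  const [elapsedMs, setElapsedMs] = useState(initialMs ?? 0)

  useEffect(() => {
    if (!running) return
    const timer = setInterval(() => setElapsedMs((prev) => prev + 100), 100)
    // The cleanup clears the interval when `running` flips to false or the component unmounts.
    return () => clearInterval(timer)
  }, [running])

  return <span>{(elapsedMs / 1000).toFixed(1)}s</span>
})

export default ElapsedSeconds

It would be rendered as <ElapsedSeconds initialMs={1200} running={true} /> wherever the elapsed label is needed.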

View File

@@ -237,6 +237,7 @@ const DisplaySettings: FC = () => {
            minHeight: 200,
            fontFamily: 'monospace'
          }}
+         spellCheck={false}
        />
      </SettingGroup>
    </SettingContainer>

View File

@@ -287,7 +287,8 @@ export default class GeminiProvider extends BaseProvider {
    if (reasoningEffort === undefined) {
      return {
        thinkingConfig: {
-          includeThoughts: false
+          includeThoughts: false,
+          thinkingBudget: 0
        } as ThinkingConfig
      }
    }
@@ -921,7 +922,8 @@ export default class GeminiProvider extends BaseProvider {
      config = {
        ...config,
        thinkingConfig: {
-          includeThoughts: false
+          includeThoughts: false,
+          thinkingBudget: 0
        } as ThinkingConfig
      }
    }
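Note on the two GeminiProvider hunks above: when no reasoning effort is set, the provider now requests an explicit zero thinking budget in addition to hiding thoughts. A minimal sketch of the resulting config shape; buildThinkingConfig and ThinkingConfigSketch below are illustrative names, not part of the codebase:

// Simplified Gemini thinking config shape; field names follow the hunks above.
interface ThinkingConfigSketch {
  includeThoughts: boolean
  thinkingBudget: number
}

// Hypothetical helper: map an optional token budget onto the thinkingConfig shape.
function buildThinkingConfig(budgetTokens?: number): ThinkingConfigSketch {
  if (!budgetTokens) {
    // No reasoning requested: hide thoughts and allocate no thinking tokens.
    return { includeThoughts: false, thinkingBudget: 0 }
  }
  return { includeThoughts: true, thinkingBudget: budgetTokens }
}

console.log(buildThinkingConfig()) // { includeThoughts: false, thinkingBudget: 0 }
console.log(buildThinkingConfig(1024)) // { includeThoughts: true, thinkingBudget: 1024 }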

View File

@@ -10,6 +10,7 @@ import {
  isSupportedReasoningEffortModel,
  isSupportedReasoningEffortOpenAIModel,
  isSupportedThinkingTokenClaudeModel,
+ isSupportedThinkingTokenGeminiModel,
  isSupportedThinkingTokenModel,
  isSupportedThinkingTokenQwenModel,
  isVisionModel,
@@ -258,6 +259,19 @@ export default class OpenAIProvider extends BaseOpenAIProvider {
        return { thinking: { type: 'disabled' } }
      }

+      if (isSupportedThinkingTokenGeminiModel(model)) {
+        // OpenRouter does not offer a "no reasoning" option, so hide the reasoning output for now
+        if (this.provider.id === 'openrouter') {
+          return { reasoning: { maxTokens: 0, exclude: true } }
+        }
+        return {
+          thinkingConfig: {
+            includeThoughts: false,
+            thinkingBudget: 0
+          }
+        }
+      }
+
      return {}
    }

    const effortRatio = EFFORT_RATIO[reasoningEffort]
@@ -313,6 +327,16 @@ export default class OpenAIProvider extends BaseOpenAIProvider {
      }
    }

+    // Gemini models
+    if (isSupportedThinkingTokenGeminiModel(model)) {
+      return {
+        thinkingConfig: {
+          thinkingBudget: budgetTokens,
+          includeThoughts: true
+        }
+      }
+    }
+
    // Default case: no special thinking settings
    return {}
  }
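Note on the two OpenAIProvider hunks above: Gemini models reached through OpenAI-compatible endpoints now get a reasoning config too, with a special case for OpenRouter when reasoning is off. A standalone sketch of that branching; geminiReasoningConfig and its parameters are illustrative, only the payload shapes come from the diff:

type ReasoningPayload =
  | { reasoning: { maxTokens: number; exclude: boolean } }
  | { thinkingConfig: { thinkingBudget: number; includeThoughts: boolean } }

// Hypothetical helper mirroring the branching added above; providerId and budgetTokens stand in
// for what the real code derives from this.provider and the assistant's reasoning effort.
function geminiReasoningConfig(providerId: string, budgetTokens?: number): ReasoningPayload {
  if (!budgetTokens) {
    // Reasoning off. OpenRouter exposes no explicit "no reasoning" switch, so exclude it instead.
    if (providerId === 'openrouter') {
      return { reasoning: { maxTokens: 0, exclude: true } }
    }
    return { thinkingConfig: { includeThoughts: false, thinkingBudget: 0 } }
  }
  // Reasoning on: pass the budget through and include the thoughts in the stream.
  return { thinkingConfig: { thinkingBudget: budgetTokens, includeThoughts: true } }
}

console.log(geminiReasoningConfig('openrouter'))
// -> { reasoning: { maxTokens: 0, exclude: true } }
console.log(geminiReasoningConfig('gemini', 2048))
// -> { thinkingConfig: { thinkingBudget: 2048, includeThoughts: true } }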
@@ -718,9 +742,17 @@ export default class OpenAIProvider extends BaseOpenAIProvider {
          const usage = chunk.usage
          const originalFinishDelta = chunk.delta
          const originalFinishRawChunk = chunk.chunk

          if (!isEmpty(finishReason)) {
-            onChunk({ type: ChunkType.TEXT_COMPLETE, text: content })
+            if (content) {
+              onChunk({ type: ChunkType.TEXT_COMPLETE, text: content })
+            }
+            if (thinkingContent) {
+              onChunk({
+                type: ChunkType.THINKING_COMPLETE,
+                text: thinkingContent,
+                thinking_millsec: new Date().getTime() - time_first_token_millsec
+              })
+            }
            if (usage) {
              finalUsage.completion_tokens += usage.completion_tokens || 0
              finalUsage.prompt_tokens += usage.prompt_tokens || 0
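Note on the hunk above: on finish, TEXT_COMPLETE is only emitted when there is accumulated text, and a THINKING_COMPLETE chunk carrying the elapsed thinking time is now flushed when reasoning content was streamed. A reduced sketch of that finish handling; the chunk shapes follow the diff, the helper itself is illustrative:

type CompletionChunk =
  | { type: 'TEXT_COMPLETE'; text: string }
  | { type: 'THINKING_COMPLETE'; text: string; thinking_millsec: number }

// Hypothetical finish handler: flush whichever content kinds were accumulated during streaming.
function emitFinishChunks(
  onChunk: (chunk: CompletionChunk) => void,
  content: string,
  thinkingContent: string,
  timeFirstTokenMillsec: number
): void {
  if (content) {
    onChunk({ type: 'TEXT_COMPLETE', text: content })
  }
  if (thinkingContent) {
    onChunk({
      type: 'THINKING_COMPLETE',
      text: thinkingContent,
      // Approximate thinking duration: now minus the timestamp of the first streamed token.
      thinking_millsec: Date.now() - timeFirstTokenMillsec
    })
  }
}

emitFinishChunks(console.log, 'final answer', 'chain of thought', Date.now() - 1200)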
@@ -812,7 +844,6 @@ export default class OpenAIProvider extends BaseOpenAIProvider {
      if (toolResults.length) {
        await processToolResults(toolResults, idx)
      }
-
      onChunk({
        type: ChunkType.BLOCK_COMPLETE,
        response: {

View File

@@ -593,7 +593,7 @@ export abstract class BaseOpenAIProvider extends BaseProvider {
        onChunk({
          type: ChunkType.LLM_WEB_SEARCH_COMPLETE,
          llm_web_search: {
-            source: WebSearchSource.OPENAI,
+            source: WebSearchSource.OPENAI_RESPONSE,
            results: chunk.part.annotations
          }
        })

View File

@@ -622,6 +622,14 @@ const fetchAndProcessAssistantResponseImpl = async (
    const contextForUsage = userMsgIndex !== -1 ? orderedMsgs.slice(0, userMsgIndex + 1) : []
    const finalContextWithAssistant = [...contextForUsage, finalAssistantMsg]

+    if (lastBlockId) {
+      const changes: Partial<MessageBlock> = {
+        status: MessageBlockStatus.SUCCESS
+      }
+      dispatch(updateOneBlock({ id: lastBlockId, changes }))
+      saveUpdatedBlockToDB(lastBlockId, assistantMsgId, topicId, getState)
+    }
+
    // Update the topic's name
    autoRenameTopic(assistant, topicId)
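Note on the hunk above: before the topic is renamed, the last streamed block is marked SUCCESS in the store and persisted. A minimal sketch of that finalize step, assuming a Redux Toolkit entity adapter for blocks; the slice, BlockSketch, and the omitted persistence step are simplified stand-ins for the project's own store and Dexie helpers:

import { configureStore, createEntityAdapter, createSlice } from '@reduxjs/toolkit'

// Simplified block shape; the real project has its own MessageBlock type and block statuses.
interface BlockSketch {
  id: string
  status: 'STREAMING' | 'SUCCESS' | 'ERROR'
}

const blocksAdapter = createEntityAdapter<BlockSketch>()

const blocksSlice = createSlice({
  name: 'blocks',
  initialState: blocksAdapter.getInitialState(),
  reducers: {
    upsertOneBlock: blocksAdapter.upsertOne,
    // Same { id, changes } call shape as the updateOneBlock action used in the hunk.
    updateOneBlock: blocksAdapter.updateOne
  }
})

const store = configureStore({ reducer: { blocks: blocksSlice.reducer } })

store.dispatch(blocksSlice.actions.upsertOneBlock({ id: 'b1', status: 'STREAMING' }))
// When the stream ends, finalize the last block before any follow-up work (persisting, renaming the topic).
store.dispatch(blocksSlice.actions.updateOneBlock({ id: 'b1', changes: { status: 'SUCCESS' } }))

console.log(store.getState().blocks.entities['b1']) // { id: 'b1', status: 'SUCCESS' }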
@@ -734,7 +742,6 @@ export const loadTopicMessagesThunk =
    try {
      const topic = await db.topics.get(topicId)
      if (!topic) {
        await db.topics.add({ id: topicId, messages: [] })
      }

View File

@@ -412,6 +412,7 @@ export function upsertMCPToolResponse(
      const cur = {
        ...results[index],
        response: resp.response,
+        arguments: resp.arguments,
        status: resp.status
      }
      results[index] = cur
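Note on the hunk above: when an existing tool response is updated, the upsert now carries the call's arguments along with the response and status. A small self-contained sketch of that merge; ToolResponseSketch and upsertToolResponse are illustrative names:

interface ToolResponseSketch {
  id: string
  arguments?: Record<string, unknown>
  response?: unknown
  status: 'invoking' | 'done' | 'error'
}

// Hypothetical upsert: replace the matching entry, now keeping its arguments in sync as well.
function upsertToolResponse(results: ToolResponseSketch[], resp: ToolResponseSketch): ToolResponseSketch[] {
  const index = results.findIndex((r) => r.id === resp.id)
  if (index === -1) {
    return [...results, resp]
  }
  const next = [...results]
  next[index] = {
    ...results[index],
    response: resp.response,
    arguments: resp.arguments,
    status: resp.status
  }
  return next
}

const merged = upsertToolResponse(
  [{ id: 'call_1', status: 'invoking' }],
  { id: 'call_1', arguments: { query: 'weather' }, response: { ok: true }, status: 'done' }
)
console.log(merged[0].arguments) // { query: 'weather' }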